static void Main(string[] args)
{
    using (var stream = File.OpenRead(@"C:\Users\Josh\Documents\test.csv"))
    using (var reader = new StreamReader(stream))
    using (var parser = new CsvParser(reader))
    {
        // Read() returns null at end of file; the empty body just drains the file.
        while (parser.Read() != null)
        {
        }
    }
}
private static IEnumerable<string[]> ReadInternal(System.IO.Stream stream)
{
    using (var reader = new System.IO.StreamReader(stream))
    {
        var configuration = new CSV.Configuration.Configuration
        {
            Delimiter = CultureInfo.CurrentCulture.TextInfo.ListSeparator,
            CultureInfo = CultureInfo.CurrentCulture,
            AllowComments = true,
            Comment = '#'
        };

        using (var parser = new CSV.CsvParser(reader, configuration))
        {
            while (true)
            {
                var fields = parser.Read();
                if (fields == null)
                {
                    break;
                }

                yield return fields;
            }
        }
    }
}
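// A minimal sketch of consuming ReadInternal; the file name below is a
// placeholder. Because the method is a lazy iterator, the stream must stay
// open until enumeration finishes, so keep the foreach inside the using block.
using (var stream = File.OpenRead("data.csv"))
{
    foreach (var fields in ReadInternal(stream))
    {
        Console.WriteLine(string.Join(" | ", fields));
    }
}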
private void button2_Click(object sender, EventArgs e) // assign elements button
{
    try
    {
        using (StreamReader csvFile = new StreamReader(filePath.Text))
        {
            var csvData = new CsvParser(csvFile);
            dataGridView1.DataSource = DTAcctLog;

            while (true)
            {
                var row = csvData.Read();
                if (row == null)
                {
                    break;
                }

                //foreach (string element in row) I might want this later - I like this.
                //{ }

                // Each Rows.Add call populates the columns of one new row in order.
                DTAcctLog.Rows.Add(row);
                dataGridView1.Refresh(); // Refresh the data grid view when you grab fresh data.
                sendtoDB.Show();
            }
        }
    }
    catch (Exception ex)
    {
        MessageBoxHelper.PrepToCenterMessageBoxOnForm(this);
        MessageBox.Show("You f****d up\n" + ex.Message);
    }
}
public IEnumerable<CsvModel> GetCsvModels(TextReader tr, string delimiter)
{
    List<CsvModel> models = new List<CsvModel>();

    using (var csvParser = new CsvParser(tr, new CsvHelper.Configuration.CsvConfiguration { Delimiter = delimiter }))
    {
        var headerRow = csvParser.Read();
        int classIndex = headerRow.GetIndexOf(CsvHeaders.Class);
        int objectTypeIndex = headerRow.GetIndexOf(CsvHeaders.ObjectType);
        int sourceIndex = headerRow.GetIndexOf(CsvHeaders.Source);

        if (classIndex == -1)
            throw new ArgumentException("missing column 'Class'!");
        if (objectTypeIndex == -1)
            throw new ArgumentException("missing column 'Object Type'!");

        string[] row = null;
        while ((row = csvParser.Read()) != null)
        {
            var csvModel = new CsvModel();
            csvModel.ObjectType = row[objectTypeIndex];
            csvModel.Class = row[classIndex];

            for (int i = 0; i < row.Length; i++)
            {
                if (i == sourceIndex)
                {
                    // The source column packs several file names into one field.
                    var sourceFiles = row[i].Split(new char[] { Constants.SourceFileDelimiter }, StringSplitOptions.RemoveEmptyEntries);
                    foreach (var sourceFile in sourceFiles)
                        csvModel.SourceFiles.Add(sourceFile);
                }
                else if (i != classIndex && i != objectTypeIndex && headerRow[i] != CsvHeaders.Source)
                {
                    csvModel.Values.Add(headerRow[i], row[i]);
                }
            }

            models.Add(csvModel);
        }
    }

    return models;
}
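// A minimal sketch of calling GetCsvModels with an in-memory CSV. The header
// names and the ';' source-file separator are assumptions for illustration;
// the real values come from CsvHeaders and Constants.SourceFileDelimiter.
var csv = "Class,Object Type,Source\r\nWidget,TypeA,a.cs;b.cs";
using (var reader = new StringReader(csv))
{
    foreach (var model in GetCsvModels(reader, ","))
    {
        Console.WriteLine($"{model.Class} / {model.ObjectType} ({model.SourceFiles.Count} sources)");
    }
}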
private IEnumerable<Dictionary<string, string>> GetRows(FileInfo file)
{
    using (var stream = file.OpenText())
    {
        var parser = new CsvParser(stream, new CsvConfiguration { Delimiter = ";" });

        // The first record supplies the column names.
        Fields = parser.Read();

        string[] current;
        while ((current = parser.Read()) != null)
        {
            var row = new Dictionary<string, string>();

            // Guard against records that are shorter than the header row.
            int count = Math.Min(Fields.Length, current.Length);
            for (int i = 0; i < count; i++)
            {
                row.Add(Fields[i], current[i]);
            }

            yield return row;
        }
        // No explicit Close() needed; the using block disposes the reader.
    }
}
public static List<string> GetHeaderNames(string FilePath)
{
    using (var fileReader = File.OpenText(FilePath))
    {
        var parser = new CsvParser(fileReader);

        // Read() returns null for an empty file, so guard before ToList().
        var headers = parser.Read()?.ToList() ?? new List<string>();
        headers.RemoveAll(o => string.IsNullOrWhiteSpace(o));
        return headers;
    }
}
public IEnumerable<string[]> Parse(string csv)
{
    using (var stream = StreamCreator.Create(csv, Encoding))
    using (var reader = new System.IO.StreamReader(stream, Encoding))
    using (var parser = new CsvHelper.CsvParser(reader, new Configuration
    {
        IgnoreBlankLines = IgnoreBlankLines,
        Delimiter = Delimiter,
        TrimOptions = TrimOptions.Trim | TrimOptions.InsideQuotes,
    }))
    {
        string[] fields;
        while ((fields = parser.Read()) != null)
        {
            yield return fields;
        }
    }
}
//---------------------------------------------
private static List<string[]> ReadStringCSVHelper(string str)
{
    using (TextReader reader = new StringReader(str))
    using (var parser = new CsvHelper.CsvParser(reader))
    {
        List<string[]> csvList = new List<string[]>();

        while (true)
        {
            var row = parser.Read();
            if (row == null)
            {
                break;
            }

            csvList.Add(row);
        }

        return csvList;
    }
}
private static IEnumerable<string[]> ReadFields(Stream stream)
{
    using var reader = new StreamReader(stream);

    var configuration = new CSV.Configuration.CsvConfiguration(CultureInfo.CurrentCulture)
    {
        Delimiter = CultureInfo.CurrentCulture.TextInfo.ListSeparator,
        AllowComments = true,
        Comment = '#'
    };

    using var csvParser = new CSV.CsvParser(reader, configuration);

    while (csvParser.Read())
    {
        var fields = csvParser.Record;
        if (fields is null)
        {
            break;
        }

        yield return fields;
    }
}
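// Note: ReadFields above targets a newer CsvHelper release, where
// CsvParser.Read() returns a bool and the fields are exposed through the
// Record property; most of the other snippets use the older API, where Read()
// itself returns the string[] (or null at end of file). A minimal sketch of
// the two loop shapes follows; the two methods target different CsvHelper
// versions, so they would not compile side by side in one project, and
// HandleRow is just a placeholder.
static void HandleRow(string[] fields) => Console.WriteLine(string.Join(",", fields));

static void DrainOldApi(CsvParser parser)
{
    // Older API: Read() returns the record, null at end of file.
    string[] row;
    while ((row = parser.Read()) != null)
    {
        HandleRow(row);
    }
}

static void DrainNewApi(CsvParser parser)
{
    // Newer API: Read() reports success; the fields come from parser.Record.
    while (parser.Read())
    {
        HandleRow(parser.Record);
    }
}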
private static List<string[]> ReadFileCSVHelper(string filename)
{
    using (StreamReader reader = File.OpenText(filename))
    using (var parser = new CsvHelper.CsvParser(reader))
    {
        List<string[]> csvList = new List<string[]>();

        while (true)
        {
            var row = parser.Read();
            if (row == null)
            {
                break;
            }

            csvList.Add(row);
        }

        return csvList;
    }
}
public async Task<HttpResponseMessage> Put(string indexName)
{
    var request = Request;
    if (!request.Content.IsMimeMultipartContent())
        throw new HttpResponseException(HttpStatusCode.UnsupportedMediaType);

    // Get the current index.
    var index = await _managementClient.GetIndexAsync(indexName);
    if (!index.IsSuccess)
        return Request.CreateResponse(index.StatusCode, index);

    var keyField = index.Body.Fields.FirstOrDefault(f => f.Key);
    if (keyField == null)
        return Request.CreateErrorResponse(HttpStatusCode.InternalServerError, "Unable to find key field.");

    // Read all files.
    var root = System.Web.HttpContext.Current.Server.MapPath("~/App_Data/imports");
    if (!Directory.Exists(root))
        Directory.CreateDirectory(root);
    var provider = new MultipartFormDataStreamProvider(root);
    await request.Content.ReadAsMultipartAsync(provider);

    // Operations.
    var operations = new List<IndexOperation>();

    // Process all files.
    foreach (var file in provider.FileData)
    {
        using (var streamReader = new StreamReader(file.LocalFileName))
        {
            var parser = new CsvParser(streamReader);
            parser.Configuration.Delimiter = CloudConfigurationManager.GetSetting("CsvDelimiter");

            var header = parser.Read();
            if (header == null)
                return Request.CreateErrorResponse(HttpStatusCode.InternalServerError, "The CSV file does not contain a header.");

            var columns = header.ToList();
            if (columns.IndexOf(keyField.Name) < 0)
                return Request.CreateErrorResponse(HttpStatusCode.InternalServerError, "The CSV file does not contain the key field.");

            // Process all records.
            while (true)
            {
                var row = parser.Read();
                if (row == null)
                    break;

                // Create a new operation.
                var operation = new IndexOperation(IndexOperationType.Upload, keyField.Name, row[columns.IndexOf(keyField.Name)]);
                for (int i = 0; i < row.Length; i++)
                {
                    var columnName = columns[i];
                    if (columnName == keyField.Name)
                        continue;

                    var field = index.Body.Fields.FirstOrDefault(f => f.Name == columnName);
                    // Report the column name here; dereferencing field.Name would
                    // throw a NullReferenceException when the field is missing.
                    if (field == null)
                        return Request.CreateErrorResponse(HttpStatusCode.InternalServerError, "Unknown field: " + columnName);

                    if (field.Type == FieldType.StringCollection)
                    {
                        operation.Properties.Add(columnName, row[i].Contains("/") ? row[i].Split('/') : new[] { row[i] });
                    }
                    else if (field.Type == FieldType.Double)
                    {
                        double doubleValue = 0;
                        double.TryParse(row[i], out doubleValue);
                        operation.Properties.Add(columnName, doubleValue);
                    }
                    else if (field.Type == FieldType.Integer)
                    {
                        int intValue = 0;
                        int.TryParse(row[i], out intValue);
                        operation.Properties.Add(columnName, intValue);
                    }
                    else if (field.Type == FieldType.Boolean)
                    {
                        bool booleanValue = false;
                        bool.TryParse(row[i], out booleanValue);
                        operation.Properties.Add(columnName, booleanValue);
                    }
                    else if (field.Type == FieldType.DateTimeOffset)
                    {
                        DateTimeOffset dateValue = DateTimeOffset.MinValue;
                        DateTimeOffset.TryParse(row[i], out dateValue);
                        operation.Properties.Add(columnName, dateValue);
                    }
                    else if (field.Type == FieldType.GeographyPoint)
                    {
                        if (row[i].Contains('|'))
                        {
                            var coordinates = row[i].Split('|');
                            operation.Properties.Add(columnName, new
                            {
                                type = "Point",
                                coordinates = new[]
                                {
                                    double.Parse(coordinates[0], CultureInfo.InvariantCulture), // Latitude
                                    double.Parse(coordinates[1], CultureInfo.InvariantCulture)  // Longitude
                                }
                            });
                        }
                    }
                    else
                    {
                        operation.Properties.Add(columnName, row[i]);
                    }
                }

                // Add the operation to the batch.
                operations.Add(operation);
            }
        }
    }

    // Populate.
    var result = await _managementClient.PopulateAsync(indexName, operations.ToArray());
    if (!result.IsSuccess)
        return Request.CreateResponse(result.StatusCode, result);

    return Request.CreateResponse(HttpStatusCode.OK, result);
}
private ParseResult ProcessSourceContent(byte[] data)
{
    ParseResult ret = new ParseResult();
    ret.Csv = new ParseResult.CsvParseResult();

    string[] headerNames = null;
    List<Dictionary<string, object>> dicts = new List<Dictionary<string, object>>();
    int rows = 0;
    int columns = 0;

    _CsvConfiguration.Delimiter = _ParseOptions.Csv.ColumnDelimiter.ToString();
    _CsvConfiguration.TrimOptions = TrimOptions.Trim;

    using (MemoryStream ms = new MemoryStream())
    {
        ms.Write(data, 0, data.Length);
        ms.Seek(0, SeekOrigin.Begin);

        using (TextReader tr = new StreamReader(ms))
        using (CsvHelper.CsvParser cp = new CsvHelper.CsvParser(tr, _CsvConfiguration))
        {
            while (cp.Read())
            {
                if (cp.Record == null || cp.Record.Length == 0)
                {
                    break;
                }

                if (rows == 0)
                {
                    // The first record supplies the header names.
                    headerNames = cp.Record;
                    List<string> headerNamesList = headerNames.Distinct().ToList();
                    if (headerNamesList.Count != headerNames.Length)
                    {
                        throw new DuplicateNameException("Supplied CSV contains headers that would create duplicate columns.");
                    }
                    columns = headerNames.Length;
                }
                else
                {
                    Dictionary<string, object> dict = new Dictionary<string, object>();
                    for (int i = 0; i < cp.Record.Length; i++)
                    {
                        if (headerNames.Length > i && !String.IsNullOrEmpty(headerNames[i]))
                        {
                            dict.Add(headerNames[i], cp.Record[i]);
                        }
                        else
                        {
                            // Fields beyond the header get synthesized column names.
                            dict.Add(_ParseOptions.Csv.UnknownColumnPrefix + i.ToString(), cp.Record[i]);
                        }
                    }

                    if (cp.Record.Length != columns)
                    {
                        ret.Csv.Irregular = true;
                    }
                    if (cp.Record.Length > columns)
                    {
                        columns = cp.Record.Length;
                    }

                    dicts.Add(dict);
                }

                rows++;
            }
        }
    }

    if (dicts.Count > 0)
    {
        foreach (Dictionary<string, object> dict in dicts)
        {
            foreach (KeyValuePair<string, object> kvp in dict)
            {
                ret.Flattened.Add(new DataNode(kvp.Key, kvp.Value, DataNode.TypeFromValue(kvp.Value)));
            }
        }
    }

    ret.Schema = ParserCommon.BuildSchema(ret.Flattened);
    ret.Tokens = ParserCommon.GetTokens(ret.Flattened, _TextParser);
    ret.Csv.Rows = rows;
    ret.Csv.Columns = columns;
    ret.Success = true;
    ret.Time.End = DateTime.UtcNow;
    return ret;
}
private static Task<Dictionary<string, string>> ParseLogAsync(string path)
{
    return Task.Run(async () =>
    {
        Dictionary<string, string> json = new Dictionary<string, string>();
        if (!File.Exists(path))
            return json;

        using (StreamReader sr = new StreamReader(path))
        {
            var config = new CsvConfiguration();
            config.HasHeaderRecord = true;
            config.TrimFields = true;
            config.TrimHeaders = true;

            var parser = new CsvParser(sr, config);

            string[] row = parser.Read();
            if (row == null)
                return json; // Empty file: nothing to parse.

            var fields = UniqueHeaders(row);
            do
            {
                // Kick off the counter increment before reading the next row
                // so the I/O overlaps with parsing.
                var counterT = _bucket.IncrementAsync("counter");

                row = parser.Read();
                if (row == null)
                    break;

                if (fields.Length != row.Length)
                {
                    Console.WriteLine("Warning, header count does not match line count. Headers: {0}, Lines: {1}", fields.Length, row.Length);
                }

                Dictionary<string, object> dict = new Dictionary<string, object>();
                for (int i = 0; i < fields.Length && i < row.Length; i++)
                    dict.Add(fields[i], ParseValue(row[i]));

                #region Debug
                if (_debug)
                {
                    // Jitter the log time by up to +/- 15 days for test data.
                    if (dict.ContainsKey("Log_Time"))
                    {
                        var date = DateTime.Parse(dict["Log_Time"].ToString());
                        var newDate = date.AddMinutes(new Random().Next(43200) - 43200 / 2);
                        dict["Log_Time"] = newDate.ToString("yyyy-MM-dd HH:mm:ss");
                    }
                }
                #endregion

                var counter = await counterT;
                json.Add(counter.Value.ToString(), JsonConvert.SerializeObject(dict));
            } while (row != null);

            fields = MergeSchema(fields);
            json.Add("schema", JsonConvert.SerializeObject(fields));
        }

        return json;
    });
}
static void Main(string[] args)
{
    // Arg0 = Input file
    string inputFile = "";
    List<string> outputFile = new List<string>();

    if (args.Length > 0)
    {
        if (File.Exists(args[0]))
        {
            inputFile = args[0];
        }
    }
    if (inputFile == "")
        Environment.Exit(0);

    XDocument config;
    List<int> bringTheseColumnsDown = new List<int>();
    List<int> explodeTheseColumns = new List<int>();

    if (!File.Exists(configFilePath))
    {
        Console.WriteLine("New Config");
        CreateConfig();
    }
    config = XDocument.Load(configFilePath);

    var bringDowns = from el in config.Root.Element("BringDownColumns").Elements("Column") select el;
    bringTheseColumnsDown = bringDowns.Select(x => ConfigParseColumn(x.Value)).ToList();

    var explodeLines = from el in config.Root.Element("ExplodeColumns").Elements("Column") select el;
    explodeTheseColumns = explodeLines.Select(x => ConfigParseColumn(x.Value)).ToList();

    var outputPath = from el in config.Root.Element("OutputDirectory").Elements("Path") select el;
    foreach (var p in outputPath)
    {
        string path = Environment.ExpandEnvironmentVariables(p.Value);
        outputFile.Add(Path.Combine(
            (path == "" ? Path.GetDirectoryName(inputFile) : path),
            Path.GetFileNameWithoutExtension(inputFile) + " output" + Path.GetExtension(inputFile)));
    }

    Console.WriteLine("Input: " + inputFile);
    foreach (var b in outputFile)
        Console.WriteLine("Output: " + b);

    // Read using CSV Helper
    var oldFile = new List<List<string>>();
    using (var reader = new StreamReader(inputFile))
    {
        var csvParser = new CsvParser(reader);
        while (true)
        {
            var readLine = csvParser.Read();
            if (readLine == null)
                break;
            oldFile.Add(readLine.ToList());
        }
    }

    int totalColumns = oldFile.First().Count;

    // We'll just rebuild the file we want; it's easier than inserting into the old one.
    var newFile = new List<string>();

    // First read row by row
    for (int row = 0; row < oldFile.Count; row++)
    {
        // Preserve each row of the old file
        newFile.Add(string.Join(",", oldFile[row]) + "\n");

        // We explode each string into an array and put it in a list
        // where each entry in the list is one column.
        var newLinesInColumn = new List<string[]>();

        // How many new rows we need to insert
        int amountOfNewRows = 0;

        // Loop through all the columns on the row,
        // splitting them by forward slashes.
        for (int column = 0; column < totalColumns; column++)
        {
            if (explodeTheseColumns.Contains(column))
            {
                string[] splitColumn = oldFile[row][column].Split('/');

                // We only need to add as many new rows as the
                // highest number of splits.
                if (amountOfNewRows < splitColumn.Length)
                    amountOfNewRows = splitColumn.Length;

                newLinesInColumn.Add(splitColumn);
            }
            else if (bringTheseColumnsDown.Contains(column))
            {
                newLinesInColumn.Add(new string[] { oldFile[row][column] });
            }
            else
            {
                newLinesInColumn.Add(new string[] { });
            }
        }

        var newLine = new StringBuilder();

        // Now we need to add the new rows.
        // Only add a new row if we have more than 1 to add; the old row has already
        // been added, so this should only be 2 or more when a string was exploded.
        if (amountOfNewRows > 1)
        {
            for (int newRow = 0; newRow < amountOfNewRows; newRow++)
            {
                // Make sure we get the same number of columns.
                for (int newColumn = 0; newColumn < totalColumns; newColumn++)
                {
                    // If there is something in the column to insert, insert it.
                    if (newLinesInColumn[newColumn].Length > newRow)
                    {
                        // Add the exploded data into the new column.
                        newLine.Append(newLinesInColumn[newColumn][newRow]);
                    }
                    // If there is no exploded data to insert but we want to bring this column down
                    else if (bringTheseColumnsDown.Contains(newColumn))
                    {
                        // Insert the first value into the column, which is always
                        // the same whether it was exploded or not.
                        newLine.Append(newLinesInColumn[newColumn][0]);
                    }

                    // If it's the last entry on the line, just put a newline;
                    // otherwise, whether we put something on this line or not, put a comma.
                    newLine.Append(newColumn == totalColumns - 1 ? "\n" : ",");
                }
            }
        }

        newFile.Add(newLine.ToString());
    }

    foreach (var o in outputFile)
    {
        using (var writer = new StreamWriter(o))
        {
            foreach (var c in newFile)
            {
                writer.Write(c);
            }
        }
    }

    Console.WriteLine("Finished!");
    Console.ReadKey();
}
private static Dictionary<string, string> GetGenreDictionary()
{
    var assembly = Assembly.GetExecutingAssembly();
    var file = assembly.GetManifestResourceNames().FirstOrDefault(x => x.Contains("GenreMapping.csv"));
    var stream = assembly.GetManifestResourceStream(file);

    var dict = new Dictionary<string, string>();
    using (var sr = new StreamReader(stream)) // Disposing the reader also disposes the resource stream.
    {
        var parser = new CsvParser(sr);
        while (true)
        {
            var row = parser.Read();
            if (row == null)
                break;

            // Keys are upper-cased so lookups can ignore case.
            dict.Add(row[0].ToUpper(), row[1]);
        }
    }
    return dict;
}
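// A minimal sketch of using GetGenreDictionary. "ROCK" is a placeholder key;
// the real keys come from the embedded GenreMapping.csv resource.
var genres = GetGenreDictionary();
if (genres.TryGetValue("ROCK", out var mapped))
{
    Console.WriteLine("Mapped genre: " + mapped);
}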