public TradeDataEntry[] Read(string filePath)
{
    // Parses a comma-delimited trade-data file into entries.
    // Each row must have exactly 6 fields: date, open, high, low, close, volume.
    // Returns an empty array as soon as any row is malformed (preserves original behavior).
    using (var parser = new TextFieldParser(filePath))
    {
        parser.TextFieldType = FieldType.Delimited;
        parser.SetDelimiters(",");
        var tradeDataEntries = new List<TradeDataEntry>();
        while (!parser.EndOfData)
        {
            var fields = parser.ReadFields();
            if (fields != null)
            {
                if (fields.Length != 6)
                {
                    return new TradeDataEntry[] { };
                }
                // BUG FIX: the original passed a null IFormatProvider, so the date was
                // parsed with the *current* culture; use InvariantCulture so the fixed
                // "yyyy-M-d" pattern parses identically on every machine, consistent
                // with the numeric fields below.
                var date = DateTime.ParseExact(fields[0], "yyyy-M-d", CultureInfo.InvariantCulture);
                var open = double.Parse(fields[1], CultureInfo.InvariantCulture);
                var high = double.Parse(fields[2], CultureInfo.InvariantCulture);
                var low = double.Parse(fields[3], CultureInfo.InvariantCulture);
                var close = double.Parse(fields[4], CultureInfo.InvariantCulture);
                // Parse volume invariantly as well, for the same reason.
                var volume = int.Parse(fields[5], CultureInfo.InvariantCulture);
                tradeDataEntries.Add(new TradeDataEntry(date, open, high, low, close, volume));
            }
        }
        return tradeDataEntries.ToArray();
    }
}
/// <summary>
/// Loads (creating if necessary) a comma-delimited report file. The first row
/// holds column headers: Date/Time, Word Count, then one column per language.
/// </summary>
/// <param name="path">Path of the report file.</param>
public ReportFile(string path)
{
    fileInfo = new FileInfo(path);
    if (!fileInfo.Exists)
    {
        CreateReportFile(fileInfo);
    }
    // BUG FIX: the parser was only Close()d on the success path; wrap it in a
    // using block so the file handle is released even if parsing throws.
    using (var parser = new TextFieldParser(path) { Delimiters = new[] { "," } })
    {
        if (!parser.EndOfData)
        {
            var headerFields = parser.ReadFields();
            var langList = new List<string>();
            // skip Date/Time and Word Count column headers (indices 0 and 1)
            for (var i = 2; i < headerFields.Length; ++i)
            {
                var lang = headerFields[i];
                langList.Add(lang);
                langs.Add(lang);
            }
            while (!parser.EndOfData)
            {
                rows.Add(new ReportRow(langList.ToArray(), parser.ReadFields()));
            }
        }
    }
}
/// <summary>
/// Reads only the header row of a CSV file and returns a DataTable whose
/// columns are the (sanitized) header names. Returns an empty table on failure.
/// </summary>
/// <param name="csv_file_path">Path of the CSV file.</param>
public DataTable GetDataTableColumns(string csv_file_path)
{
    DataTable csvData = new DataTable();
    try
    {
        using (TextFieldParser csvReader = new TextFieldParser(csv_file_path))
        {
            csvReader.SetDelimiters(new string[] { "," });
            csvReader.HasFieldsEnclosedInQuotes = true;
            string[] colFields = csvReader.ReadFields();
            // BUG FIX: ReadFields() returns null for an empty file; the original
            // then threw a NullReferenceException that was silently swallowed below.
            if (colFields != null)
            {
                for (int i = 0; i < colFields.Length; i++)
                {
                    colFields[i] = this.RemoveSpecialCharacters(colFields[i]);
                }
                foreach (string column in colFields)
                {
                    DataColumn datecolumn = new DataColumn(column);
                    datecolumn.AllowDBNull = true;
                    csvData.Columns.Add(datecolumn);
                }
            }
        }
    }
    catch (Exception)
    {
        // Deliberate best-effort: an unreadable file yields an empty table.
        // TODO(review): consider logging the exception instead of discarding it.
    }
    return csvData;
}
/// <summary>
/// Lazily reads a CSV stream, using the first row as property names and
/// yielding one ExpandoObject (as IDictionary&lt;string, object&gt;) per data row.
/// </summary>
/// <param name="stream">Stream containing comma-delimited, optionally quoted CSV.</param>
public static IEnumerable<object> ReadCsv(Stream stream)
{
    // BUG FIX: the parser was never disposed; the using block (disposed by the
    // iterator machinery even on early termination) fixes the leak.
    using (var parser = new TextFieldParser(stream)
    {
        TextFieldType = FieldType.Delimited,
        Delimiters = new[] { "," },
        HasFieldsEnclosedInQuotes = true,
        TrimWhiteSpace = true
    })
    {
        string[] fields = parser.ReadFields();
        // BUG FIX: an empty stream makes ReadFields() return null; bail out
        // instead of throwing NullReferenceException below.
        if (fields == null)
        {
            yield break;
        }
        while (!parser.EndOfData)
        {
            string[] row = parser.ReadFields();
            var data = (IDictionary<string, object>)new ExpandoObject();
            // BUG FIX: guard against rows shorter than the header so a ragged
            // CSV no longer throws IndexOutOfRangeException.
            int count = Math.Min(fields.Length, row.Length);
            for (int i = 0; i < count; i++)
            {
                data[fields[i]] = row[i];
            }
            yield return data;
        }
    }
}
/// <summary>
/// Uses the Microsoft Text Field Parser to parse the FixedWidth file, to give a baseline
/// against an established class library for parsing.
/// </summary>
private static void _TextFieldParserFixedWidth()
{
    using (VB.TextFieldParser reader = new VB.TextFieldParser(PerformanceTests.FW_DATA_FILE))
    {
        reader.TextFieldType = VB.FieldType.FixedWidth;
        reader.SetFieldWidths(new int[PerformanceTests.NUMBER_OF_COLUMNS_IN_DATA] { 5, 5, 1, 28, 42, 15, 13, 9, 9, 1, 13, 14, 13, 6 });
        reader.CommentTokens = new string[] { "#" };
        reader.HasFieldsEnclosedInQuotes = true;

        while (!reader.EndOfData)
        {
            string[] fields = reader.ReadFields();
            // Touch every field so the baseline incurs the same per-field access
            // cost as the parsers it is compared against.
            for (int index = 0; index < fields.Length; ++index)
            {
                string s = fields[index] as string;
            }
        }
    }
}
/// <summary>
/// Loads the CSV file of bird species ("broedvogels.csv") from local storage.
/// (Original Dutch: inladen van het CSV bestand met verschillende diersoorten.)
/// </summary>
/// <returns>List of loaded animals.</returns>
public List<Diersoort> Load()
{
    List<Diersoort> loadedAnimals = new List<Diersoort>();
    // Get all the animal names from the local storage
    using (TextFieldParser parser = new TextFieldParser(FilePath + "broedvogels.csv"))
    {
        parser.TextFieldType = FieldType.Delimited;
        parser.SetDelimiters(",");
        while (!parser.EndOfData)
        {
            // Processing row
            string[] fields = parser.ReadFields();
            // NOTE(review): the parser splits on ',' but each field is then split
            // again on ';' — presumably the real record separator is ';' and the
            // rows contain no commas. Confirm against the actual file format.
            foreach (string field in fields)
            {
                string[] columns = field.Split(';');
                // columns[2]/[3] hold start/end dates as "day-month"; the year is
                // fixed at 2000 (only the day-of-year matters here).
                string[] startDatetime = columns[2].Split('-');
                string[] endDatetime = columns[3].Split('-');
                DateTime sdt = new DateTime(2000, Convert.ToInt32(startDatetime[1]), Convert.ToInt32(startDatetime[0]));
                DateTime edt = new DateTime(2000, Convert.ToInt32(endDatetime[1]), Convert.ToInt32(endDatetime[0]));
                // columns: 0 = name, 1 = description(?), 4 = numeric attribute —
                // exact semantics depend on the Vogel constructor (not visible here).
                Diersoort toAdd = new Vogel(
                    columns[0],
                    columns[1],
                    sdt,
                    edt,
                    Convert.ToInt32(columns[4]));
                loadedAnimals.Add(toAdd);
            }
        }
    }
    return loadedAnimals;
}
/// <summary>
/// Lazily reads brand records from the semicolon-delimited file at <c>path</c>,
/// skipping the header line and any '#' comment lines.
/// </summary>
public IEnumerable<Brand> Read()
{
    using (TextFieldParser parser = new TextFieldParser(path))
    {
        parser.CommentTokens = new string[] { "#" };
        parser.SetDelimiters(new string[] { ";" });
        parser.HasFieldsEnclosedInQuotes = true;

        // Skip over header line.
        parser.ReadLine();

        while (!parser.EndOfData)
        {
            string[] record = parser.ReadFields();
            Brand brand = new Brand();
            brand.Name = record[0];
            brand.FactoryLocation = record[1];
            brand.EstablishedYear = int.Parse(record[2]);
            // Profit uses the Swedish culture (',' decimal separator).
            brand.Profit = double.Parse(record[3], swedishCulture);
            yield return brand;
        }
    }
}
/// <summary>
/// Inspects the header row of a LeMond .csv stream and returns the matching
/// data provider (gforce, gforce STN, or Revolution). The parser is handed to
/// the provider on success, so it is intentionally not disposed here.
/// </summary>
/// <exception cref="InvalidDataException">
/// Thrown when the stream is empty, is not a LeMond file, or names an
/// unrecognized device. (The original threw a bare <see cref="Exception"/>;
/// InvalidDataException is more specific and is still caught by any existing
/// catch (Exception) handlers.)
/// </exception>
public static ILeMondDataProvider Create(SourcedStream sourcedStream)
{
    var parser = new TextFieldParser(sourcedStream.Stream);
    parser.TextFieldType = FieldType.Delimited;
    parser.Delimiters = new[] { "," };

    if (parser.EndOfData)
    {
        throw new InvalidDataException(string.Format("The file {0} does not seem to be a valid LeMond .csv file because it is empty.", sourcedStream.Source));
    }

    var row = parser.ReadFields();
    if (!(row.Length >= 1 && row[0] == "LeMond"))
    {
        throw new InvalidDataException(string.Format("The file {0} does not seem to be a valid LeMond .csv file because it doesn't say 'LeMond' in the first field.", sourcedStream.Source));
    }

    // Device detection: field 3 distinguishes the gforce variants; field 1
    // identifies the Revolution.
    if (row.Length >= 4 && row[3] == "gforce")
    {
        return new LeMondGForceCsvDataProvider(sourcedStream.Source, parser, row);
    }
    if (row.Length >= 4 && row[3] == "STN")
    {
        return new LeMondGForceSTNCsvDataProvider(sourcedStream.Source, parser, row);
    }
    if (row.Length >= 2 && row[1] == "Revolution")
    {
        return new LeMondRevolutionCsvDataProvider(sourcedStream.Source, parser, row);
    }

    throw new InvalidDataException(string.Format("Not a recognized LeMond device. Header = '{0}'", string.Join(",", row)));
}
/// <summary>
/// Loads the dnscrypt resolver list (dnscrypt-resolvers.csv in the current
/// directory) into providerList, one ProviderItem per data row.
/// </summary>
public ProviderMgr()
{
    // BUG FIX: wrap the parser in a using block; the original only Close()d it
    // on the success path, leaking the file handle if a row was malformed.
    using (TextFieldParser parser = new TextFieldParser(Directory.GetCurrentDirectory() + "\\dnscrypt-resolvers.csv"))
    {
        parser.TextFieldType = FieldType.Delimited;
        parser.SetDelimiters(",");

        // Skip the header row up front instead of parsing it into a bogus
        // ProviderItem and calling RemoveAt(0) afterwards, as the original did.
        if (!parser.EndOfData)
        {
            parser.ReadFields();
        }

        while (!parser.EndOfData)
        {
            string[] fields = parser.ReadFields();
            ProviderItem providerItem = new ProviderItem();
            // Column layout of dnscrypt-resolvers.csv, in order:
            providerItem.setName(fields[0]);
            providerItem.setFullName(fields[1]);
            providerItem.setDescription(fields[2]);
            providerItem.setLocation(fields[3]);
            providerItem.setCoordinates(fields[4]);
            providerItem.setURL(fields[5]);
            providerItem.setVersion(fields[6]);
            providerItem.setDNSSEC(fields[7]);
            providerItem.setNoLogs(fields[8]);
            providerItem.setNamecoin(fields[9]);
            providerItem.setAddress(fields[10]);
            providerItem.setProviderName(fields[11]);
            providerItem.setPublicKey(fields[12]);
            providerItem.setPublicKeyTXT(fields[13]);
            providerList.Add(providerItem);
        }
    }
}
// File input.
// readFileName: name of the file to read.
// Returns: a list holding the fields of each line.
public static List<String[]> read(String readFileName)
{
    // Accumulates the lines read so far.
    List<String[]> readLines = new List<String[]>();
    try
    {
        // BUG FIX: dispose the parser deterministically; the original never
        // closed it, leaking the file handle until finalization.
        using (TextFieldParser parser = new TextFieldParser(readFileName))
        {
            // Parse as delimited text.
            parser.TextFieldType = FieldType.Delimited;
            // Fields are separated by commas.
            parser.SetDelimiters(",");
            // Process until the end of the file.
            while (!parser.EndOfData)
            {
                String[] row = parser.ReadFields(); // read one line
                readLines.Add(row);                 // keep the line
            }
        }
    }
    catch (Exception e)
    {
        MessageBox.Show("" + e, "error", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }
    return readLines;
}
/// <summary>
/// Loads a comma-delimited CSV into an Encog data set: the first inputCount
/// columns become the input vector, the next outputCount columns the ideal vector.
/// </summary>
/// <param name="fileInfo">CSV file to read.</param>
/// <param name="inputCount">Number of leading input columns.</param>
/// <param name="outputCount">Number of ideal columns following the inputs.</param>
/// <param name="randomize">When true, the resulting rows are shuffled.</param>
/// <param name="headers">When true, the first row is skipped as a header.</param>
public static IMLDataSet LoadCSVToDataSet(FileInfo fileInfo, int inputCount, int outputCount, bool randomize = true, bool headers = true)
{
    BasicMLDataSet result = new BasicMLDataSet();
    // Numbers in the file use '.' as the decimal separator ("en" culture).
    CultureInfo CSVformat = new CultureInfo("en");
    using (TextFieldParser parser = new TextFieldParser(fileInfo.FullName))
    {
        parser.TextFieldType = FieldType.Delimited;
        parser.SetDelimiters(",");
        if (headers)
            parser.ReadFields();
        while (!parser.EndOfData)
        {
            // Processing row
            string[] fields = parser.ReadFields();
            var input = new BasicMLData(inputCount);
            for (int i = 0; i < inputCount; i++)
                input[i] = double.Parse(fields[i], CSVformat);
            var ideal = new BasicMLData(outputCount);
            for (int i = 0; i < outputCount; i++)
                ideal[i] = double.Parse(fields[i + inputCount], CSVformat);
            result.Add(input, ideal);
        }
    }
    // BUG FIX: the original seeded with DateTime.Now.Millisecond, allowing only
    // 1000 distinct shuffle orders; the parameterless Random() uses a
    // higher-resolution time-based seed.
    var rand = new Random();
    return (randomize ? new BasicMLDataSet(result.OrderBy(r => rand.Next()).ToList()) : new BasicMLDataSet(result));
}
/// <summary>
/// Imports the given csv file: each data line (after the header) is posted to
/// the configured "ovservice" REST endpoint, retrying up to 3 times per line.
/// </summary>
/// <param name="csvfile">Path of the comma-delimited transaction file.</param>
public void StartTransactionImport(string csvfile)
{
    api = new SimpleRestApi(ConfigurationManager.AppSettings["ovservice"]);
    log4net.Config.XmlConfigurator.Configure();
    // NOTE(review): parser is a field and is never closed here — confirm it is
    // disposed elsewhere, otherwise the file handle leaks.
    parser = new TextFieldParser(csvfile) {Delimiters = new[] {","}};
    // Consume the header row.
    parser.ReadFields();
    while (!parser.EndOfData)
    {
        IList<string> csvFields = parser.ReadFields();
        if (HasValidNumberOfFields(csvFields))
        {
            Line CSVLine = new Line(csvFields);
            try
            {
                // Post with retries: up to 3 attempts, polling every second,
                // giving up entirely after 10 seconds.
                Retry.Repeat(3)
                    .WithPolling(TimeSpan.FromSeconds(1))
                    .WithTimeout(TimeSpan.FromSeconds(10))
                    .Until(() => PostCSVLine(CSVLine));
            }
            catch (Exception)
            {
                // Best-effort import: a line that keeps failing is logged and skipped.
                log.DebugFormat("Max retries reached, skipping line: {0}", CSVLine);
            }
        }
        else
        {
            log.ErrorFormat("Invalid line is skipped! (Incorrect number of fields) {0}", string.Join(",", csvFields));
        }
    }
}
/// <summary>
/// Re-parses the raw salary CSV text: the first row becomes the quoted,
/// whitespace-stripped header; for every following row only the first (name)
/// column is normalized and quoted. The result is re-rendered and save enabled.
/// </summary>
private void btnConvert_Click(object sender, EventArgs e)
{
    if (_salary == null)
    {
        return;
    }
    // Reset previous conversion results before re-parsing.
    _salary.Header = null;
    _salary.Rows.Clear();
    using (Stream stream = GenerateStreamFromString(_salary.RawText))
    {
        TextFieldParser parser = new TextFieldParser(stream);
        parser.HasFieldsEnclosedInQuotes = true;
        parser.SetDelimiters(",");
        string[] fields;
        int counter = 0; // 0 while the header row has not yet been processed
        while (!parser.EndOfData)
        {
            fields = parser.ReadFields();
            if (fields == null || fields.Length == 0)
                continue;
            if (counter == 0)
            {
                // Header row: strip ASCII and full-width spaces plus line breaks
                // from each field, escape embedded quotes, and re-join quoted.
                StringBuilder sb = new StringBuilder();
                for (int i = 0; i < fields.Length; i++)
                {
                    string field = fields[i].Trim().Replace(" ", "").Replace("　", "").Replace("\n", "").Replace("\r", "");
                    field = field.Replace("\"", "\"\"");
                    if (i != fields.Length - 1)
                    {
                        sb.AppendFormat("\"{0}\",", field);
                    }
                    else
                    {
                        sb.AppendFormat("\"{0}\"", field);
                    }
                }
                _salary.Header = sb.ToString();
                counter++;
            }
            else
            {
                // Data row: normalize only the name column (index 0); quote it
                // unless it already appears quoted.
                string name = fields[0].Trim().Replace(" ", "").Replace("　", "").Replace("\n", "");
                if (name.StartsWith("\"") && name.EndsWith("\""))
                {
                    // already quoted — leave as-is
                }
                else
                {
                    name = "\"" + name.Replace("\"", "\"\"") + "\"";
                }
                fields[0] = name;
                _salary.Rows.Add(string.Join(",", fields));
            }
        }
    }
    Render();
    btnSave.Enabled = true;
}
/// <summary>
/// Lazily parses a Process Monitor CSV export into entries.
/// </summary>
/// <param name="path">Path of the comma-delimited capture file.</param>
/// <param name="header">When true, the first line is skipped as a header.</param>
static IEnumerable<ProcessMonitorEntry> Parse(string path, bool header = true)
{
    // BUG FIX: the original created the parser outside any using block and only
    // Close()d it after full enumeration, leaking the file handle when the
    // caller abandoned the iterator early. The using block (disposed by the
    // iterator machinery) closes it in all cases.
    using (var parser = new TextFieldParser(path))
    {
        parser.TextFieldType = FieldType.Delimited;
        parser.SetDelimiters(",");
        if (header)
        {
            parser.ReadLine();
        }
        while (!parser.EndOfData)
        {
            var fields = parser.ReadFields();
            yield return new ProcessMonitorEntry
            {
                // Columns: 0=TimeOfDay, 1=ProcessName, 2=PID, 3=Operation,
                // 4=Path, 5=Result, 6=Details; only Path and Result are kept.
                Path = fields[4],
                Result = fields[5],
            };
        }
    }
}
/// <summary>
/// Loads a quoted, comma-delimited CSV file into a DataTable, using the first
/// row as column names. Returns an empty table for an empty file.
/// </summary>
/// <exception cref="MalformedLineException">Propagated when a line cannot be parsed.</exception>
private DataTable DataTableFromCSV(string path)
{
    var data = new DataTable();
    using (var parser = new TextFieldParser(path))
    {
        parser.SetDelimiters(new[] { "," });
        parser.HasFieldsEnclosedInQuotes = true;

        var columns = parser.ReadFields();
        // BUG FIX: ReadFields() returns null for an empty file; the original
        // then crashed with NullReferenceException in the foreach.
        if (columns != null)
        {
            foreach (string column in columns)
            {
                var dataColumn = new DataColumn(column);
                dataColumn.AllowDBNull = true;
                data.Columns.Add(dataColumn);
            }
            while (!parser.EndOfData)
            {
                data.Rows.Add(parser.ReadFields());
            }
        }
        // The original's catch (MalformedLineException) { throw; } was a no-op
        // and has been removed; the exception still propagates unchanged.
    }
    return data;
}
/// <summary>
/// Converts a tab-delimited import file into SQL insert statements, echoing
/// each to the console and writing them to the configured output file.
/// </summary>
static void Main(string[] args)
{
    PrintHeading();
    if (!File.Exists(Settings.Default.ImportFile))
    {
        Console.WriteLine("Could not find data file at '" + Settings.Default.ImportFile);
        return;
    }
    if (File.Exists(Settings.Default.OutputFile))
        File.Delete(Settings.Default.OutputFile);

    // BUG FIX: the parser was never disposed; both it and the writer now live
    // in using blocks so input and output files are closed deterministically.
    using (var parser = new TextFieldParser(Settings.Default.ImportFile))
    using (var sw = new StreamWriter(Settings.Default.OutputFile))
    {
        parser.SetDelimiters(new[] {"\t"});

        // Consume first row (header).
        if (!parser.EndOfData)
            parser.ReadFields();

        while (!parser.EndOfData)
        {
            var fields = parser.ReadFields();
            var call = new SatComCallIndentifier(fields);
            Console.WriteLine(call.GetSqlInsert());
            sw.WriteLine(call.GetSqlInsert());
        }
    }

    Console.WriteLine();
    Console.WriteLine("Outputted sql to: " + Settings.Default.OutputFile);
    Console.ReadLine();
}
/// <summary>
/// Parses the LeMond gforce STN header rows: the first row carries the firmware
/// version (field 1), date (field 4) and time (field 5); the second row must
/// name the seven data columns in fixed order.
/// </summary>
/// <param name="sourceName">Display name of the source, used in error messages.</param>
/// <param name="parser">Parser positioned just after the first header row.</param>
/// <param name="firstRow">The already-read first header row.</param>
/// <exception cref="Exception">Thrown when either header row is missing or malformed.</exception>
public LeMondGForceSTNCsvDataProvider(string sourceName, TextFieldParser parser, string[] firstRow)
    : base(parser)
{
    if (firstRow.Length < 6)
    {
        throw new Exception(string.Format("Invalid gforce STN header. Header = '{0}'", string.Join(",", firstRow)));
    }

    firmwareVersion = LeMondGForceSTNCsvDataProvider.ParseFirmwareVersion(firstRow[1]);

    int year, month, day;
    LeMondGForceCsvDataProvider.ParseDate(firstRow[4], out year, out month, out day);

    int hour, minute;
    LeMondGForceCsvDataProvider.ParseTime(firstRow[5], out hour, out minute);

    // The file records local wall-clock time, hence DateTimeKind.Local.
    StartTime = new DateTime(year, month, day, hour, minute, 0, DateTimeKind.Local);

    if (Parser.EndOfData)
    {
        throw new Exception(string.Format("The file {0} does not seem to be a valid LeMond .csv file because it is missing the data field headers.", sourceName));
    }

    // Second header row: the data-section column names, which must appear in
    // exactly this order (extra trailing columns are tolerated).
    firstRow = Parser.ReadFields();
    if (!(firstRow.Length >= 7 && firstRow[0] == "TIME" && firstRow[1] == "SPEED" && firstRow[2] == "DIST" && firstRow[3] == "POWER" && firstRow[4] == "HEART RATE" && firstRow[5] == "RPM" && firstRow[6] == "CALORIES"))
    {
        throw new Exception(string.Format("The file {0} does not seem to be a valid LeMond .csv file because it does not contain the correct data fields.", sourceName));
    }
}
/// <summary>
/// Loads a quoted, comma-delimited CSV file into a DataTable: the first row
/// becomes the column names, empty cells become null. Returns an empty table
/// on any failure (best-effort, as in the original).
/// http://stackoverflow.com/questions/16606753/populating-a-dataset-from-a-csv-file
/// </summary>
/// <param name="filePath">Path of the CSV file.</param>
public static DataTable GetDataTabletFromCSVFile(string filePath)
{
    DataTable csvData = new DataTable();
    try
    {
        using (TextFieldParser csvReader = new TextFieldParser(filePath))
        {
            csvReader.SetDelimiters(new string[] { "," });
            csvReader.HasFieldsEnclosedInQuotes = true;
            string[] colFields = csvReader.ReadFields();
            // BUG FIX: ReadFields() returns null for an empty file; the original
            // then threw a NullReferenceException that the catch silently ate.
            if (colFields != null)
            {
                foreach (string column in colFields)
                {
                    DataColumn datecolumn = new DataColumn(column);
                    datecolumn.AllowDBNull = true;
                    csvData.Columns.Add(datecolumn);
                }
                while (!csvReader.EndOfData)
                {
                    string[] fieldData = csvReader.ReadFields();
                    // Making empty value as null
                    for (int i = 0; i < fieldData.Length; i++)
                    {
                        if (fieldData[i] == "")
                        {
                            fieldData[i] = null;
                        }
                    }
                    csvData.Rows.Add(fieldData);
                }
            }
        }
    }
    catch (Exception)
    {
        // Deliberate best-effort: any failure yields an empty table.
        // TODO(review): consider logging the exception instead of discarding it.
    }
    return csvData;
}
/// <summary>
/// Reads historical rates for the given stock from ..\..\StockData\Maya\&lt;name&gt;.csv
/// and returns them as a Stock in chronological order.
/// NOTE(review): startDate/endDate are currently unused — confirm whether date
/// filtering was intended.
/// </summary>
public Stock GetStock(StockName stockName, DateTime startDate, DateTime endDate)
{
    string dir = String.Format(@"..\..\StockData\Maya");
    string filename = String.Format("{0}.csv", stockName);
    var fullPath = Path.Combine(dir, filename);
    var rates = new List<IStockEntry>();

    // BUG FIX: dispose the parser deterministically; the original never closed
    // it, leaking the file handle.
    using (var parser = new TextFieldParser(fullPath) {TextFieldType = FieldType.Delimited})
    {
        parser.SetDelimiters(",");

        //skips the first 3 lines (file preamble)
        parser.ReadFields();
        parser.ReadFields();
        parser.ReadFields();

        while (!parser.EndOfData)
        {
            var fields = parser.ReadFields();
            if (fields != null)
            {
                // TODO(review): the parsed fields are discarded and a null entry
                // is added — this looks like an unfinished stub. Behavior
                // preserved as-is; construct a real StockEntry from fields here.
                StockEntry stockEntry = null;
                rates.Add(stockEntry);
            }
        }
    }

    // File is newest-first; reverse to chronological order.
    rates.Reverse();
    var stock = new Stock(stockName, rates);
    return stock;
}
/// <summary>
/// Reads the comma-delimited file at FilePath into _data, skipping the first
/// line (header) and '#' comment lines. No-op when FilePath is null.
/// </summary>
public void ReadFile()
{
    if (FilePath == null)
        return;

    // BUG FIX: wrap the parser in a using block; the original never disposed
    // it, leaking the file handle.
    using (var parser = new TextFieldParser(FilePath)
    {
        TextFieldType = FieldType.Delimited,
        CommentTokens = new[] {"#"}
    })
    {
        parser.SetDelimiters(",");
        parser.HasFieldsEnclosedInQuotes = false;
        // Skip the header line.
        parser.ReadLine();
        while (!parser.EndOfData)
        {
            var row = parser.ReadFields();
            if (row == null)
                continue;
            var newLine = new List<string>(row.Length);
            newLine.AddRange(row);
            _data.Add(newLine);
        }
    }
}
// Example file name:
// C:\Users\Michael\Documents\Visual Studio 2013\Projects\WorldGenerator\Generate\Names\Adjectives.txt
/// <summary>
/// Reads delimited names from the given file and bulk-inserts them through the
/// w.RaceNameAdd stored procedure.
/// </summary>
/// <param name="file">Path of the names file.</param>
/// <param name="delimiter">Expected delimiter is a pipe character: "|"</param>
public void AddNames(string file, string delimiter)
{
    // currently this only handles races and not deitys, adjectives, locations or whatnot (not really masculine/feminine either)
    DataTable nameListing = new DataTable();
    nameListing.Columns.Add("FK_RaceId", typeof(int)); // I think I can get rid of this?..
    nameListing.Columns.Add("Name", typeof(string));
    nameListing.Columns.Add("Masculine", typeof(bool));

    using (TextFieldParser parser = new TextFieldParser(file))
    {
        parser.Delimiters = new string[] { delimiter };

        // ReadFields() returns null at end-of-data.
        string[] names;
        while ((names = parser.ReadFields()) != null)
        {
            foreach (string name in names)
            {
                DataRow row = nameListing.NewRow();
                row["FK_RaceId"] = 0; // I think I can get rid of this?..
                row["Name"] = name;
                row["Masculine"] = false; // TODO
                nameListing.Rows.Add(row);
            }
        }
    }

    // call the database
    // move this somewhere else when you have time
    SqlParameter[] parameters = new SqlParameter[1];
    parameters[0] = new SqlParameter("@Names", nameListing);
    DataTable returnedTable = new Database("WorldGen").Crud("w.RaceNameAdd", parameters);
}
// Constructor
// filename - file from which to read the puzzle
public Sudoku(string filename)
{
    // TODO: Add the following exception handling
    // - Validate that the length of the first row is a perfect square value
    // - Validate that each line contains the same number of elements
    // - Validate that each element is a numeric value between 1 and gridSize or the unknown value
    using (TextFieldParser parser = new TextFieldParser(filename))
    {
        int row = 0;
        parser.Delimiters = new string[] { "," };
        while (!parser.EndOfData)
        {
            // Read in a line of the puzzle
            string[] parts = parser.ReadFields();
            if (row == 0)
            {
                //Initialize the grid — the first row's length defines the puzzle size
                gridSize = parts.Length;
                // NOTE(review): assumes gridSize is a perfect square (see TODO);
                // a non-square length is silently truncated by the Sqrt here.
                subGridSize = Convert.ToInt32(Math.Sqrt(gridSize));
                sudokuGrid = new NumberBlock[gridSize, gridSize];

                // Initialize the range of possible block values
                rangeOfValues = new List<int>(Enumerable.Range(1, gridSize).ToList());
            }

            // Populate the puzzle — one NumberBlock per cell of this row
            for (var col = 0; col < gridSize; col++)
            {
                sudokuGrid[row, col] = new NumberBlock(row, col, subGridSize, Convert.ToInt32(parts[col]));
            }
            row++;
        }
    }
}
/// <summary>
/// Loads adjectives (pipe-delimited, one or more per line) from the given file
/// and bulk-inserts them via the w.AdjectiveAdd stored procedure.
/// </summary>
/// <param name="file">
/// Path of the adjectives file. Defaults to the previously hard-coded location,
/// so existing parameterless callers keep working while other environments can
/// now supply their own path.
/// </param>
public void AddAdjectives(string file = @"C:\Users\Michael Ovies\Source\Repos\WorldGenerator\WorldGen.Database\WordsAndNames\Adjectives.txt")
{
    // currently this only handles adjectives; races, deities, locations live elsewhere
    DataTable nameListing = new DataTable();
    nameListing.Columns.Add("Name", typeof(string));

    using (TextFieldParser parser = new TextFieldParser(file))
    {
        parser.Delimiters = new string[] { "|" };
        while (true)
        {
            string[] names = parser.ReadFields();
            if (names == null)
            {
                break;
            }
            foreach (string name in names)
            {
                DataRow row = nameListing.NewRow();
                row["Name"] = name;
                nameListing.Rows.Add(row);
            }
        }
    }

    // call the database
    // move this somewhere else when you have time
    SqlParameter[] parameters = new SqlParameter[1];
    parameters[0] = new SqlParameter("@Words", nameListing);
    DataTable returnedTable = _dataSource.Crud("w.AdjectiveAdd", parameters);
}
/// <summary>
/// Scans the DTS list file for consecutive entries whose fields 1-3 match and
/// reports duplicates in dupeListBox. Lines starting with "C/R" are skipped.
/// </summary>
private void Continue_Click(object sender, RoutedEventArgs e)
{
    continueButton.IsEnabled = false;
    bool flag = false;       // set when at least one duplicate is found
    string[] lastDTS = {""}; // fields of the previously examined line
    dupeListBox.Items.Clear();
    string[] dtsList = File.ReadAllLines(DTSListSuite.App.mDtsListFile);
    foreach (string line in dtsList)
    {
        // NOTE(review): Substring(0, 3) throws on lines shorter than 3 chars —
        // presumably the file format guarantees a minimum length; confirm.
        if (line.Substring(0, 3) != "C/R")
        {
            TextFieldParser parser = new TextFieldParser(new StringReader(line));
            parser.HasFieldsEnclosedInQuotes = true;
            parser.SetDelimiters(",");
            string[] ArLn = { "" };
            // A single input line yields one record; the loop leaves ArLn holding it.
            while (!parser.EndOfData)
                ArLn = parser.ReadFields();
            // lastDTS.Length > 1 means a previous data line has been seen
            // (the {""} sentinel has length 1).
            if (lastDTS.Length > 1)
            {
                // NOTE(review): assumes every data line has at least 4 fields;
                // shorter lines would throw IndexOutOfRangeException here.
                if (ArLn[1] == lastDTS[1] && ArLn[2] == lastDTS[2] && ArLn[3] == lastDTS[3])
                {
                    dupeListBox.Items.Insert(0, ArLn[1] + " " + ArLn[2] + " " + ArLn[3] + "\tDuplicate!");
                    flag = true;
                }
            }
            lastDTS = ArLn;
        }
    }
    if (!flag)
        dupeListBox.Items.Insert(0, "No Duplicates!");
    continueButton.IsEnabled = true;
}
/// <summary>
/// Downloads a CSV document from connectionURL and parses it into rows of fields.
/// </summary>
/// <returns>
/// A task that completes with one string[] per CSV row, or faults if the
/// download or parsing fails.
/// </returns>
public Task<string[][]> GetDataFromCSVStream()
{
    List<string[]> result = new List<string[]>();
    TaskCompletionSource<string[][]> resultTask = new TaskCompletionSource<string[][]>();
    try
    {
        HttpWebRequest req = (HttpWebRequest)WebRequest.Create(connectionURL);
        req.GetResponseAsync().ContinueWith(
            (task) =>
            {
                try
                {
                    WebResponse resp = task.Result;
                    using (TextFieldParser parser = new TextFieldParser(resp.GetResponseStream()))
                    {
                        parser.Delimiters = new string[] { "," };
                        while (true)
                        {
                            string[] line = parser.ReadFields();
                            if (line == null)
                                break;
                            result.Add(line);
                        }
                    }
                    resultTask.SetResult(result.ToArray());
                }
                catch (Exception ex)
                {
                    // BUG FIX: the original never completed resultTask when the
                    // request faulted or parsing threw inside this continuation,
                    // so awaiting callers hung forever. Propagate the failure.
                    resultTask.SetException(ex);
                }
            }
        );
    }
    catch (Exception e)
    {
        // Preserve the original best-effort shape for synchronous failures
        // (a single row containing the error message) — but, unlike the
        // original, actually complete the task so callers don't hang.
        result.Add(new string[] { e.Message });
        resultTask.TrySetResult(result.ToArray());
    }
    return resultTask.Task;
}
/// <summary>
/// Reads SQMData.csv and prints one moon-phase value per calendar day.
/// </summary>
static void Main()
{
    List<MoonPhase> moonPhases = new List<MoonPhase>();
    using (TextFieldParser parser = new TextFieldParser(@"SQMData.csv"))
    {
        parser.TextFieldType = FieldType.Delimited;
        // Values in the input file are comma-separated.
        parser.SetDelimiters(",");
        // Lines starting with '#' are ignored.
        parser.CommentTokens = new string[] {"#"};
        DateTime prevSkyclock = DateTime.MinValue;
        while (!parser.EndOfData)
        {
            string[] fields = null;
            try
            {
                fields = parser.ReadFields();
            }
            catch (MalformedLineException)
            {
                // Skip malformed lines.
                continue;
            }
            // File columns:
            // 0 - Year
            // 1 - Month
            // 2 - Day
            // 3 - Local_time
            // 4 - day_of_year
            // 5 - hour_of_day
            // 6 - Sky_Quality_(mag/arc_sec_**2)
            // 7 - SQM_temperature_(Celsius)
            // 8 - cloud_cover_(%)
            // 9 - seeing_(1-5)
            // 10 - transparency_(1-5)
            // 11 - skyclock_time/date_used
            // 12 - sunrise
            // 13 - sunset
            // 14 - moonrise
            // 15 - moonset
            // 16 - moonphase
            DateTime skyclock = DateTime.ParseExact(fields[11], "yyyy-MM-dd HH:mm:ss", CultureInfo.InvariantCulture);
            int moonPhase = int.Parse(fields[16]);
            // Record one phase per calendar day (the first reading of each day wins).
            if (prevSkyclock != skyclock.Date)
            {
                moonPhases.Add(new MoonPhase { Date = skyclock.Date, Phase = moonPhase });
                prevSkyclock = skyclock.Date;
            }
        }
    }
    foreach (MoonPhase phase in moonPhases)
    {
        Console.WriteLine("{0:d} - {1}", phase.Date, phase.Phase);
    }
}
/// <summary>
/// Opens a comma-delimited CSV file for reading with the given encoding.
/// </summary>
/// <param name="path">Path of the CSV file.</param>
/// <param name="encoding">Text encoding of the file.</param>
public CsvReader( string path, Encoding encoding )
{
    Info = new FileInfo( path );

    file = new TextFieldParser( path, encoding )
    {
        TextFieldType = FieldType.Delimited,
        Delimiters = new string[] { "," }
    };
}
/// <summary>
/// Reads a translation CSV (skipping the header row) and adds every character
/// of each translated string to the character set registered for its font.
/// </summary>
/// <param name="translationCSVPath">Path of the comma-delimited translation file.</param>
/// <param name="customFontCharacterSets">Per-font character sets, keyed by font id.</param>
private static void ParseCharacterFromTranslationCsv(string translationCSVPath, Dictionary<string, HashSet<char>> customFontCharacterSets)
{
    using (TextFieldParser parser = new TextFieldParser(translationCSVPath))
    {
        parser.TextFieldType = FieldType.Delimited;
        parser.SetDelimiters(",");
        parser.ReadFields(); // Skip header
        while (!parser.EndOfData)
        {
            string[] fields = parser.ReadFields();
            // Column indices come from class-level constants (not visible here).
            string translatedString = fields[csvTranslatedStringCol];
            string translationFont = fields[csvFontIdCol];
            if (!string.IsNullOrEmpty(translatedString))
            {
                foreach (char c in translatedString.ToCharArray())
                {
                    // NOTE(review): assumes every font id in the CSV already has
                    // an entry in customFontCharacterSets; an unknown id throws
                    // KeyNotFoundException — confirm the sets are pre-populated.
                    customFontCharacterSets[translationFont].Add(c);
                }
            }
        }
    }
}
/// <summary>
/// Lazily reads a comma-delimited CSV file and yields one dictionary per data
/// row, keyed by the header-row field names.
/// </summary>
/// <param name="path">Path (resolved via ReflectedHost) of the CSV file.</param>
protected internal IEnumerable<Dictionary<string, string>> ReadCsvRecords(string path)
{
    var resolvedPath = ReflectedHost.ResolvePath(path);
    using (var parser = new TextFieldParser(resolvedPath))
    {
        parser.TextFieldType = FieldType.Delimited;
        parser.Delimiters = new string[] { "," };
        parser.TrimWhiteSpace = true;
        parser.HasFieldsEnclosedInQuotes = false;

        string[] headers = null;
        while (!parser.EndOfData)
        {
            var fields = parser.ReadFields();
            if (headers == null)
            {
                // The first row supplies the record keys.
                headers = fields;
            }
            else
            {
                var record = new Dictionary<string, string>();
                // BUG FIX: guard against rows shorter than the header so a
                // ragged CSV no longer throws IndexOutOfRangeException
                // mid-enumeration; missing trailing fields are simply omitted.
                int count = Math.Min(headers.Length, fields.Length);
                for (int fieldIndex = 0; fieldIndex < count; fieldIndex++)
                {
                    record.Add(headers[fieldIndex], fields[fieldIndex]);
                }
                yield return record;
            }
        }
    }
}
/// <summary>
/// Verifies that CsvTextFieldParser matches the reference VB TextFieldParser
/// when ReadLine() cuts into the middle of a quoted field containing a newline.
/// </summary>
public void ReadLine_SampleWithNewlineInQuotedField()
{
    const string input = @"Name,Birth Date
""Creed, Apollo"",1942-08-17
""Ivan
Drago"",1961-11-03
""Robert """"Rocky"""" Balboa"",1945-07-06";
    var parserReader = new StringReader(input);
    var parser = new NotVisualBasic.FileIO.CsvTextFieldParser(parserReader);
    var vbParser = new Microsoft.VisualBasic.FileIO.TextFieldParser(new StringReader(input));
    vbParser.SetDelimiters(",");

    // Header row and the first record parse identically in both parsers.
    Assert.Equal(vbParser.ReadFields(), parser.ReadFields());
    Assert.Equal(vbParser.ReadFields(), parser.ReadFields());

    Assert.Equal(vbParser.ReadLine(), parserReader.ReadLine());

    // The readline should have read into the middle of the field, which changes the parsing output
    Assert.Equal(new[] { @"Drago""", "1961-11-03" }, vbParser.ReadFields());
    Assert.Equal(new[] { @"Drago""", "1961-11-03" }, parser.ReadFields());

    Assert.Equal(vbParser.ReadFields(), parser.ReadFields());

    // Both parsers agree the data is exhausted.
    Assert.Null(vbParser.ReadFields());
    Assert.Null(parser.ReadFields());
    Assert.True(vbParser.EndOfData);
    Assert.True(parser.EndOfData);
}
// Reads the newline separated pairs of first and last names.
// Returns null (not an empty list) when the file is missing or yields no models.
public IList<NameModel> ReadAll(string filePath)
{
    IList<NameModel> users = null;

    if (!File.Exists(filePath))
    {
        return users;
    }

    using (var reader = File.OpenRead(filePath))
    using (var textFileParser = new TextFieldParser(reader))
    {
        textFileParser.TrimWhiteSpace = true;
        textFileParser.Delimiters = new[] {","};

        while (!textFileParser.EndOfData)
        {
            // Read comma separated line
            var line = textFileParser.ReadFields();
            // Create mapped model for each row of data
            var name = GetNameModel(line);
            if (name == null)
            {
                continue;
            }
            if (users == null)
            {
                users = new List<NameModel>();
            }
            users.Add(name);
        }
    }

    return users;
}
/// <summary>
/// Loads ForexTrading.csv from the configured test-data directory into a list
/// of ForexRecords.
/// </summary>
public static List<ForexRecord> BuildRecords()
{
    var dataFile = ConfigurationManager.AppSettings["TestDataDirectory"] + "\\ForexTrading.csv";
    var records = new List<ForexRecord>();
    using (var parser = new TextFieldParser(dataFile))
    {
        parser.TextFieldType = FieldType.Delimited;
        parser.SetDelimiters(",");
        while (!parser.EndOfData)
        {
            var fields = parser.ReadFields();
            if (fields == null)
            {
                continue;
            }
            records.Add(new ForexRecord
            {
                CurrencyPair = fields[0],
                Date = fields[1],
                // BUG FIX: parse invariantly — the original used the current
                // culture, which misreads "1.2345" on comma-decimal locales.
                Bid = double.Parse(fields[2], CultureInfo.InvariantCulture),
                Ask = double.Parse(fields[3], CultureInfo.InvariantCulture)
            });
        }
    }
    return records;
}
/// <summary>
/// Builds the reference Microsoft.VisualBasic parser used to produce expected
/// results in comparison tests.
/// </summary>
private TextFieldParser CreateExpectedParser(string input, bool trimWhiteSpace, bool hasFieldsEnclosedInQuotes)
{
    var expected = new Microsoft.VisualBasic.FileIO.TextFieldParser(new StringReader(input))
    {
        TrimWhiteSpace = trimWhiteSpace,
        HasFieldsEnclosedInQuotes = hasFieldsEnclosedInQuotes
    };
    expected.SetDelimiters(",");
    return expected;
}
/// <summary>
/// Lets the user pick a CSV file (starting in My Documents), skips its 13-line
/// preamble, and collects the remaining rows into parsedData.
/// </summary>
private void openButton_Click(object sender, EventArgs e)
{
    List<string[]> parsedData = new List<string[]>();
    try
    {
        // Detect the path of documents folder and assign it to initial directory path
        String myDocument = Environment.GetFolderPath(Environment.SpecialFolder.MyDocuments);
        openFile.InitialDirectory = myDocument;
        if (openFile.ShowDialog() == DialogResult.OK)
        {
            // BUG FIX: dispose the parser (the original leaked the file handle).
            using (var parser = new Microsoft.VisualBasic.FileIO.TextFieldParser(openFile.FileName))
            {
                parser.TextFieldType = FieldType.Delimited;
                parser.SetDelimiters(",");
                parser.TrimWhiteSpace = true;

                // Skip the 13 preamble lines (replaces 13 copy-pasted ReadLine() calls).
                for (int i = 0; i < 13; i++)
                {
                    parser.ReadLine();
                }

                while (!parser.EndOfData)
                {
                    string[] fields = parser.ReadFields();
                    parsedData.Add(fields);
                    // NOTE(review): this traces the List object itself (its type
                    // name), not the row contents — preserved from the original,
                    // but probably intended to trace the fields.
                    Trace.WriteLine(parsedData);
                }
            }
        }
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
    }
}
/// <summary>
/// Parses a big text blob into rows and columns, using the settings
/// </summary>
/// <param name="text">Big blob of text</param>
/// <returns>Parsed data</returns>
public List<string[]> Parse(string text)
{
    // The actual _parsing_ .NET can handle. Well, VisualBasic anyway...
    using (var reader = new StringReader(text))
    using (var parser = new Microsoft.VisualBasic.FileIO.TextFieldParser(reader))
    {
        var errors = new StringBuilder();

        // A default(char) comment character means "no comments configured".
        if (this.CommentCharacter != default(char))
        {
            parser.CommentTokens = new[] { this.CommentCharacter.ToString() };
        }

        parser.SetDelimiters(this.Separator.ToString());
        parser.HasFieldsEnclosedInQuotes = this.TextQualifier != default(char);

        // Fixed-width mode is only engaged when widths were supplied.
        if (this.FieldWidths != null)
        {
            parser.TextFieldType = FieldType.FixedWidth;
            try
            {
                parser.SetFieldWidths(this.FieldWidths.ToArray());
            }
            catch (Exception e)
            {
                errors.AppendLine(e.Message);
            }
        }

        var rows = new List<string[]>();
        while (!parser.EndOfData)
        {
            try
            {
                rows.Add(parser.ReadFields());
            }
            catch (MalformedLineException e)
            {
                // Collect per-line errors; parsing continues with the next line.
                errors.AppendFormat("Error on line {0}: {1}\n", e.LineNumber, e.Message);
            }
        }

        if (errors.Length > 0)
        {
            MessageBox.Show(errors.ToString(), "Errors");
        }

        return rows;
    }
}
/// <summary>
/// Reads a delimited text file into a DataTable (via FormDataTable).
/// </summary>
/// <param name="location">Path of the input file.</param>
/// <param name="delimiter">Field separator used in the file.</param>
public static DataTable DataTableFromTextFile(string location, char delimiter)
{
    List<string[]> data = new List<string[]>();
    // BUG FIX: the original hard-coded "," in SetDelimiters and ignored the
    // delimiter parameter (which FormDataTable already received); it now splits
    // on the caller-supplied delimiter. Also wrapped in using so the file
    // handle is released (the original never disposed the parser).
    using (var parser = new Microsoft.VisualBasic.FileIO.TextFieldParser(location))
    {
        parser.TextFieldType = Microsoft.VisualBasic.FileIO.FieldType.Delimited;
        parser.SetDelimiters(new string[] { delimiter.ToString() });
        while (!parser.EndOfData)
        {
            string[] row = parser.ReadFields();
            data.Add(row);
        }
    }
    return FormDataTable(data, delimiter);
}
/// <summary>
/// Verifies that line-by-line reading of a simple CSV behaves identically for
/// the custom parser's reader and the reference VB TextFieldParser.
/// </summary>
public void ReadLine_Sample()
{
    const string input = @"Name,Birth Date
Apollo Creed,1942-08-17
Ivan Drago,1961-11-03";
    var parserReader = new StringReader(input);
    var parser = new NotVisualBasic.FileIO.CsvTextFieldParser(parserReader);
    var vbParser = new Microsoft.VisualBasic.FileIO.TextFieldParser(new StringReader(input));
    vbParser.SetDelimiters(",");

    // All three lines read identically.
    Assert.Equal(vbParser.ReadLine(), parserReader.ReadLine());
    Assert.Equal(vbParser.ReadLine(), parserReader.ReadLine());
    Assert.Equal(vbParser.ReadLine(), parserReader.ReadLine());

    // Both parsers agree the data is exhausted.
    Assert.Null(vbParser.ReadFields());
    Assert.Null(parser.ReadFields());
    Assert.True(vbParser.EndOfData);
    Assert.True(parser.EndOfData);
}
/// <summary>
/// Consumes the next row of the CSV reader and, if the table has no columns
/// yet, uses its fields as column names plus a trailing "Empty" column.
/// </summary>
/// <param name="csvReader">Reader positioned at the header row.</param>
/// <param name="table">Table to receive the columns.</param>
static private void AddDataColumns(Microsoft.VisualBasic.FileIO.TextFieldParser csvReader, DataTable table)
{
    // The header row is consumed unconditionally, even when the table already
    // has columns — callers rely on the reader being advanced past it.
    string[] columns = csvReader.ReadFields();

    //If table has columns already, we don't want to add any.
    if (table.Columns.Count == 0)
    {
        // BUG FIX: ReadFields() returns null at end-of-data; the original then
        // threw NullReferenceException on columns.Length.
        if (columns != null)
        {
            for (int i = 0; i < columns.Length; i++)
            {
                DataColumn dtcCol = new DataColumn();
                dtcCol.AllowDBNull = true;
                dtcCol.ColumnName = columns[i];
                table.Columns.Add(dtcCol);
            }
        }
        DataColumn dtcCol2 = new DataColumn();
        dtcCol2.AllowDBNull = true;
        dtcCol2.ColumnName = "Empty";
        table.Columns.Add(dtcCol2);
    }
}
/// <summary>
/// Uses the Microsoft Text Field Parser to parse the CSV file, to give a baseline against an
/// established class library for parsing.
/// </summary>
private static void _TextFieldParserCsv()
{
    using (VB.TextFieldParser reader = new VB.TextFieldParser(PerformanceTests.CSV_DATA_FILE))
    {
        reader.SetDelimiters(",");
        reader.CommentTokens = new string[] { "#" };
        reader.HasFieldsEnclosedInQuotes = true;

        while (!reader.EndOfData)
        {
            string[] fields = reader.ReadFields();
            // Touch every field so the baseline incurs the same per-field access
            // cost as the parsers it is compared against.
            for (int index = 0; index < fields.Length; ++index)
            {
                string s = fields[index] as string;
            }
        }
    }
}
/// <summary>
/// Loads denial records from the hospital and vendor CSV extracts and computes
/// the records present in one source but missing from the other.
/// </summary>
public static void Main(string[] args)
{
    // HARD_CODED FOR EXAMPLE ONLY - TO BE RETRIEVED FROM APP.CONFIG IN REAL PROGRAM
    // BUG FIX: the originals combined a verbatim string with doubled backslashes,
    // producing literal "\\" separators in the path.
    string hospPath = @"C:\events\inbound\OBLEN_COB_Active_Inv_Advi_Daily_.csv";
    string vendPath = @"C:\events\outbound\Advi_OBlen_Active_Inv_Ack_Daily_.csv";

    // The two files share the same layout, so both go through one helper.
    List<DenialRecord> hospList = ReadDenialRecords(hospPath);
    List<DenialRecord> vendList = ReadDenialRecords(vendPath);

    // Compare the lists each way for denials not in the other source
    List<DenialRecord> hospExcpt = hospList.Except(vendList).ToList();
    List<DenialRecord> vendExcpt = vendList.Except(hospList).ToList();
}

/// <summary>
/// Parses one comma-delimited denial file into DenialRecord instances.
/// Rows with fewer than 7 fields are skipped; per-row parse errors are logged
/// and do not abort the whole file.
/// </summary>
/// <param name="path">Path of the CSV file to read.</param>
/// <returns>All well-formed records found in the file.</returns>
private static List<DenialRecord> ReadDenialRecords(string path)
{
    var records = new List<DenialRecord>();
    using (TextFieldParser parser = new Microsoft.VisualBasic.FileIO.TextFieldParser(path))
    {
        parser.TextFieldType = FieldType.Delimited;
        parser.SetDelimiters(",");
        parser.HasFieldsEnclosedInQuotes = false;
        parser.TrimWhiteSpace = true;
        while (!parser.EndOfData)
        {
            try
            {
                string[] row = parser.ReadFields();
                // BUG FIX: the original tested "row.Length <= 7", which admitted rows
                // with fewer than 7 fields and then threw IndexOutOfRangeException
                // on row[6]. A record needs at least 7 fields.
                if (row != null && row.Length >= 7)
                {
                    records.Add(new DenialRecord(row[0], row[1], row[2], row[3], row[4], row[5], row[6]));
                }
            }
            catch (Exception e)
            {
                // Best-effort: log and continue with the next row.
                Console.WriteLine("Error is: {0}", e.ToString());
            }
        }
        // NOTE: the explicit Close()/Dispose() calls were redundant inside "using".
    }
    return records;
}
// Loads tab-separated quote rows (name, date, value) from data_file into a single
// chart series, tracks the overall min/max dates seen, and afterwards syncs the
// X-axis range and the date pickers to that range.
// NOTE(review): only the series name from the first parsed row is used; if a series
// with that name already exists on the chart, loading stops entirely — confirm
// this "one file = one new series" behavior is intended.
private void AddDataFromFile(string data_file)
{
    using (TextFieldParser parser = new Microsoft.VisualBasic.FileIO.TextFieldParser(data_file))
    {
        parser.TrimWhiteSpace = true;
        parser.TextFieldType = FieldType.Delimited;
        parser.SetDelimiters("\t");
        System.Windows.Forms.DataVisualization.Charting.Series series = null;
        while (true)
        {
            string[] parts = parser.ReadFields();
            if (parts == null)
            {
                break;
            }
            // Rows with fewer than 3 fields are silently ignored.
            if (parts.Length >= 3)
            {
                string name = parts[0];
                string date = parts[1];
                string value = parts[2];
                if (series == null)
                {
                    if (chartQuotes.Series.FindByName(name) == null)
                    {
                        chartQuotes.Series.Add(name);
                        series = chartQuotes.Series[name];
                        series.ChartType = System.Windows.Forms.DataVisualization.Charting.SeriesChartType.Line;
                    }
                    else
                    {
                        break;
                    }
                }
                DateTime dt;
                double d;
                // NOTE(review): parses with the current culture — confirm the data
                // files use the machine's locale formats.
                if (Double.TryParse(value, out d) && DateTime.TryParse(date, out dt))
                {
                    series.Points.AddXY(dt, d);
                    // BUG FIX: removed dead stores "dateMin = new DateTime();" /
                    // "dateMax = new DateTime();" that were immediately overwritten.
                    if (!dateMin.HasValue || dt < dateMin)
                    {
                        dateMin = dt;
                    }
                    if (!dateMax.HasValue || dt > dateMax)
                    {
                        dateMax = dt;
                    }
                }
            }
        }
    }
    // Fit the axis and the pickers to the loaded date range.
    if (dateMin.HasValue && dateMax.HasValue)
    {
        chartQuotes.ChartAreas[0].AxisX.Minimum = dateMin.Value.ToOADate();
        chartQuotes.ChartAreas[0].AxisX.Maximum = dateMax.Value.ToOADate();
        dateFrom.Value = dateMin.Value;
        dateTo.Value = dateMax.Value;
    }
}
/// <summary>
/// Reads OrdersTest.csv into a typed DataTable and bulk-copies it into
/// [dbo].[Orders]. Fixes a VB-to-C# machine translation that did not compile.
/// </summary>
private void Button6_Click(System.Object sender, System.EventArgs e)
{
    // Define the Column Definition
    DataTable dt = new DataTable();
    dt.Columns.Add("OrderID", typeof(int));
    dt.Columns.Add("CustomerID", typeof(string));
    dt.Columns.Add("EmployeeID", typeof(int));
    dt.Columns.Add("OrderDate", typeof(System.DateTime));
    dt.Columns.Add("RequiredDate", typeof(System.DateTime));
    dt.Columns.Add("ShippedDate", typeof(System.DateTime));
    dt.Columns.Add("ShipVia", typeof(int));
    dt.Columns.Add("Freight", typeof(decimal));
    dt.Columns.Add("ShipName", typeof(string));
    dt.Columns.Add("ShipAddress", typeof(string));
    dt.Columns.Add("ShipCity", typeof(string));
    dt.Columns.Add("ShipRegion", typeof(string));
    dt.Columns.Add("ShipPostalCode", typeof(string));
    dt.Columns.Add("ShipCountry", typeof(string));
    // BUG FIX: "using (cn == new SqlConnection(...))" compared instead of assigning.
    using (SqlConnection cn = new SqlConnection("Server='Server_Name';Database='Database_Name';Trusted_Connection=True;"))
    {
        cn.Open();
        // BUG FIX: replaced VB "My.Computer.FileSystem.OpenTextFieldParser" and
        // manual Close/Dispose with a C# using block so the reader is always released.
        using (var reader = new Microsoft.VisualBasic.FileIO.TextFieldParser("C:\\Users\\Excel\\Desktop\\OrdersTest.csv"))
        {
            reader.TextFieldType = Microsoft.VisualBasic.FileIO.FieldType.Delimited;
            reader.Delimiters = new string[] { "," };
            while (!reader.EndOfData)
            {
                try
                {
                    string[] currentRow = reader.ReadFields();
                    DataRow dr = dt.NewRow();
                    // BUG FIX: "currColumn" was never declared; VB paren indexers
                    // (dt.Columns(i), currentRow(i), dr.Item(i)) replaced with [].
                    for (int currColumn = 0; currColumn <= dt.Columns.Count - 1; currColumn++)
                    {
                        string sqlColumnDataType = dt.Columns[currColumn].DataType.Name;
                        switch (sqlColumnDataType)
                        {
                            case "String":
                                if (string.IsNullOrEmpty(currentRow[currColumn]))
                                {
                                    dr[currColumn] = "";
                                }
                                else
                                {
                                    dr[currColumn] = Convert.ToString(currentRow[currColumn]);
                                }
                                break;
                            // BUG FIX: int columns (OrderID, EmployeeID, ShipVia) had no
                            // case and were never populated.
                            case "Int32":
                                if (string.IsNullOrEmpty(currentRow[currColumn]))
                                {
                                    dr[currColumn] = 0;
                                }
                                else
                                {
                                    dr[currColumn] = Convert.ToInt32(currentRow[currColumn]);
                                }
                                break;
                            case "Decimal":
                                if (string.IsNullOrEmpty(currentRow[currColumn]))
                                {
                                    dr[currColumn] = 0;
                                }
                                else
                                {
                                    dr[currColumn] = Convert.ToDecimal(currentRow[currColumn]);
                                }
                                break;
                            case "DateTime":
                                // BUG FIX: "" cannot be stored in a DateTime column; use DBNull.
                                if (string.IsNullOrEmpty(currentRow[currColumn]))
                                {
                                    dr[currColumn] = DBNull.Value;
                                }
                                else
                                {
                                    dr[currColumn] = Convert.ToDateTime(currentRow[currColumn]);
                                }
                                break;
                        }
                    }
                    dt.Rows.Add(dr);
                }
                catch (Microsoft.VisualBasic.FileIO.MalformedLineException ex)
                {
                    Interaction.MsgBox("Line " + ex.Message + "is not valid." + Constants.vbCrLf + "Terminating Read Operation.");
                    // BUG FIX: the original closed the reader and kept looping, so the
                    // next EndOfData check hit a disposed parser. Abort instead.
                    return;
                }
            }
        }
        // Stream the whole table into the destination in one bulk operation.
        using (SqlBulkCopy copy = new SqlBulkCopy(cn))
        {
            copy.DestinationTableName = "[dbo].[Orders]";
            copy.WriteToServer(dt);
        }
    }
}
/// <summary>
/// Fills CSVTable with rows parsed from the given CSV file, converting each field
/// to the column's declared type (String/Decimal/DateTime).
/// </summary>
/// <param name="CSVFileName">Path of the comma-delimited file to read.</param>
/// <param name="CSVTable">Table whose schema drives the per-column conversion; rows are appended.</param>
/// <returns>true on success; false if a malformed line aborts the read.</returns>
public static bool GetCsvData(string CSVFileName, ref DataTable CSVTable)
{
    // BUG FIX: replaced the VB-isms from the machine translation —
    // My.Computer.FileSystem.OpenTextFieldParser, paren indexers
    // (Columns(i), currentRow(i)), dr.Item(i) assignment, and the undeclared
    // "currColumn" — and let "using" dispose the reader on every path.
    using (var reader = new Microsoft.VisualBasic.FileIO.TextFieldParser(CSVFileName))
    {
        reader.TextFieldType = Microsoft.VisualBasic.FileIO.FieldType.Delimited;
        reader.Delimiters = new string[] { "," };
        while (!reader.EndOfData)
        {
            try
            {
                string[] currentRow = reader.ReadFields();
                DataRow dr = CSVTable.NewRow();
                for (int currColumn = 0; currColumn <= CSVTable.Columns.Count - 1; currColumn++)
                {
                    string sqlColumnDataType = CSVTable.Columns[currColumn].DataType.Name;
                    switch (sqlColumnDataType)
                    {
                        case "String":
                            if (string.IsNullOrEmpty(currentRow[currColumn]))
                            {
                                dr[currColumn] = "";
                            }
                            else
                            {
                                dr[currColumn] = Convert.ToString(currentRow[currColumn]);
                            }
                            break;
                        case "Decimal":
                            if (string.IsNullOrEmpty(currentRow[currColumn]))
                            {
                                dr[currColumn] = 0;
                            }
                            else
                            {
                                dr[currColumn] = Convert.ToDecimal(currentRow[currColumn]);
                            }
                            break;
                        case "DateTime":
                            // BUG FIX: an empty string cannot be stored in a DateTime
                            // column; use DBNull for missing dates.
                            if (string.IsNullOrEmpty(currentRow[currColumn]))
                            {
                                dr[currColumn] = DBNull.Value;
                            }
                            else
                            {
                                dr[currColumn] = Convert.ToDateTime(currentRow[currColumn]);
                            }
                            break;
                    }
                }
                CSVTable.Rows.Add(dr);
            }
            catch (Microsoft.VisualBasic.FileIO.MalformedLineException ex)
            {
                Interaction.MsgBox("Line " + ex.Message + "is not valid." + Constants.vbCrLf + "Terminating Read Operation.");
                return false;
            }
        }
    }
    return true;
}
/// <summary>
/// Imports a semicolon-delimited CSV (typeName;idShort;value[;valueType;category;
/// descriptionEN;descriptionDE[;semanticId]]) into the given Submodel, building
/// nested SubmodelElementCollections up to 10 levels deep.
/// </summary>
/// <param name="inputFn">Path of the CSV file; its base name becomes sm.idShort.</param>
/// <param name="env">Target environment (unused here; kept for interface compatibility).</param>
/// <param name="sm">Submodel receiving the created elements.</param>
/// <param name="smref">Reference to the submodel (unused here; kept for interface compatibility).</param>
/// <exception cref="InvalidOperationException">Thrown when a read yields no fields.</exception>
public static void ImportCSVtoSubModel(
    string inputFn, AdminShell.AdministrationShellEnv env, AdminShell.Submodel sm,
    AdminShell.SubmodelRef smref)
{
    AdminShell.SubmodelElementCollection[] propGroup = new AdminShell.SubmodelElementCollection[10];
    int i_propGroup = 0;
    // Dispose the parser so the input file handle is released.
    using (var parser = new Microsoft.VisualBasic.FileIO.TextFieldParser(inputFn))
    {
        parser.TextFieldType = Microsoft.VisualBasic.FileIO.FieldType.Delimited;
        parser.SetDelimiters(";");
        string[] rows = parser.ReadFields();
        if (rows == null)
        {
            throw new InvalidOperationException(
                $"There were no fields read from the inputFn: {inputFn}");
        }
        // BUG FIX: the original read rows[3..6] whenever rows.Length > 3, which threw
        // IndexOutOfRangeException for headers of 4-6 fields; a long header must have
        // at least 7 fields before those are compared.
        if (rows.Length < 3 || rows[0] != "typeName" || rows[1] != "idShort" || rows[2] != "value"
            || (rows.Length > 3 && (rows.Length < 7
                || rows[3] != "valueType" || rows[4] != "category"
                || rows[5] != "descriptionEN" || rows[6] != "descriptionDE")))
        {
            return;
        }
        sm.idShort = inputFn.Split('\\').Last().Replace(".csv", "");
        while (!parser.EndOfData)
        {
            rows = parser.ReadFields();
            if (rows == null)
            {
                throw new InvalidOperationException(
                    $"There were no fields read from inputFn: {inputFn}");
            }
            // Robustness: skip rows too short to carry typeName/idShort/value.
            if (rows.Length < 3)
            {
                continue;
            }
            switch (rows[0])
            {
                case "SubmodelElementCollection":
                    propGroup[i_propGroup] = AdminShell.SubmodelElementCollection.CreateNew(rows[1]);
                    if (i_propGroup == 0)
                    {
                        // Top-level collection hangs directly off the submodel.
                        sm.Add(propGroup[0]);
                        // BUG FIX: rows[7] was read whenever rows.Length > 3; require 8 fields.
                        if (rows.Length >= 8 && rows[7] != "")
                        {
                            propGroup[0].semanticId = new AdminShellV20.SemanticId(
                                AdminShell.Reference.CreateNew(
                                    "ConceptDescription", false, "IRI", rows[7]));
                        }
                        propGroup[0].kind = AdminShellV20.ModelingKind.CreateAsInstance();
                    }
                    else
                    {
                        propGroup[i_propGroup - 1].Add(propGroup[i_propGroup]);
                    }
                    i_propGroup++;
                    break;
                case "End-SubmodelElementCollection":
                    if (i_propGroup != 0)
                    {
                        i_propGroup--;
                    }
                    break;
                case "Property":
                    var p = AdminShell.Property.CreateNew(rows[1].Replace("-", "_"));
                    p.value = rows[2];
                    // BUG FIX: same bounds issue — the extended columns need 7 fields,
                    // and the optional semanticId needs 8.
                    if (rows.Length >= 7)
                    {
                        p.valueType = rows[3];
                        p.category = rows[4];
                        if (rows[5] != "")
                        {
                            p.AddDescription("en", rows[5]);
                        }
                        if (rows[6] != "")
                        {
                            p.AddDescription("de", rows[6]);
                        }
                        p.kind = AdminShellV20.ModelingKind.CreateAsInstance();
                        if (rows.Length >= 8 && rows[7] != "")
                        {
                            p.semanticId = new AdminShell.SemanticId(
                                AdminShell.Reference.CreateNew(
                                    "ConceptDescription", false, "IRI", rows[7]));
                        }
                    }
                    if (i_propGroup == 0)
                    {
                        sm.Add(p);
                    }
                    else
                    {
                        propGroup[i_propGroup - 1].Add(p);
                    }
                    break;
            }
        }
    }
}
/// <summary>
/// Handles the upload button: saves the posted CSV/Excel-CSV file under temp/xls,
/// parses its rows, inserts each leave-quota record via HR_CUTI_KUOTA_DAL, and
/// writes a processing log. The first row (header) is skipped.
/// </summary>
protected void btnUpload_Click(object sender, EventArgs e)
{
    var valid = true;
    if (!FileUploadControl.HasFile)
    {
        return;
    }
    var filename = FileUploadControl.FileName;
    var s = filename.Split('.');
    var extension = s[s.Length - 1];
    var saveAsFileName = "UploadKuotaCutiPegawai_" + DateTime.Now.ToString("yyyyMMddhhmmss") + "." + extension;
    var contenttype = FileUploadControl.PostedFile.ContentType;
    var log = "";
    try
    {
        // Only accept content types that plausibly carry CSV data.
        if (contenttype == "application/octet-stream" ||
            contenttype == "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet" ||
            contenttype == "application/vnd.ms-excel" ||
            contenttype == "text/csv")
        {
            FileUploadControl.SaveAs(Server.MapPath(@"../../temp/xls/") + saveAsFileName);
            log += "Upload file " + filename + " success.\t" + DateTime.Now.ToString(CultureInfo.InvariantCulture) + "\n\r";
            var uploadedPath = Server.MapPath(@"../../temp/xls/") + saveAsFileName;
            var ctr = 0;
            // BUG FIX: the parser was never closed, leaking a handle to the file
            // that was just saved.
            using (var parser = new Microsoft.VisualBasic.FileIO.TextFieldParser(uploadedPath))
            {
                parser.TextFieldType = Microsoft.VisualBasic.FileIO.FieldType.Delimited;
                parser.SetDelimiters(new string[] { "," });
                while (!parser.EndOfData)
                {
                    string[] row = parser.ReadFields();
                    if (ctr == 0)
                    {
                        // Skip the header row.
                        ctr = 1;
                    }
                    else if (row != null)
                    {
                        var employee_code = row[0];
                        var employee_name = row[1];
                        var start_date = row[4];
                        var end_date = row[5];
                        var Quota = row[6];
                        // Extra guard against a repeated header line in the data.
                        if (employee_code != "NIK")
                        {
                            try
                            {
                                HR_CUTI_KUOTA_DAL _dalHR_CUTI_KUOTA_DAL = new HR_CUTI_KUOTA_DAL();
                                Hashtable htParameters = new Hashtable();
                                var iNextId = 0;
                                htParameters["p_NIK"] = employee_code;
                                htParameters["p_START_DATE"] = Utility.ToDateTime(start_date);
                                htParameters["p_END_DATE"] = Utility.ToDateTime(end_date);
                                htParameters["p_Quota"] = Quota;
                                Utility.ApplyDefaultProp(htParameters);
                                _dalHR_CUTI_KUOTA_DAL.InsertTempBalance(htParameters, ref iNextId);
                                log += "Insert Data : " + employee_code + " - " + employee_name + " Success.\t" +
                                       DateTime.Now.ToString(CultureInfo.InvariantCulture) + "\n\r";
                            }
                            catch (Exception exc)
                            {
                                // BUG FIX: exc.InnerException can be null; fall back to
                                // the outer exception's message.
                                var errorMessage = exc.InnerException != null ? exc.InnerException.Message : exc.Message;
                                log += "Insert Data : " + employee_code + " - " + employee_name + " Failed.\t" +
                                       errorMessage + "\t" +
                                       DateTime.Now.ToString(CultureInfo.InvariantCulture) + "\n\r";
                                valid = false;
                            }
                        }
                    }
                }
            }
            Utility.ShowMessageBox(this,
                valid ? "Upload Success" : "Upload Success but error in processing data. Check Log for detail info.",
                null, "kuotacutiimport.aspx");
        }
        else
        {
            Utility.ShowMessageBox(this, "Error Excel Format", null, null);
        }
    }
    catch (Exception ex)
    {
        Utility.ShowMessageBox(this, "Upload Failed.", ex, null);
    }
    CreateLog(log);
}
/// <summary>
/// Imports a semicolon-delimited CSV (typeName;idShort;value) into the given
/// Submodel, nesting SubmodelElementCollections up to 10 levels deep. Returns
/// silently when the header row is missing or malformed.
/// </summary>
/// <param name="inputFn">Path of the CSV file to import.</param>
/// <param name="env">Target environment (unused here; kept for interface compatibility).</param>
/// <param name="sm">Submodel receiving the created elements.</param>
/// <param name="smref">Reference to the submodel (unused here; kept for interface compatibility).</param>
public static void ImportCSVtoSubModel(
    string inputFn, AdminShell.AdministrationShellEnv env, AdminShell.Submodel sm,
    AdminShell.SubmodelRef smref)
{
    AdminShell.SubmodelElementCollection[] propGroup = new AdminShell.SubmodelElementCollection[10];
    int i_propGroup = 0;
    // Dispose the parser so the input file handle is released.
    using (var parser = new Microsoft.VisualBasic.FileIO.TextFieldParser(inputFn))
    {
        parser.TextFieldType = Microsoft.VisualBasic.FileIO.FieldType.Delimited;
        parser.SetDelimiters(";");
        string[] rows = parser.ReadFields();
        if (rows == null || rows.Length < 3
            || rows[0] != "typeName" || rows[1] != "idShort" || rows[2] != "value")
        {
            return;
        }
        while (!parser.EndOfData)
        {
            rows = parser.ReadFields();
            if (rows == null || rows.Length < 1)
            {
                continue;
            }
            switch (rows[0])
            {
                case "SubmodelElementCollection":
                    // BUG FIX: rows[1] was read with only a Length >= 1 guard, so a
                    // bare "SubmodelElementCollection" row threw IndexOutOfRangeException.
                    if (rows.Length < 2)
                    {
                        break;
                    }
                    propGroup[i_propGroup] = AdminShell.SubmodelElementCollection.CreateNew(rows[1]);
                    if (i_propGroup == 0)
                    {
                        sm.Add(propGroup[0]);
                    }
                    else
                    {
                        propGroup[i_propGroup - 1].Add(propGroup[i_propGroup]);
                    }
                    i_propGroup++;
                    break;
                case "End-SubmodelElementCollection":
                    if (i_propGroup != 0)
                    {
                        i_propGroup--;
                    }
                    break;
                case "Property":
                    // BUG FIX: rows[1] and rows[2] were read without a length guard.
                    if (rows.Length < 3)
                    {
                        break;
                    }
                    var p = AdminShell.Property.CreateNew(rows[1]);
                    p.valueType = "string";
                    p.value = rows[2];
                    if (i_propGroup == 0)
                    {
                        sm.Add(p);
                    }
                    else
                    {
                        propGroup[i_propGroup - 1].Add(p);
                    }
                    break;
            }
        }
    }
}
/// <summary>
/// Parses a tab-delimited compressor telemetry export into CompressorInputLine
/// objects. The first line (header) is skipped.
/// </summary>
/// <param name="inputFile">Stream positioned at the start of the export. Note:
/// disposing the parser closes this stream when the method returns.</param>
/// <returns>One CompressorInputLine per data row, in file order.</returns>
public static List<CompressorInputLine> Create(Stream inputFile)
{
    var inputLines = new List<CompressorInputLine>();
    // BUG FIX: the parser was never disposed; the using block releases it (and the
    // underlying stream) deterministically.
    using (var parser = new Microsoft.VisualBasic.FileIO.TextFieldParser(inputFile))
    {
        parser.TextFieldType = Microsoft.VisualBasic.FileIO.FieldType.Delimited;
        parser.Delimiters = new string[] { "\t" };
        // Skip the first line, it's header info.
        parser.ReadLine();
        while (!parser.EndOfData)
        {
            string[] tokens = parser.ReadFields();
            // NOTE(review): the token indices below do NOT follow the raw CSV header
            // order quoted in the original comment (Id, Asset Name, Local Timestamp,
            // UTC Milliseconds, Compressor Oil Pressure, Engine Oil Pressure, ...);
            // they look re-ordered (roughly alphabetical after the first four).
            // Verify this mapping against a sample file before changing anything.
            inputLines.Add(new CompressorInputLine()
            {
                Id = parseInt(tokens[0]),
                AssetName = tokens[1],
                LocalTimestamp = parseDate(tokens[2]),
                UTCMilliseconds = parseInt(tokens[3]),
                CompressorOilPressure = parseDouble(tokens[4]),
                CompressorOilTemp = parseDouble(tokens[5]),
                CompressorStages = parseInt(tokens[6]),
                Cylinder1DischargeTemp = parseDouble(tokens[7]),
                Cylinder2DischargeTemp = parseDouble(tokens[8]),
                Cylinder3DischargeTemp = parseDouble(tokens[9]),
                Cylinder4DischargeTemp = parseDouble(tokens[10]),
                DowntimeHrsYest = parseDouble(tokens[11]),
                EngineOilPressure = parseDouble(tokens[12]),
                EngineOilTemp = parseDouble(tokens[13]),
                FacilityDesc = tokens[14],
                FacilityId = tokens[15],
                FuelPressure = parseDouble(tokens[16]),
                GasFlowRate = parseDouble(tokens[17]),
                GasFlowRate_RAW = parseDouble(tokens[18]),
                Horsepower = parseDouble(tokens[19]),
                LastSuccessfulCommTime = parseDate(tokens[20]),
                MaxDischargePressure = parseDouble(tokens[21]),
                MaxGasFlowrate = parseDouble(tokens[22]),
                MaxRPMs = parseDouble(tokens[23]),
                MaxSuctionPressure = parseDouble(tokens[24]),
                PctSuccessfulMsgsToday = parseDouble(tokens[25]),
                RPM = parseDouble(tokens[26]),
                RunStatus = tokens[27],
                RuntimeHrs = parseDouble(tokens[28]),
                SDStatusCode = tokens[29],
                Stage1DischargePressure = parseDouble(tokens[30]),
                Stage2DischargePressure = parseDouble(tokens[31]),
                Stage3DischargePressure = parseDouble(tokens[32]),
                SuccessfulMsgsToday = parseInt(tokens[33]),
                SuctionPressure = parseDouble(tokens[34]),
                SuctionTemp = parseDouble(tokens[35]),
                TOWCompName = tokens[36],
                UnitSize = tokens[37]
            });
        }
    }
    return inputLines;
}
/// <summary>
/// Parses the uploaded file to a CSVFile object containing all necessary data from
/// the file: column names, all data rows, and a per-column type tag ("N" numeric,
/// "S" string) inferred from the values.
/// </summary>
/// <param name="project">Project name stored on the resulting CSVFile.</param>
/// <param name="csvFile">File name under ~/App_Data/Files/ to parse.</param>
/// <returns>The populated CSVFile.</returns>
public CSVFile UploadToObject(string project, string csvFile)
{
    var nameList = new List<string>();
    var valueList = new List<List<string>>();
    var typeList = new List<string>();
    var path = Path.Combine(Server.MapPath("~/App_Data/Files/"), csvFile);
    // COMPAT NOTE: UTF-7 is obsolete and rejected by modern .NET for security
    // reasons; confirm the uploads really are UTF-7 before switching to UTF-8.
    // BUG FIX: the parser was never disposed; the using block releases the file handle.
    using (var parser = new Microsoft.VisualBasic.FileIO.TextFieldParser(path, System.Text.Encoding.UTF7))
    {
        parser.TextFieldType = Microsoft.VisualBasic.FileIO.FieldType.Delimited;
        // Delimiter set to ";" - this is where we could change code to handle other delimiters as well.
        parser.SetDelimiters(new string[] { ";" });
        int i = 0;
        // Parse the file content.
        while (!parser.EndOfData)
        {
            // Get one row of data.
            string[] row = parser.ReadFields();
            if (i == 0)
            {
                // First row holds the column names.
                nameList = row.ToList();
            }
            else
            {
                valueList.Add(row.ToList());
                if (i == 1)
                {
                    // Infer the type of each column from the first data row.
                    typeList = row.ToList();
                    int j = 0;
                    foreach (var term in row)
                    {
                        if (double.TryParse(term, out double n))
                        {
                            typeList[j] = "N";
                        }
                        else
                        {
                            typeList[j] = "S";
                        }
                        j++;
                    }
                }
                else
                {
                    // Demote a column to string ("S") as soon as any later value
                    // fails numeric parsing.
                    int j = 0;
                    foreach (var term in row)
                    {
                        if (!double.TryParse(term, out double n))
                        {
                            typeList[j] = "S";
                        }
                        j++;
                    }
                }
            }
            i++;
        }
    }
    // Create the CSVFile object, set the properties, and return it.
    CSVFile myFile = new CSVFile();
    myFile.Project = project;
    myFile.NameList = nameList;
    myFile.ValueList = valueList;
    myFile.TypeList = typeList;
    return myFile;
}