/// <summary>
/// Reads a delimited CSV stream and lazily yields one ExpandoObject per data row,
/// keyed by the header fields from the first row.
/// </summary>
/// <param name="stream">Stream positioned at the start of the CSV data.</param>
/// <returns>Lazy sequence of rows; each element is an IDictionary&lt;string, object&gt;.</returns>
public static IEnumerable<object> ReadCsv(Stream stream)
{
    // using ensures the parser (and underlying reader) is disposed even when the
    // caller abandons the iterator early; the original never disposed it.
    using (var parser = new TextFieldParser(stream)
    {
        TextFieldType = FieldType.Delimited,
        Delimiters = new[] { "," },
        HasFieldsEnclosedInQuotes = true,
        TrimWhiteSpace = true
    })
    {
        string[] fields = parser.ReadFields();
        if (fields == null)
        {
            yield break; // empty input: no header, no rows
        }
        while (!parser.EndOfData)
        {
            string[] row = parser.ReadFields();
            if (row == null)
            {
                yield break;
            }
            var data = (IDictionary<string, object>)new ExpandoObject();
            // Guard against short rows; missing trailing cells would otherwise
            // throw IndexOutOfRangeException.
            int count = Math.Min(fields.Length, row.Length);
            for (int i = 0; i < count; i++)
            {
                data[fields[i]] = row[i];
            }
            yield return data;
        }
    }
}
/// <summary>
/// Loads stock rate entries for <paramref name="stockName"/> from the Maya CSV data
/// directory and returns them wrapped in a Stock.
/// </summary>
/// <remarks>
/// NOTE(review): startDate/endDate are accepted but never used, and each parsed row
/// currently adds a null StockEntry (original behavior preserved) — the row-to-entry
/// mapping still needs to be implemented.
/// </remarks>
public Stock GetStock(StockName stockName, DateTime startDate, DateTime endDate)
{
    // String.Format with no placeholders was a no-op; plain literals suffice.
    string dir = @"..\..\StockData\Maya";
    string filename = String.Format("{0}.csv", stockName);
    var fullPath = Path.Combine(dir, filename);
    var rates = new List<IStockEntry>();
    // Dispose the parser deterministically; the original leaked it.
    using (var parser = new TextFieldParser(fullPath) { TextFieldType = FieldType.Delimited })
    {
        parser.SetDelimiters(",");
        // Skip the first 3 lines (headers).
        parser.ReadFields();
        parser.ReadFields();
        parser.ReadFields();
        while (!parser.EndOfData)
        {
            var fields = parser.ReadFields();
            if (fields != null)
            {
                StockEntry stockEntry = null; // TODO: build the entry from 'fields'
                rates.Add(stockEntry);
            }
        }
    }
    // Entries are reversed — presumably the file is newest-first; confirm.
    rates.Reverse();
    return new Stock(stockName, rates);
}
/// <summary>
/// Imports the given csv file
/// </summary>
public void StartTransactionImport(string csvfile)
{
    // REST endpoint is read from app configuration key "ovservice".
    api = new SimpleRestApi(ConfigurationManager.AppSettings["ovservice"]);
    log4net.Config.XmlConfigurator.Configure();
    // NOTE(review): the parser field is never disposed here — confirm whether a
    // later call or the owner cleans it up.
    parser = new TextFieldParser(csvfile) {Delimiters = new[] {","}};
    // Skip the header row.
    parser.ReadFields();
    while (!parser.EndOfData)
    {
        IList<string> csvFields = parser.ReadFields();
        if (HasValidNumberOfFields(csvFields))
        {
            Line CSVLine = new Line(csvFields);
            try
            {
                // Post each line with up to 3 attempts, polling every second,
                // giving up after 10 seconds total.
                Retry.Repeat(3)
                    .WithPolling(TimeSpan.FromSeconds(1))
                    .WithTimeout(TimeSpan.FromSeconds(10))
                    .Until(() => PostCSVLine(CSVLine));
            }
            catch (Exception)
            {
                // Best-effort import: a line that keeps failing is logged and skipped.
                log.DebugFormat("Max retries reached, skipping line: {0}", CSVLine);
            }
        }
        else
        {
            log.ErrorFormat("Invalid line is skipped! (Incorrect number of fields) {0}", string.Join(",", csvFields));
        }
    }
}
/// <summary>
/// Loads a CSV file into a BasicMLDataSet: the first <paramref name="inputCount"/>
/// columns form the input vector, the next <paramref name="outputCount"/> columns
/// the ideal (target) vector.
/// </summary>
/// <param name="fileInfo">CSV file to load.</param>
/// <param name="inputCount">Number of leading columns used as inputs.</param>
/// <param name="outputCount">Number of columns after the inputs used as ideals.</param>
/// <param name="randomize">When true, rows are returned in shuffled order.</param>
/// <param name="headers">When true, the first row is skipped as a header.</param>
/// <returns>The populated (and optionally shuffled) data set.</returns>
public static IMLDataSet LoadCSVToDataSet(FileInfo fileInfo, int inputCount, int outputCount, bool randomize = true, bool headers = true)
{
    BasicMLDataSet result = new BasicMLDataSet();
    // Values are parsed with the "en" culture ('.' decimal separator).
    CultureInfo CSVformat = new CultureInfo("en");
    using (TextFieldParser parser = new TextFieldParser(fileInfo.FullName))
    {
        parser.TextFieldType = FieldType.Delimited;
        parser.SetDelimiters(",");
        if (headers)
            parser.ReadFields();
        while (!parser.EndOfData)
        {
            //Processing row
            string[] fields = parser.ReadFields();
            var input = new BasicMLData(inputCount);
            for (int i = 0; i < inputCount; i++)
                input[i] = double.Parse(fields[i], CSVformat);
            var ideal = new BasicMLData(outputCount);
            for (int i = 0; i < outputCount; i++)
                ideal[i] = double.Parse(fields[i + inputCount], CSVformat);
            result.Add(input, ideal);
        }
    }
    // Shuffle by ordering on a random key (evaluated once per element in LINQ to Objects).
    var rand = new Random(DateTime.Now.Millisecond);
    return (randomize ? new BasicMLDataSet(result.OrderBy(r => rand.Next()).ToList()) : new BasicMLDataSet(result));
}
/// <summary>
/// Reads a CSV export and returns "office,name,title" strings for every row whose
/// physicalDeliveryOfficeName column is non-empty.
/// </summary>
/// <param name="SourceFilePath">Path to the CSV file; the first row must be a header.</param>
/// <returns>One comma-joined string per matching row.</returns>
/// <exception cref="InvalidOperationException">A required header column is missing.</exception>
private static List<string> TextFieldParserMethod(string SourceFilePath)
{
    List<string> ResultList = new List<string>();
    using (TextFieldParser parser = new TextFieldParser(SourceFilePath))
    {
        parser.TextFieldType = FieldType.Delimited;
        parser.SetDelimiters(",");
        // Locate the columns of interest by header name.
        string[] header = parser.ReadFields();
        if (header == null)
        {
            return ResultList; // empty file
        }
        int physicalDeliveryOfficeNameIndex = Array.IndexOf(header, "physicalDeliveryOfficeName");
        int nameIndex = Array.IndexOf(header, "name");
        int titleIndex = Array.IndexOf(header, "title");
        // Fail fast with a clear message instead of an IndexOutOfRangeException
        // deep inside the row loop when a required column is missing (IndexOf
        // returns -1 in that case).
        if (physicalDeliveryOfficeNameIndex < 0 || nameIndex < 0 || titleIndex < 0)
        {
            throw new InvalidOperationException(
                "CSV header is missing one of the required columns: physicalDeliveryOfficeName, name, title.");
        }
        while (!parser.EndOfData)
        {
            //Processing row
            string[] fields = parser.ReadFields();
            string[] CurrentRow = new string[3]
            {
                fields[physicalDeliveryOfficeNameIndex],
                fields[nameIndex],
                fields[titleIndex]
            };
            // Only keep rows that actually have an office assigned.
            if (!string.IsNullOrEmpty(CurrentRow[0]))
                ResultList.Add(string.Join(",", CurrentRow));
        }
    }
    return ResultList;
}
/// <summary>
/// Opens (creating if necessary) the report file at <paramref name="path"/> and
/// loads its header languages and data rows.
/// </summary>
/// <param name="path">Path of the comma-delimited report file.</param>
public ReportFile(string path)
{
    fileInfo = new FileInfo(path);
    if (!fileInfo.Exists)
    {
        CreateReportFile(fileInfo);
    }
    // using guarantees the parser is closed even if reading throws; the original
    // only called Close() on the success path.
    using (var parser = new TextFieldParser(path) { Delimiters = new[] { "," } })
    {
        if (!parser.EndOfData)
        {
            var headerFields = parser.ReadFields();
            var langList = new List<string>();
            // skip Date/Time and Word Count column headers (first two columns)
            for (var i = 2; i < headerFields.Length; ++i)
            {
                var lang = headerFields[i];
                langList.Add(lang);
                langs.Add(lang);
            }
            while (!parser.EndOfData)
            {
                rows.Add(new ReportRow(langList.ToArray(), parser.ReadFields()));
            }
        }
    }
}
/// <summary>
/// Loads a CSV file into a DataTable; the first row supplies the column names and
/// empty cells become null (DBNull in the table). Best effort: returns an
/// empty/partial table on error.
/// http://stackoverflow.com/questions/16606753/populating-a-dataset-from-a-csv-file
/// </summary>
/// <param name="filePath">Path of the CSV file to read.</param>
/// <returns>The populated DataTable.</returns>
public static DataTable GetDataTabletFromCSVFile(string filePath)
{
    DataTable csvData = new DataTable();
    try
    {
        using (TextFieldParser csvReader = new TextFieldParser(filePath))
        {
            csvReader.SetDelimiters(new string[] { "," });
            csvReader.HasFieldsEnclosedInQuotes = true;
            string[] colFields = csvReader.ReadFields();
            foreach (string column in colFields)
            {
                DataColumn datecolumn = new DataColumn(column);
                datecolumn.AllowDBNull = true;
                csvData.Columns.Add(datecolumn);
            }
            while (!csvReader.EndOfData)
            {
                string[] fieldData = csvReader.ReadFields();
                //Making empty value as null
                for (int i = 0; i < fieldData.Length; i++)
                {
                    if (fieldData[i] == "")
                    {
                        fieldData[i] = null;
                    }
                }
                csvData.Rows.Add(fieldData);
            }
        }
    }
    catch (Exception ex)
    {
        // Best-effort by design, but don't swallow silently: surface the failure
        // to debug listeners so bad files are diagnosable (was an empty catch).
        System.Diagnostics.Debug.WriteLine("GetDataTabletFromCSVFile failed: " + ex);
    }
    return csvData;
}
/// <summary>
/// Converts the tab-delimited import file configured in Settings into SQL INSERT
/// statements written to the configured output file (and echoed to the console).
/// </summary>
static void Main(string[] args)
{
    PrintHeading();
    if (!File.Exists(Settings.Default.ImportFile))
    {
        Console.WriteLine("Could not find data file at '" + Settings.Default.ImportFile);
        return;
    }
    if (File.Exists(Settings.Default.OutputFile))
        File.Delete(Settings.Default.OutputFile);
    // Both the parser and the writer are now disposed; the original only
    // disposed the StreamWriter and leaked the parser.
    using (var parser = new TextFieldParser(Settings.Default.ImportFile))
    using (var sw = new StreamWriter(Settings.Default.OutputFile))
    {
        parser.SetDelimiters(new[] {"\t"});
        // Consume first row.
        if (!parser.EndOfData)
            parser.ReadFields();
        while (!parser.EndOfData)
        {
            var fields = parser.ReadFields();
            var call = new SatComCallIndentifier(fields);
            Console.WriteLine(call.GetSqlInsert());
            sw.WriteLine(call.GetSqlInsert());
        }
    }
    Console.WriteLine();
    Console.WriteLine("Outputted sql to: " + Settings.Default.OutputFile);
    Console.ReadLine();
}
/// <summary>
/// Reads a translation CSV (header row, then one translated string per row in
/// column 1) and fills patchDict with row index -> non-empty translated string.
/// </summary>
/// <param name="translationCSVPath">Path of the translation CSV file.</param>
public void Import(string translationCSVPath)
{
    patchDict = new Dictionary<int, string>();
    using (TextFieldParser parser = new TextFieldParser(translationCSVPath))
    {
        parser.TextFieldType = FieldType.Delimited;
        parser.SetDelimiters(",");
        parser.ReadFields(); // Skip header
        for (int rowIndex = 0; !parser.EndOfData; rowIndex++)
        {
            string[] columns = parser.ReadFields();
            string translation = columns[1];
            // Rows with an empty translation are skipped but still advance the index.
            if (!string.IsNullOrEmpty(translation))
            {
                patchDict.Add(rowIndex, translation);
            }
        }
    }
}
/// <summary>
/// Validates that the stream is an xtrainer .csvx file and builds the matching
/// XtrainerDataProvider. Ownership of the parser transfers to the provider, so it
/// is deliberately not disposed here.
/// </summary>
/// <param name="sourcedStream">Stream plus a source description used in error messages.</param>
/// <returns>A provider wrapping the still-open parser.</returns>
/// <exception cref="Exception">The stream is empty or lacks the 'ver' marker.</exception>
public static XtrainerDataProvider Create(SourcedStream sourcedStream)
{
    var parser = new TextFieldParser(sourcedStream.Stream)
    {
        TextFieldType = FieldType.Delimited,
        Delimiters = new[] {","}
    };
    if (parser.EndOfData)
    {
        throw new Exception(string.Format("The file {0} does not seem to be a valid xtrainer .csvx file because it is empty.", sourcedStream.Source));
    }
    // First row must start with a "ver" marker field.
    var rows = parser.ReadFields();
    if (!(rows.Length >= 1 && rows[0] == "ver"))
    {
        throw new Exception(string.Format("The file {0} does not seem to be a valid xtrainer .csvx file because it doesn't say 'ver' in the first field.", sourcedStream.Source));
    }
    // Second row: when it is exactly 5 numeric fields, it encodes the start time
    // as year, month, day, hour, minute.
    rows = parser.ReadFields();
    int dummy;
    if (rows.Length == 5 && int.TryParse(rows[0], out dummy))
    {
        var startDateTime = new DateTime(int.Parse(rows[0]), int.Parse(rows[1]), int.Parse(rows[2]), int.Parse(rows[3]), int.Parse(rows[4]), 0);
        // One more row is consumed before handing the parser over — presumably a
        // header line; confirm against the .csvx format.
        parser.ReadFields();
        return new XtrainerDataProvider(parser,startDateTime);
    }
    return new XtrainerDataProvider(parser);
}
// Verifies that the custom CsvTextFieldParser matches the VB TextFieldParser when
// ReadLine() is interleaved with ReadFields() and a quoted field spans a newline:
// the ReadLine call cuts into the middle of the multi-line quoted field, changing
// how the remaining text parses — both parsers must agree on that altered output.
public void ReadLine_SampleWithNewlineInQuotedField()
{
    const string input = @"Name,Birth Date
""Creed, Apollo"",1942-08-17
""Ivan
Drago"",1961-11-03
""Robert """"Rocky"""" Balboa"",1945-07-06";
    var parserReader = new StringReader(input);
    var parser = new NotVisualBasic.FileIO.CsvTextFieldParser(parserReader);
    var vbParser = new Microsoft.VisualBasic.FileIO.TextFieldParser(new StringReader(input));
    vbParser.SetDelimiters(",");
    // Header row and first record parse identically in both implementations.
    Assert.Equal(vbParser.ReadFields(), parser.ReadFields());
    Assert.Equal(vbParser.ReadFields(), parser.ReadFields());
    Assert.Equal(vbParser.ReadLine(), parserReader.ReadLine());
    // The readline should have read into the middle of the field, which changes the parsing output
    Assert.Equal(new[] { @"Drago""", "1961-11-03" }, vbParser.ReadFields());
    Assert.Equal(new[] { @"Drago""", "1961-11-03" }, parser.ReadFields());
    Assert.Equal(vbParser.ReadFields(), parser.ReadFields());
    // Both parsers agree the input is exhausted.
    Assert.Null(vbParser.ReadFields());
    Assert.Null(parser.ReadFields());
    Assert.True(vbParser.EndOfData);
    Assert.True(parser.EndOfData);
}
/// <summary>
/// Parses CSV file.
/// </summary>
/// <param name="file">File to parse.</param>
/// <param name="entityNameField">Name of a field containing entity names.</param>
/// <returns>Parsed entities</returns>
/// <exception cref="Exception">The file has no column named <paramref name="entityNameField"/>.</exception>
public IEnumerable<object> Parse(string file, string entityNameField)
{
    using (var parser = new TextFieldParser(file))
    {
        parser.SetDelimiters(new[] { "," });
        parser.HasFieldsEnclosedInQuotes = true;
        parser.TrimWhiteSpace = true;
        // Read header; column names are matched case-insensitively.
        var columns = parser.ReadFields().Select(col => col.ToLowerInvariant()).ToArray();
        entityNameField = entityNameField.ToLowerInvariant();
        // Contains() states the membership test directly; the original's
        // FirstOrDefault(...) == null would also misfire on a null column name.
        if (!columns.Contains(entityNameField))
        {
            throw new Exception("File '" + file + "' does not contain required field '" + entityNameField + "'");
        }
        // Parse rows.
        while (!parser.EndOfData)
        {
            var values = parser.ReadFields();
            // Fresh dictionary per row: the original reused a single instance,
            // which corrupts results for any consumer that holds a reference
            // across iterations.
            var row = new Dictionary<string, string>();
            for (int i = 0; i < columns.Length; i++)
            {
                row[columns[i]] = values[i];
            }
            yield return CreateEntity(row, entityNameField);
        }
    }
}
/// <summary>
/// Loads a CSV file into a DataTable, sanitizing every header and cell with the
/// cleaner matching <paramref name="service"/> ("email", "deduplicate", or the
/// default cleaner). Empty cells become null. Best effort: returns an
/// empty/partial table on error.
/// </summary>
/// <param name="csv_file_path">Path of the CSV file to read.</param>
/// <param name="service">Selects which sanitizer is applied to cell values.</param>
/// <returns>The populated DataTable.</returns>
public DataTable GetDataTabletFromCSVFile(string csv_file_path, string service)
{
    DataTable csvData = new DataTable();
    try
    {
        using (TextFieldParser csvReader = new TextFieldParser(csv_file_path))
        {
            csvReader.SetDelimiters(new string[] { "," });
            csvReader.HasFieldsEnclosedInQuotes = true;
            // Header row: sanitize names and create nullable columns.
            string[] colFields = csvReader.ReadFields();
            for (int i = 0; i < colFields.Length; i++)
            {
                colFields[i] = this.RemoveSpecialCharacters(colFields[i]);
            }
            foreach (string column in colFields)
            {
                DataColumn datecolumn = new DataColumn(column);
                datecolumn.AllowDBNull = true;
                csvData.Columns.Add(datecolumn);
            }
            while (!csvReader.EndOfData)
            {
                string[] fieldData = csvReader.ReadFields();
                for (int i = 0; i < fieldData.Length; i++)
                {
                    if (fieldData[i] == "")
                    {
                        fieldData[i] = null;
                    }
                    else
                    {
                        // Pick the sanitizer for the requested service.
                        switch (service)
                        {
                            case "email":
                                fieldData[i] = this.RemoveSpecialCharactersEmail(fieldData[i]);
                                break;
                            case "deduplicate":
                                fieldData[i] = this.RemoveSpecialCharactersDeduplicate(fieldData[i]);
                                break;
                            default:
                                fieldData[i] = this.RemoveSpecialCharacters(fieldData[i]);
                                break;
                        }
                    }
                }
                csvData.Rows.Add(fieldData);
            }
        }
    }
    catch (Exception ex)
    {
        // Best-effort by design, but surface the failure to debug listeners
        // instead of swallowing it silently (was an empty catch with a TODO).
        System.Diagnostics.Debug.WriteLine("GetDataTabletFromCSVFile failed: " + ex);
    }
    return csvData;
}
/// <summary>
/// Imports a CSV of scores, routing rows with a missing score, an unparseable
/// date, or a date after 2010-04-15 to the error table and the rest to the main
/// table. The header row is consumed without being imported.
/// </summary>
/// <param name="file">Path of the CSV file to import.</param>
public static void ImportCSV(string file)
{
    TextFieldParser parser = new TextFieldParser(file);
    parser.TextFieldType = FieldType.Delimited;
    parser.SetDelimiters(",");
    bool isHeaderRow = true;
    while (!parser.EndOfData)
    {
        // The first row is the header; consume it without importing.
        if (isHeaderRow)
        {
            isHeaderRow = false;
            parser.ReadFields();
            continue;
        }
        DateTime parsedDate;
        DateTime cutoff = Convert.ToDateTime("4/15/2010");
        bool hasError = false;
        string[] columns = parser.ReadFields();
        // Missing score: substitute "0" and route to the error table.
        if (String.IsNullOrEmpty(columns[3]))
        {
            columns[3] = "0";
            hasError = true;
        }
        // Unparseable date: substitute the sentinel date and route to the error
        // table. (On failure parsedDate is DateTime.MinValue, below the cutoff.)
        if (!DateTime.TryParse(columns[2], out parsedDate))
        {
            columns[2] = "05/05/55";
            hasError = true;
        }
        // Dates after the cutoff are also treated as errors.
        if (parsedDate > cutoff)
        {
            hasError = true;
        }
        if (hasError)
        {
            InsertError(columns[0], columns[1], columns[2], columns[3]);
        }
        else
        {
            Insert(columns[0], columns[1], columns[2], columns[3]);
        }
    }
    parser.Close();
}
/// <summary>
/// Reads a CSV file into a DataTable. The first row supplies the column names.
/// </summary>
/// <param name="path">Directory path to read from.</param>
/// <param name="filename">File name to read.</param>
/// <returns>The CSV content as a DataTable.</returns>
public DataTable CsvReader(string path, string filename)
{
    string[] data;
    DataTable dt = new DataTable();
    // using disposes the parser on every path; the original only called Dispose
    // at the end, leaking the file handle when an exception escaped earlier.
    using (TextFieldParser parser = new TextFieldParser(path + filename, encord))
    {
        parser.TextFieldType = FieldType.Delimited;
        // Fields are comma-separated.
        parser.SetDelimiters(",");
        // Check whether the file has any data.
        if (!parser.EndOfData)
        {
            // Read the first (header) line of the CSV.
            data = parser.ReadFields();
            // Number of columns.
            int cols = data.Length;
            try
            {
                for (int i = 0; i < cols; i++)
                {
                    // Use each header cell as a column name.
                    dt.Columns.Add(data[i]);
                }
            }
            catch (System.Data.DuplicateNameException)
            {
                // Duplicate column names: ask the user to fix the checklist and retry.
                MessageBox.Show(
                    "読み込みエラー\nチェックリストの中に重複している値がないか確認し、修正を行ってから実行しなおしてください。"
                );
            }
        }
        // Store the remaining CSV rows into the DataTable.
        while (!parser.EndOfData)
        {
            data = parser.ReadFields();
            DataRow row = dt.NewRow();
            for (int i = 0; i < dt.Columns.Count; i++)
            {
                row[i] = data[i];
            }
            dt.Rows.Add(row);
        }
    }
    return dt;
}
// Parses test.txt, whose rows come in three layouts identified by their first two
// characters ("CK"/"PB" fixed-width, "SP" comma-delimited), routing each row to
// the matching DataGridView and sorting all three grids afterwards.
private void btnParseTextFiles_Click(object sender, EventArgs e)
{
    using (TextFieldParser myReader = new TextFieldParser("test.txt"))
    {
        // Field widths / delimiters for the three row formats.
        // (Per TextFieldParser docs, a width of -1 means the last field is variable length.)
        int[] FirstFormat = { 5, 10, -1 };
        int[] SecondFormat = { 6, 10, 17, -1 };
        string[] ThirdFormat = { "," };
        this.DataGridView1.Rows.Clear();
        this.DataGridView2.Rows.Clear();
        this.DataGridView3.Rows.Clear();
        string[] CurrentRow;
        while (!myReader.EndOfData)
        {
            try
            {
                // The first two characters identify which format the row uses.
                string RowType = myReader.PeekChars(2);
                switch (RowType)
                {
                    case "CK":
                        myReader.TextFieldType = FieldType.FixedWidth;
                        myReader.FieldWidths = FirstFormat; // or myReader.SetFieldWidths(FirstFormat);
                        CurrentRow = myReader.ReadFields();
                        this.DataGridView1.Rows.Add(CurrentRow);
                        break;
                    case "PB":
                        myReader.TextFieldType = FieldType.FixedWidth;
                        myReader.FieldWidths = SecondFormat; // or myReader.SetFieldWidths(SecondFormat);
                        CurrentRow = myReader.ReadFields();
                        this.DataGridView2.Rows.Add(CurrentRow);
                        break;
                    case "SP":
                        myReader.TextFieldType = FieldType.Delimited;
                        myReader.Delimiters = ThirdFormat; // or myReader.SetDelimiters(ThirdFormat);
                        CurrentRow = myReader.ReadFields();
                        this.DataGridView3.Rows.Add(CurrentRow);
                        break;
                }
            }
            catch (MalformedLineException ex)
            {
                // Report the bad line and keep going.
                MessageBox.Show("行 " + ex.Message + " 是无效的。略过。");
            }
        }
        // Sort the contents of each DataGridView control.
        DataGridView1.Sort(DataGridView1.Columns[0], System.ComponentModel.ListSortDirection.Ascending);
        DataGridView2.Sort(DataGridView2.Columns[0], System.ComponentModel.ListSortDirection.Ascending);
        DataGridView3.Sort(DataGridView3.Columns[0], System.ComponentModel.ListSortDirection.Ascending);
    }
}
// Reads the newline-separated pairs of first and last names from the file.
// Returns null when the file does not exist or contains no mappable rows
// (original contract preserved).
public IList<NameModel> ReadAll(string filePath)
{
    IList<NameModel> users = null;
    if (!File.Exists(filePath))
    {
        return users;
    }
    using (var reader = File.OpenRead(filePath))
    using (var textFileParser = new TextFieldParser(reader))
    {
        textFileParser.TrimWhiteSpace = true;
        textFileParser.Delimiters = new[] {","};
        while (!textFileParser.EndOfData)
        {
            // Parse one comma-separated line into a model; skip unmappable rows.
            var model = GetNameModel(textFileParser.ReadFields());
            if (model == null)
            {
                continue;
            }
            // The list is created lazily so an all-invalid file still yields null.
            users = users ?? new List<NameModel>();
            users.Add(model);
        }
    }
    return users;
}
/// <summary>
/// Loads the CSV file with the different animal species.
/// </summary>
/// <returns>List of loaded animals</returns>
public List<Diersoort> Load()
{
    List<Diersoort> loadedAnimals = new List<Diersoort>();
    // Get all the animal names from the local storage
    using (TextFieldParser parser = new TextFieldParser(FilePath + "broedvogels.csv"))
    {
        parser.TextFieldType = FieldType.Delimited;
        parser.SetDelimiters(",");
        while (!parser.EndOfData)
        {
            //Processing row
            string[] fields = parser.ReadFields();
            // NOTE(review): each comma-field is split again on ';' — the file
            // apparently packs one record per ';'-separated field; confirm the format.
            foreach (string field in fields)
            {
                string[] columns = field.Split(';');
                // columns[2]/[3] are "dd-MM" start/end dates; the year is fixed at 2000.
                string[] startDatetime = columns[2].Split('-');
                string[] endDatetime = columns[3].Split('-');
                DateTime sdt = new DateTime(2000, Convert.ToInt32(startDatetime[1]), Convert.ToInt32(startDatetime[0]));
                DateTime edt = new DateTime(2000, Convert.ToInt32(endDatetime[1]), Convert.ToInt32(endDatetime[0]));
                // columns: 0 = name, 1 = second name field, 4 = numeric value
                // (exact meanings defined by Vogel's constructor — confirm there).
                Diersoort toAdd = new Vogel(
                    columns[0],
                    columns[1],
                    sdt,
                    edt,
                    Convert.ToInt32(columns[4]));
                loadedAnimals.Add(toAdd);
            }
        }
    }
    return loadedAnimals;
}
/// <summary>
/// Loads dnscrypt-resolvers.csv from the application directory and populates
/// providerList with one ProviderItem per resolver row.
/// </summary>
public ProviderMgr()
{
    // Path.Combine instead of manual string concatenation.
    string csvPath = Path.Combine(Directory.GetCurrentDirectory(), "dnscrypt-resolvers.csv");
    // using disposes the parser on all paths; the original never closed it on error.
    using (TextFieldParser parser = new TextFieldParser(csvPath))
    {
        parser.TextFieldType = FieldType.Delimited;
        parser.SetDelimiters(",");
        // Skip the header row up front instead of parsing it into a ProviderItem
        // and calling RemoveAt(0) afterwards (which also threw on an empty file).
        if (!parser.EndOfData)
        {
            parser.ReadFields();
        }
        while (!parser.EndOfData)
        {
            ProviderItem providerItem = new ProviderItem();
            string[] fields = parser.ReadFields();
            providerItem.setName(fields[0]);
            providerItem.setFullName(fields[1]);
            providerItem.setDescription(fields[2]);
            providerItem.setLocation(fields[3]);
            providerItem.setCoordinates(fields[4]);
            providerItem.setURL(fields[5]);
            providerItem.setVersion(fields[6]);
            providerItem.setDNSSEC(fields[7]);
            providerItem.setNoLogs(fields[8]);
            providerItem.setNamecoin(fields[9]);
            providerItem.setAddress(fields[10]);
            providerItem.setProviderName(fields[11]);
            providerItem.setPublicKey(fields[12]);
            providerItem.setPublicKeyTXT(fields[13]);
            providerList.Add(providerItem);
        }
    }
}
/// <summary>
/// Reads trade entries (date,open,high,low,close,volume) from a CSV file.
/// Returns an empty array as soon as any row does not have exactly 6 fields
/// (original contract preserved).
/// </summary>
/// <param name="filePath">Path of the CSV file to read.</param>
public TradeDataEntry[] Read(string filePath)
{
    using (var parser = new TextFieldParser(filePath))
    {
        parser.TextFieldType = FieldType.Delimited;
        parser.SetDelimiters(",");
        var entries = new List<TradeDataEntry>();
        while (!parser.EndOfData)
        {
            var row = parser.ReadFields();
            if (row == null)
            {
                continue;
            }
            if (row.Length != 6)
            {
                // Malformed row: bail out with an empty result.
                return new TradeDataEntry[] { };
            }
            entries.Add(new TradeDataEntry(
                DateTime.ParseExact(row[0], "yyyy-M-d", null),
                double.Parse(row[1], CultureInfo.InvariantCulture),
                double.Parse(row[2], CultureInfo.InvariantCulture),
                double.Parse(row[3], CultureInfo.InvariantCulture),
                double.Parse(row[4], CultureInfo.InvariantCulture),
                int.Parse(row[5])));
        }
        return entries.ToArray();
    }
}
/// <summary>
/// Loads the comma-delimited file at FilePath into _data, skipping the first line
/// and any line starting with '#'. Does nothing when FilePath is null.
/// </summary>
public void ReadFile()
{
    if (FilePath == null) return;
    // using disposes the parser; the original never closed it.
    using (var parser = new TextFieldParser(FilePath)
    {
        TextFieldType = FieldType.Delimited,
        CommentTokens = new[] {"#"}
    })
    {
        parser.SetDelimiters(",");
        parser.HasFieldsEnclosedInQuotes = false;
        // Skip the first line.
        parser.ReadLine();
        while (!parser.EndOfData)
        {
            var row = parser.ReadFields();
            if (row == null) continue;
            var newLine = new List<string>(row.Length);
            newLine.AddRange(row);
            _data.Add(newLine);
        }
    }
}
/// <summary>
/// Streams Brand records from the semicolon-delimited file at 'path', ignoring
/// '#' comment lines and the header line. Profit is parsed with the Swedish culture.
/// </summary>
public IEnumerable<Brand> Read()
{
    using (TextFieldParser parser = new TextFieldParser(path))
    {
        parser.CommentTokens = new string[] { "#" };
        parser.SetDelimiters(new string[] { ";" });
        parser.HasFieldsEnclosedInQuotes = true;
        // Skip over header line.
        parser.ReadLine();
        while (!parser.EndOfData)
        {
            string[] cells = parser.ReadFields();
            var brand = new Brand();
            brand.Name = cells[0];
            brand.FactoryLocation = cells[1];
            brand.EstablishedYear = int.Parse(cells[2]);
            brand.Profit = double.Parse(cells[3], swedishCulture);
            yield return brand;
        }
    }
}
/// <summary>
/// Validates that the stream is a LeMond .csv file and dispatches to the data
/// provider matching the device named in the header row. Ownership of the parser
/// transfers to the returned provider, so it is deliberately not disposed here.
/// </summary>
/// <param name="sourcedStream">Stream plus a source description used in error messages.</param>
/// <returns>The provider for the detected device.</returns>
/// <exception cref="Exception">Empty stream, missing 'LeMond' marker, or unknown device.</exception>
public static ILeMondDataProvider Create(SourcedStream sourcedStream)
{
    var parser = new TextFieldParser(sourcedStream.Stream);
    parser.TextFieldType = FieldType.Delimited;
    parser.Delimiters = new[] { "," };
    if (parser.EndOfData)
    {
        throw new Exception(string.Format("The file {0} does not seem to be a valid LeMond .csv file because it is empty.", sourcedStream.Source));
    }
    // Header row: field 0 must be "LeMond"; fields 1/3 identify the device type.
    var row = parser.ReadFields();
    if (!(row.Length >= 1 && row[0] == "LeMond"))
    {
        throw new Exception(string.Format("The file {0} does not seem to be a valid LeMond .csv file because it doesn't say 'LeMond' in the first field.", sourcedStream.Source));
    }
    if (row.Length >= 4 && row[3] == "gforce")
    {
        return new LeMondGForceCsvDataProvider(sourcedStream.Source, parser, row);
    }
    if (row.Length >= 4 && row[3] == "STN")
    {
        return new LeMondGForceSTNCsvDataProvider(sourcedStream.Source, parser, row);
    }
    else if (row.Length >= 2 && row[1] == "Revolution")
    {
        return new LeMondRevolutionCsvDataProvider(sourcedStream.Source, parser, row);
    }
    throw new Exception(string.Format("Not a recognized LeMond device. Header = '{0}'", string.Join(",", row)));
}
// Constructor
// filename - file from which to read the puzzle
public Sudoku(string filename)
{
    // TODO: Add the following exception handling
    // - Validate that the length of the first row is a perfect square value
    // - Validate that each line contains the same number of elements
    // - Validate that each element is a numeric value between 1 and gridSize or the unknown value
    using (TextFieldParser parser = new TextFieldParser(filename))
    {
        int row = 0;
        parser.Delimiters = new string[] { "," };
        while (!parser.EndOfData)
        {
            // Read in a line of the puzzle
            string[] parts = parser.ReadFields();
            if (row == 0)
            {
                // Initialize the grid: the first row's length fixes the puzzle
                // dimensions (gridSize) and sub-grid size (its square root).
                gridSize = parts.Length;
                subGridSize = Convert.ToInt32(Math.Sqrt(gridSize));
                sudokuGrid = new NumberBlock[gridSize, gridSize];
                // Initialize the range of possible block values (1..gridSize)
                rangeOfValues = new List<int>(Enumerable.Range(1, gridSize).ToList());
            }
            // Populate the puzzle: one NumberBlock per cell of this row.
            for (var col = 0; col < gridSize; col++)
            {
                sudokuGrid[row, col] = new NumberBlock(row, col, subGridSize, Convert.ToInt32(parts[col]));
            }
            row++;
        }
    }
}
/// <summary>
/// Loads adjective names from a pipe-delimited word list and bulk-inserts them via
/// the w.AdjectiveAdd stored procedure.
/// </summary>
/// <param name="sourceFile">
/// Path to the pipe-delimited adjectives file. Defaults to the original
/// hard-coded development path for backward compatibility.
/// </param>
public void AddAdjectives(string sourceFile = @"C:\Users\Michael Ovies\Source\Repos\WorldGenerator\WorldGen.Database\WordsAndNames\Adjectives.txt")
{
    // currently this only handles races and not deitys, adjectives, locations or whatnot (not really masculine/feminine either)
    DataTable nameListing = new DataTable();
    nameListing.Columns.Add("Name", typeof(string));
    using (TextFieldParser parser = new TextFieldParser(sourceFile))
    {
        parser.Delimiters = new string[] { "|" };
        while (true)
        {
            string[] names = parser.ReadFields();
            if (names == null) break; // end of file
            foreach (string name in names)
            {
                DataRow row = nameListing.NewRow();
                row["Name"] = name;
                nameListing.Rows.Add(row);
            }
        }
    }
    // call the database
    // move this somewhere else when you have time
    SqlParameter[] parameters = new SqlParameter[1];
    parameters[0] = new SqlParameter("@Words", nameListing);
    DataTable returnedTable = _dataSource.Crud("w.AdjectiveAdd", parameters);
}
// Scans the DTS list file for consecutive entries whose fields 1-3 match and
// reports each duplicate pair in dupeListBox.
private void Continue_Click(object sender, RoutedEventArgs e)
{
    continueButton.IsEnabled = false;
    bool flag = false; // set when at least one duplicate is found
    string[] lastDTS = {""}; // previous data line's fields (length 1 until the first parse)
    dupeListBox.Items.Clear();
    string[] dtsList = File.ReadAllLines(DTSListSuite.App.mDtsListFile);
    foreach (string line in dtsList)
    {
        // Lines starting with "C/R" are skipped — presumably header/control
        // records; confirm against the file format.
        if (line.Substring(0, 3) != "C/R")
        {
            TextFieldParser parser = new TextFieldParser(new StringReader(line));
            parser.HasFieldsEnclosedInQuotes = true;
            parser.SetDelimiters(",");
            string[] ArLn = { "" };
            // A single line yields one ReadFields result; the loop just drains the parser.
            while (!parser.EndOfData) ArLn = parser.ReadFields();
            // Only compare once a previous multi-field line has been parsed.
            if (lastDTS.Length > 1)
            {
                if (ArLn[1] == lastDTS[1] && ArLn[2] == lastDTS[2] && ArLn[3] == lastDTS[3])
                {
                    dupeListBox.Items.Insert(0, ArLn[1] + " " + ArLn[2] + " " + ArLn[3] + "\tDuplicate!");
                    flag = true;
                }
            }
            lastDTS = ArLn;
        }
    }
    if (!flag) dupeListBox.Items.Insert(0, "No Duplicates!");
    continueButton.IsEnabled = true;
}
/// <summary>
/// Downloads the CSV at connectionURL and returns its rows as a jagged array,
/// one string[] per line.
/// </summary>
/// <returns>
/// A task that completes with the parsed rows, or faults when the download or
/// parse fails.
/// </returns>
public Task<string[][]> GetDataFromCSVStream()
{
    List<string[]> result = new List<string[]>();
    TaskCompletionSource<string[][]> resultTask = new TaskCompletionSource<string[][]>();
    try
    {
        HttpWebRequest req = (HttpWebRequest)WebRequest.Create(connectionURL);
        req.GetResponseAsync().ContinueWith(
            (task) =>
            {
                // Propagate download/parse failures to the returned task. The
                // original let task.Result throw inside the continuation, which
                // left the TaskCompletionSource forever unresolved (caller hang),
                // and it never disposed the WebResponse.
                try
                {
                    using (WebResponse resp = task.Result)
                    using (TextFieldParser parser = new TextFieldParser(resp.GetResponseStream()))
                    {
                        parser.Delimiters = new string[] { "," };
                        while (true)
                        {
                            string[] line = parser.ReadFields();
                            if (line == null) break;
                            result.Add(line);
                        }
                    }
                    resultTask.SetResult(result.ToArray());
                }
                catch (Exception inner)
                {
                    resultTask.SetException(inner);
                }
            }
        );
    }
    catch (Exception e)
    {
        // Preserved: synchronous setup failures record the message as a row.
        result.Add(new string[] { e.Message });
    }
    return resultTask.Task;
}
/// <summary>
/// Uses the Microsoft Text Field Parser to parse the FixedWidth file, to give a baseline
/// against an established class library for parsing.
/// </summary>
private static void _TextFieldParserFixedWidth()
{
    using (VB.TextFieldParser tfp = new VB.TextFieldParser(PerformanceTests.FW_DATA_FILE))
    {
        tfp.TextFieldType = VB.FieldType.FixedWidth;
        tfp.SetFieldWidths(new int[PerformanceTests.NUMBER_OF_COLUMNS_IN_DATA]
        {
            5, 5, 1, 28, 42, 15, 13, 9, 9, 1, 13, 14, 13, 6
        });
        tfp.CommentTokens = new string[] { "#" };
        tfp.HasFieldsEnclosedInQuotes = true;
        while (!tfp.EndOfData)
        {
            // Touch every parsed field so the parse work isn't optimized away.
            string[] row = tfp.ReadFields();
            for (int col = 0; col < row.Length; ++col)
            {
                string s = row[col] as string;
            }
        }
    }
}
/// <summary>
/// Streams a CSV file as dictionaries keyed by the first row's column names.
/// The path is resolved through ReflectedHost before opening.
/// </summary>
/// <param name="path">Path (possibly relative) of the CSV file.</param>
protected internal IEnumerable<Dictionary<string, string>> ReadCsvRecords(string path)
{
    var resolvedPath = ReflectedHost.ResolvePath(path);
    using (var parser = new TextFieldParser(resolvedPath))
    {
        parser.TextFieldType = FieldType.Delimited;
        parser.Delimiters = new string[] { "," };
        parser.TrimWhiteSpace = true;
        parser.HasFieldsEnclosedInQuotes = false;
        string[] headers = null;
        while (!parser.EndOfData)
        {
            var fields = parser.ReadFields();
            // The first line establishes the column names; later lines become records.
            if (headers == null)
            {
                headers = fields;
                continue;
            }
            var record = new Dictionary<string, string>();
            for (int i = 0; i < headers.Length; i++)
            {
                record.Add(headers[i], fields[i]);
            }
            yield return record;
        }
    }
}
// Reads SQMData.csv and prints one moon-phase value per distinct calendar date.
static void Main()
{
    List<MoonPhase> moonPhases = new List<MoonPhase>();
    using (TextFieldParser parser = new TextFieldParser(@"SQMData.csv"))
    {
        parser.TextFieldType = FieldType.Delimited;
        // Values in the input file are comma-separated.
        parser.SetDelimiters(",");
        // Ignore lines starting with '#'.
        parser.CommentTokens = new string[] {"#"};
        DateTime prevSkyclock = DateTime.MinValue;
        while (!parser.EndOfData)
        {
            string[] fields = null;
            try
            {
                fields = parser.ReadFields();
            }
            catch (MalformedLineException)
            {
                // Skip malformed lines.
                continue;
            }
            // File columns:
            // 0 - Year
            // 1 - Month
            // 2 - Day
            // 3 - Local_time
            // 4 - day_of_year
            // 5 - hour_of_day
            // 6 - Sky_Quality_(mag/arc_sec_**2)
            // 7 - SQM_temperature_(Celsius)
            // 8 - cloud_cover_(%)
            // 9 - seeing_(1-5)
            // 10 - transparency_(1-5)
            // 11 - skyclock_time/date_used
            // 12 - sunrise
            // 13 - sunset
            // 14 - moonrise
            // 15 - moonset
            // 16 - moonphase
            DateTime skyclock = DateTime.ParseExact(fields[11], "yyyy-MM-dd HH:mm:ss", CultureInfo.InvariantCulture);
            int moonPhase = int.Parse(fields[16]);
            // Record one phase per distinct calendar date.
            if (prevSkyclock != skyclock.Date)
            {
                moonPhases.Add(new MoonPhase { Date = skyclock.Date, Phase = moonPhase });
                prevSkyclock = skyclock.Date;
            }
        }
    }
    foreach (MoonPhase phase in moonPhases)
    {
        Console.WriteLine("{0:d} - {1}", phase.Date, phase.Phase);
    }
}
// File input.
// readFileName: path of the file to read
// return: list holding the parsed fields of each line
public static List<String[]> read(String readFileName)
{
    // Holds each line that has been read.
    List<String[]> readLines = new List<String[]>();
    try
    {
        // using disposes the parser on all paths; the original never closed it.
        using (TextFieldParser parser = new TextFieldParser(readFileName))
        {
            // Parse as delimited text.
            parser.TextFieldType = FieldType.Delimited;
            // Comma is the delimiter.
            parser.SetDelimiters(",");
            // Process until the end of the file.
            while (!parser.EndOfData)
            {
                String[] row = parser.ReadFields(); // read one line
                readLines.Add(row);                 // keep the line
            }
        }
    }
    catch (Exception e)
    {
        // Preserved best-effort behavior: report and return what was read so far.
        MessageBox.Show("" + e, "error", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }
    return readLines;
}
/// <summary>
/// Builds ForexRecord instances from the ForexTrading.csv test-data file
/// (currency pair, date, bid, ask per row).
/// </summary>
public static List<ForexRecord> BuildRecords()
{
    var dataFile = ConfigurationManager.AppSettings["TestDataDirectory"] + "\\ForexTrading.csv";
    var records = new List<ForexRecord>();
    using (var parser = new TextFieldParser(dataFile))
    {
        parser.TextFieldType = FieldType.Delimited;
        parser.SetDelimiters(",");
        while (!parser.EndOfData)
        {
            var row = parser.ReadFields();
            if (row == null)
            {
                continue;
            }
            var record = new ForexRecord();
            record.CurrencyPair = row[0];
            record.Date = row[1];
            record.Bid = double.Parse(row[2]);
            record.Ask = double.Parse(row[3]);
            records.Add(record);
        }
    }
    return records;
}
/// <summary>
/// Lets the user pick a CSV file (starting in My Documents), skips its 13
/// preamble lines, and collects the remaining comma-separated rows into parsedData.
/// </summary>
private void openButton_Click(object sender, EventArgs e)
{
    List<string[]> parsedData = new List<string[]>();
    try
    {
        // Detect the path of documents folder and assign it to initial directory path
        String myDocument = Environment.GetFolderPath(Environment.SpecialFolder.MyDocuments);
        openFile.InitialDirectory = myDocument;
        if (openFile.ShowDialog() == DialogResult.OK)
        {
            // using disposes the parser; the original leaked it.
            using (var parser = new Microsoft.VisualBasic.FileIO.TextFieldParser(openFile.FileName))
            {
                parser.TextFieldType = FieldType.Delimited;
                parser.SetDelimiters(",");
                parser.TrimWhiteSpace = true;
                // Skip the 13 preamble lines (was 13 copy-pasted ReadLine calls).
                for (int i = 0; i < 13; i++)
                {
                    parser.ReadLine();
                }
                while (!parser.EndOfData)
                {
                    string[] fields = parser.ReadFields();
                    parsedData.Add(fields);
                    Trace.WriteLine(parsedData);
                }
            }
        }
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
    }
}
/// <summary>
/// Parses a big text blob into rows and columns, using the settings
/// </summary>
/// <param name="text">Big blob of text</param>
/// <returns>Parsed data</returns>
public List<string[]> Parse(string text)
{
    // The actual _parsing_ .NET can handle. Well, VisualBasic anyway...
    using (var reader = new StringReader(text))
    using (var parser = new Microsoft.VisualBasic.FileIO.TextFieldParser(reader))
    {
        var errors = new StringBuilder();
        // Only configure a comment token when one was chosen.
        if (this.CommentCharacter != default(char))
        {
            parser.CommentTokens = new[] { this.CommentCharacter.ToString() };
        }
        parser.SetDelimiters(this.Separator.ToString());
        parser.HasFieldsEnclosedInQuotes = this.TextQualifier != default(char);
        // Fixed-width mode when explicit field widths were configured.
        if (this.FieldWidths != null)
        {
            parser.TextFieldType = FieldType.FixedWidth;
            try
            {
                parser.SetFieldWidths(this.FieldWidths.ToArray());
            }
            catch (Exception e)
            {
                errors.AppendLine(e.Message);
            }
        }
        var rows = new List<string[]>();
        while (!parser.EndOfData)
        {
            try
            {
                rows.Add(parser.ReadFields());
            }
            catch (MalformedLineException e)
            {
                // Collect bad-line diagnostics and keep parsing.
                errors.AppendFormat("Error on line {0}: {1}\n", e.LineNumber, e.Message);
            }
        }
        if (errors.Length > 0)
        {
            MessageBox.Show(errors.ToString(), "Errors");
        }
        return rows;
    }
}
/// <summary>
/// Loads a delimited text file into a DataTable via FormDataTable.
/// </summary>
/// <param name="location">Path of the text file.</param>
/// <param name="delimiter">Delimiter forwarded to FormDataTable (see note below).</param>
/// <returns>The resulting DataTable.</returns>
public static DataTable DataTableFromTextFile(string location, char delimiter)
{
    List<string[]> data = new List<string[]>();
    // using disposes the parser; the original never closed it.
    using (var parser = new Microsoft.VisualBasic.FileIO.TextFieldParser(location))
    {
        parser.TextFieldType = Microsoft.VisualBasic.FileIO.FieldType.Delimited;
        // NOTE(review): the parser always splits on "," regardless of the
        // 'delimiter' argument, which is only forwarded to FormDataTable.
        // Preserved as-is — confirm whether the parameter should be used here.
        parser.SetDelimiters(new string[] { "," });
        while (!parser.EndOfData)
        {
            string[] row = parser.ReadFields();
            data.Add(row);
        }
    }
    return FormDataTable(data, delimiter);
}
// Verifies that the custom CsvTextFieldParser's ReadLine results and end-of-data
// state match both a plain StringReader and the VB TextFieldParser on a simple
// two-record CSV.
public void ReadLine_Sample()
{
    const string input = @"Name,Birth Date
Apollo Creed,1942-08-17
Ivan Drago,1961-11-03";
    var parserReader = new StringReader(input);
    var parser = new NotVisualBasic.FileIO.CsvTextFieldParser(parserReader);
    var vbParser = new Microsoft.VisualBasic.FileIO.TextFieldParser(new StringReader(input));
    vbParser.SetDelimiters(",");
    // Each ReadLine must return the same text as the raw reader.
    Assert.Equal(vbParser.ReadLine(), parserReader.ReadLine());
    Assert.Equal(vbParser.ReadLine(), parserReader.ReadLine());
    Assert.Equal(vbParser.ReadLine(), parserReader.ReadLine());
    // All input consumed: ReadFields reports null and EndOfData is set.
    Assert.Null(vbParser.ReadFields());
    Assert.Null(parser.ReadFields());
    Assert.True(vbParser.EndOfData);
    Assert.True(parser.EndOfData);
}
/// <summary>
/// Consumes the header row from the CSV reader and, if the table has no columns
/// yet, adds one nullable column per header cell plus a trailing "Empty" column
/// (presumably a catch-all for rows with a trailing extra field — confirm).
/// Note: the header row is consumed from the reader either way.
/// </summary>
static private void AddDataColumns(Microsoft.VisualBasic.FileIO.TextFieldParser csvReader, DataTable table)
{
    string[] headerCells = csvReader.ReadFields();
    //If table has columns already, we don't want to add any.
    if (table.Columns.Count != 0)
    {
        return;
    }
    foreach (string headerCell in headerCells)
    {
        var column = new DataColumn { ColumnName = headerCell, AllowDBNull = true };
        table.Columns.Add(column);
    }
    var emptyColumn = new DataColumn { ColumnName = "Empty", AllowDBNull = true };
    table.Columns.Add(emptyColumn);
}
/// <summary>
/// Uses the Microsoft Text Field Parser to parse the CSV file, to give a baseline against an
/// established class library for parsing.
/// </summary>
private static void _TextFieldParserCsv()
{
    using (VB.TextFieldParser tfp = new VB.TextFieldParser(PerformanceTests.CSV_DATA_FILE))
    {
        tfp.SetDelimiters(",");
        tfp.CommentTokens = new string[] { "#" };
        tfp.HasFieldsEnclosedInQuotes = true;
        while (!tfp.EndOfData)
        {
            // Touch every parsed field so the parse work isn't optimized away.
            string[] row = tfp.ReadFields();
            for (int col = 0; col < row.Length; ++col)
            {
                string s = row[col] as string;
            }
        }
    }
}
/// <summary>
/// Loads a CSV file into <paramref name="CSVTable"/>, converting each field
/// to its column's declared type (String, Decimal, or DateTime).
/// Returns false (after a message box) if a malformed line aborts the read.
/// NOTE(review): columns whose DataType name is not one of the three handled
/// cases (e.g. Int32) are left unassigned, exactly as in the original —
/// confirm that is intended.
/// </summary>
public static bool GetCsvData(string CSVFileName, ref DataTable CSVTable)
{
    // BUG FIX: the original was a literal VB-to-C# paste that did not compile
    // (My.Computer..., Columns(i), dr.Item(i), currentRow(i)); rewritten with
    // valid C# indexers and a using block so the parser is always disposed.
    using (var reader = new Microsoft.VisualBasic.FileIO.TextFieldParser(CSVFileName))
    {
        reader.TextFieldType = Microsoft.VisualBasic.FileIO.FieldType.Delimited;
        reader.Delimiters = new string[] { "," };
        while (!reader.EndOfData)
        {
            try
            {
                string[] currentRow = reader.ReadFields();
                DataRow dr = CSVTable.NewRow();
                for (int currColumn = 0; currColumn < CSVTable.Columns.Count; currColumn++)
                {
                    string sqlColumnDataType = CSVTable.Columns[currColumn].DataType.Name;
                    switch (sqlColumnDataType)
                    {
                        case "String":
                            dr[currColumn] = string.IsNullOrEmpty(currentRow[currColumn])
                                ? ""
                                : Convert.ToString(currentRow[currColumn]);
                            break;
                        case "Decimal":
                            dr[currColumn] = string.IsNullOrEmpty(currentRow[currColumn])
                                ? (object)0
                                : (object)Convert.ToDecimal(currentRow[currColumn]);
                            break;
                        case "DateTime":
                            // BUG FIX: the original assigned "" for empty cells,
                            // which throws on a DateTime-typed column; store DBNull.
                            dr[currColumn] = string.IsNullOrEmpty(currentRow[currColumn])
                                ? (object)DBNull.Value
                                : (object)Convert.ToDateTime(currentRow[currColumn]);
                            break;
                    }
                }
                CSVTable.Rows.Add(dr);
            }
            catch (Microsoft.VisualBasic.FileIO.MalformedLineException ex)
            {
                Interaction.MsgBox("Line " + ex.Message + "is not valid." + "\r\n" + "Terminating Read Operation.");
                return false;
            }
        }
    }
    return true;
}
/// <summary>
/// Loads the Orders CSV extract into an in-memory DataTable matching the
/// [dbo].[Orders] schema, then bulk-copies it to SQL Server.
/// </summary>
private void Button6_Click(System.Object sender, System.EventArgs e)
{
    // Define the Column Definition
    DataTable dt = new DataTable();
    dt.Columns.Add("OrderID", typeof(int));
    dt.Columns.Add("CustomerID", typeof(string));
    dt.Columns.Add("EmployeeID", typeof(int));
    dt.Columns.Add("OrderDate", typeof(System.DateTime));
    dt.Columns.Add("RequiredDate", typeof(System.DateTime));
    dt.Columns.Add("ShippedDate", typeof(System.DateTime));
    dt.Columns.Add("ShipVia", typeof(int));
    dt.Columns.Add("Freight", typeof(decimal));
    dt.Columns.Add("ShipName", typeof(string));
    dt.Columns.Add("ShipAddress", typeof(string));
    dt.Columns.Add("ShipCity", typeof(string));
    dt.Columns.Add("ShipRegion", typeof(string));
    dt.Columns.Add("ShipPostalCode", typeof(string));
    dt.Columns.Add("ShipCountry", typeof(string));
    // BUG FIX: original read 'using (cn == new SqlConnection(...))' — a
    // comparison against an undeclared variable; declare the connection here.
    using (SqlConnection cn = new SqlConnection("Server='Server_Name';Database='Database_Name';Trusted_Connection=True;"))
    {
        cn.Open();
        // BUG FIX: VB-style syntax (My.Computer..., dt.Columns(i), dr.Item(i),
        // currentRow(i)) replaced with valid C#; parser wrapped in 'using'.
        using (var reader = new Microsoft.VisualBasic.FileIO.TextFieldParser("C:\\Users\\Excel\\Desktop\\OrdersTest.csv"))
        {
            reader.TextFieldType = Microsoft.VisualBasic.FileIO.FieldType.Delimited;
            reader.Delimiters = new string[] { "," };
            while (!reader.EndOfData)
            {
                try
                {
                    string[] currentRow = reader.ReadFields();
                    DataRow dr = dt.NewRow();
                    for (int currColumn = 0; currColumn < dt.Columns.Count; currColumn++)
                    {
                        string sqlColumnDataType = dt.Columns[currColumn].DataType.Name;
                        switch (sqlColumnDataType)
                        {
                            case "String":
                                dr[currColumn] = string.IsNullOrEmpty(currentRow[currColumn])
                                    ? ""
                                    : Convert.ToString(currentRow[currColumn]);
                                break;
                            case "Int32":
                                // BUG FIX: the original switch had no case for the
                                // table's int columns (OrderID, EmployeeID, ShipVia),
                                // leaving them DBNull on every row.
                                dr[currColumn] = string.IsNullOrEmpty(currentRow[currColumn])
                                    ? (object)DBNull.Value
                                    : (object)Convert.ToInt32(currentRow[currColumn]);
                                break;
                            case "Decimal":
                                dr[currColumn] = string.IsNullOrEmpty(currentRow[currColumn])
                                    ? (object)0
                                    : (object)Convert.ToDecimal(currentRow[currColumn]);
                                break;
                            case "DateTime":
                                // BUG FIX: assigning "" to a DateTime column throws;
                                // store DBNull for empty cells.
                                dr[currColumn] = string.IsNullOrEmpty(currentRow[currColumn])
                                    ? (object)DBNull.Value
                                    : (object)Convert.ToDateTime(currentRow[currColumn]);
                                break;
                        }
                    }
                    dt.Rows.Add(dr);
                }
                catch (Microsoft.VisualBasic.FileIO.MalformedLineException ex)
                {
                    Interaction.MsgBox("Line " + ex.Message + "is not valid." + "\r\n" + "Terminating Read Operation.");
                    break; // stop reading on the first malformed line
                }
            }
        }
        using (SqlBulkCopy copy = new SqlBulkCopy(cn))
        {
            copy.DestinationTableName = "[dbo].[Orders]";
            copy.WriteToServer(dt);
        }
    }
}
/// <summary>
/// Reads a tab-delimited quotes file of (name, date, value) rows, plots the
/// parseable points onto the chart series named by the first column, tracks
/// the overall date range in the dateMin/dateMax fields, and finally snaps
/// the chart's X axis and the date pickers to that range.
/// NOTE(review): only the FIRST row's series name is used — once 'series'
/// is set, later rows are added to it regardless of their name column; and
/// if a series with that name already exists, the whole import stops.
/// Presumably intentional (one file = one new series); confirm with callers.
/// </summary>
private void AddDataFromFile(string data_file)
{
    using (TextFieldParser parser = new Microsoft.VisualBasic.FileIO.TextFieldParser(data_file))
    {
        parser.TrimWhiteSpace = true;
        parser.TextFieldType = FieldType.Delimited;
        parser.SetDelimiters("\t");
        System.Windows.Forms.DataVisualization.Charting.Series series = null;
        while (true)
        {
            // ReadFields returns null at end of input — the loop's exit.
            string[] parts = parser.ReadFields();
            if (parts == null)
            {
                break;
            }
            // Rows with fewer than 3 fields are silently skipped.
            if (parts.Length >= 3)
            {
                string name = parts[0];
                string date = parts[1];
                string value = parts[2];
                if (series == null)
                {
                    if (chartQuotes.Series.FindByName(name) == null)
                    {
                        // First data row creates the line series for this file.
                        chartQuotes.Series.Add(name);
                        series = chartQuotes.Series[name];
                        series.ChartType = System.Windows.Forms.DataVisualization.Charting.SeriesChartType.Line;
                    }
                    else
                    {
                        // Series already present: abort the import entirely.
                        break;
                    }
                }
                DateTime dt;
                double d;
                // Unparseable rows are silently skipped.
                if (Double.TryParse(value, out d) && DateTime.TryParse(date, out dt))
                {
                    series.Points.AddXY(dt, d);
                    // Track the minimum date seen across all imported points.
                    // (The 'new DateTime()' assignment is immediately overwritten.)
                    if (!dateMin.HasValue)
                    {
                        dateMin = new DateTime();
                        dateMin = dt;
                    }
                    else if (dt < dateMin)
                    {
                        dateMin = dt;
                    }
                    // Track the maximum date seen.
                    if (!dateMax.HasValue)
                    {
                        dateMax = new DateTime();
                        dateMax = dt;
                    }
                    else if (dt > dateMax)
                    {
                        dateMax = dt;
                    }
                }
            }
        }
    }
    // Fit the X axis and the from/to date pickers to the imported range.
    if (dateMin.HasValue && dateMax.HasValue)
    {
        chartQuotes.ChartAreas[0].AxisX.Minimum = dateMin.Value.ToOADate();
        chartQuotes.ChartAreas[0].AxisX.Maximum = dateMax.Value.ToOADate();
        dateFrom.Value = dateMin.Value;
        dateTo.Value = dateMax.Value;
    }
}
/// <summary>
/// Imports a semicolon-separated CSV description into the given Submodel.
/// The header must start with "typeName;idShort;value"; an optional extended
/// header adds "valueType;category;descriptionEN;descriptionDE" (plus an 8th
/// semanticId column used by the data rows). Data rows create nested
/// SubmodelElementCollections and Properties.
/// </summary>
public static void ImportCSVtoSubModel(
    string inputFn, AdminShell.AdministrationShellEnv env, AdminShell.Submodel sm, AdminShell.SubmodelRef smref)
{
    AdminShell.SubmodelElementCollection[] propGroup = new AdminShell.SubmodelElementCollection[10];
    int i_propGroup = 0;
    var parser = new Microsoft.VisualBasic.FileIO.TextFieldParser(inputFn);
    parser.TextFieldType = Microsoft.VisualBasic.FileIO.FieldType.Delimited;
    parser.SetDelimiters(";");
    string[] rows = parser.ReadFields();
    if (rows == null)
    {
        throw new InvalidOperationException(
            $"There were no fields read from the inputFn: {inputFn}");
    }
    // BUG FIX: the original indexed rows[0..6] whenever rows.Length > 3,
    // throwing IndexOutOfRangeException on headers with 4-6 columns.
    if (rows.Length < 3
        || rows[0] != "typeName" || rows[1] != "idShort" || rows[2] != "value"
        || (rows.Length > 3 && (
            rows.Length < 7
            || rows[3] != "valueType" || rows[4] != "category"
            || rows[5] != "descriptionEN" || rows[6] != "descriptionDE")))
    {
        return;
    }
    // Derive the submodel idShort from the file name.
    sm.idShort = inputFn.Split('\\').Last().Replace(".csv", "");
    while (!parser.EndOfData)
    {
        rows = parser.ReadFields();
        if (rows == null)
        {
            throw new InvalidOperationException(
                $"There were no fields read from inputFn: {inputFn}");
        }
        switch (rows[0])
        {
            case "SubmodelElementCollection":
                // BUG FIX: guard rows[1] for malformed short rows.
                if (rows.Length < 2)
                {
                    break;
                }
                propGroup[i_propGroup] = AdminShell.SubmodelElementCollection.CreateNew(rows[1]);
                if (i_propGroup == 0)
                {
                    sm.Add(propGroup[0]);
                    // BUG FIX: rows[7] was read whenever Length > 3; require 8 columns.
                    if (rows.Length > 7 && rows[7] != "")
                    {
                        propGroup[0].semanticId = new AdminShellV20.SemanticId(
                            AdminShell.Reference.CreateNew(
                                "ConceptDescription", false, "IRI", rows[7]));
                    }
                    propGroup[0].kind = AdminShellV20.ModelingKind.CreateAsInstance();
                }
                else
                {
                    // Nest under the currently open collection.
                    propGroup[i_propGroup - 1].Add(propGroup[i_propGroup]);
                }
                i_propGroup++;
                break;
            case "End-SubmodelElementCollection":
                if (i_propGroup != 0)
                {
                    i_propGroup--;
                }
                break;
            case "Property":
                // BUG FIX: guard rows[1]/rows[2] for malformed short rows.
                if (rows.Length < 3)
                {
                    break;
                }
                var p = AdminShell.Property.CreateNew(rows[1].Replace("-", "_"));
                p.value = rows[2];
                if (rows.Length > 3)
                {
                    p.valueType = rows[3];
                    // BUG FIX: each optional column is now guarded individually;
                    // the original indexed rows[4..7] whenever Length > 3.
                    if (rows.Length > 4)
                    {
                        p.category = rows[4];
                    }
                    if (rows.Length > 5 && rows[5] != "")
                    {
                        p.AddDescription("en", rows[5]);
                    }
                    if (rows.Length > 6 && rows[6] != "")
                    {
                        p.AddDescription("de", rows[6]);
                    }
                    p.kind = AdminShellV20.ModelingKind.CreateAsInstance();
                    if (rows.Length > 7 && rows[7] != "")
                    {
                        p.semanticId = new AdminShell.SemanticId(
                            AdminShell.Reference.CreateNew(
                                "ConceptDescription", false, "IRI", rows[7]));
                    }
                }
                if (i_propGroup == 0)
                {
                    sm.Add(p);
                }
                else
                {
                    propGroup[i_propGroup - 1].Add(p);
                }
                break;
        }
    }
}
/// <summary>
/// Handles the upload button: validates the posted file's content type,
/// saves it under temp/xls, parses it as a comma-delimited file, and inserts
/// each data row (skipping the header and repeated "NIK" header rows) into
/// the leave-quota staging table via HR_CUTI_KUOTA_DAL. A text log of the
/// whole run is written via CreateLog.
/// </summary>
protected void btnUpload_Click(object sender, EventArgs e)
{
    var valid = true;
    if (!FileUploadControl.HasFile)
    {
        return;
    }
    var filename = FileUploadControl.FileName;
    var s = filename.Split('.');
    var extension = s[s.Length - 1];
    var saveAsFileName = "UploadKuotaCutiPegawai_" + DateTime.Now.ToString("yyyyMMddhhmmss") + "." + extension;
    var contenttype = FileUploadControl.PostedFile.ContentType;
    var log = "";
    try
    {
        if (contenttype == "application/octet-stream" ||
            contenttype == "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet" ||
            contenttype == "application/vnd.ms-excel" ||
            contenttype == "text/csv")
        {
            FileUploadControl.SaveAs(Server.MapPath(@"../../temp/xls/") + saveAsFileName);
            log += "Upload file " + filename + " success.\t" + DateTime.Now.ToString(CultureInfo.InvariantCulture) + "\n\r";
            var uploadedPath = Server.MapPath(@"../../temp/xls/") + saveAsFileName;
            var ctr = 0;
            // BUG FIX: the parser (and its open file handle) was never disposed.
            using (var parser = new Microsoft.VisualBasic.FileIO.TextFieldParser(uploadedPath))
            {
                parser.TextFieldType = Microsoft.VisualBasic.FileIO.FieldType.Delimited;
                parser.SetDelimiters(new string[] { "," });
                while (!parser.EndOfData)
                {
                    string[] row = parser.ReadFields();
                    if (ctr == 0)
                    {
                        // Skip the header row.
                        ctr = 1;
                    }
                    else if (row != null)
                    {
                        var employee_code = row[0];
                        var employee_name = row[1];
                        var start_date = row[4];
                        var end_date = row[5];
                        var Quota = row[6];
                        // Skip rows that repeat the "NIK" header.
                        if (employee_code != "NIK")
                        {
                            try
                            {
                                HR_CUTI_KUOTA_DAL _dalHR_CUTI_KUOTA_DAL = new HR_CUTI_KUOTA_DAL();
                                Hashtable htParameters = new Hashtable();
                                var iNextId = 0;
                                htParameters["p_NIK"] = employee_code;
                                htParameters["p_START_DATE"] = Utility.ToDateTime(start_date);
                                htParameters["p_END_DATE"] = Utility.ToDateTime(end_date);
                                htParameters["p_Quota"] = Quota;
                                Utility.ApplyDefaultProp(htParameters);
                                _dalHR_CUTI_KUOTA_DAL.InsertTempBalance(htParameters, ref iNextId);
                                log += "Insert Data : " + employee_code + " - " + employee_name + " Success.\t" +
                                       DateTime.Now.ToString(CultureInfo.InvariantCulture) + "\n\r";
                            }
                            catch (Exception exc)
                            {
                                // BUG FIX: exc.InnerException can be null, which made the
                                // error handler itself throw NullReferenceException.
                                var detail = exc.InnerException != null ? exc.InnerException.Message : exc.Message;
                                log += "Insert Data : " + employee_code + " - " + employee_name + " Failed.\t" +
                                       detail + "\t" + DateTime.Now.ToString(CultureInfo.InvariantCulture) + "\n\r";
                                valid = false;
                            }
                        }
                    }
                }
            }
            Utility.ShowMessageBox(this,
                valid ? "Upload Success" : "Upload Success but error in processing data. Check Log for detail info.",
                null, "kuotacutiimport.aspx");
        }
        else
        {
            Utility.ShowMessageBox(this, "Error Excel Format", null, null);
        }
    }
    catch (Exception ex)
    {
        Utility.ShowMessageBox(this, "Upload Failed.", ex, null);
    }
    CreateLog(log);
}
/// <summary>
/// Parses a tab-delimited compressor export into CompressorInputLine records.
/// The first line (header) is skipped; every remaining line is mapped by
/// fixed column position.
/// NOTE(review): the positional mapping below is alphabetical by property
/// name and does NOT match the column order listed in the most recent CSV
/// header we've seen (Id, Asset Name, Local Timestamp, UTC Milliseconds,
/// Compressor Oil Pressure, Engine Oil Pressure, Fuel Pressure, ...) —
/// confirm against a current sample file.
/// </summary>
public static List<CompressorInputLine> Create(Stream inputFile)
{
    var reader = new Microsoft.VisualBasic.FileIO.TextFieldParser(inputFile);
    reader.TextFieldType = Microsoft.VisualBasic.FileIO.FieldType.Delimited;
    reader.Delimiters = new string[] { "\t" };
    // Discard the header line.
    reader.ReadLine();
    var results = new List<CompressorInputLine>();
    while (!reader.EndOfData)
    {
        string[] cols = reader.ReadFields();
        results.Add(new CompressorInputLine()
        {
            Id = parseInt(cols[0]),
            AssetName = cols[1],
            LocalTimestamp = parseDate(cols[2]),
            UTCMilliseconds = parseInt(cols[3]),
            CompressorOilPressure = parseDouble(cols[4]),
            CompressorOilTemp = parseDouble(cols[5]),
            CompressorStages = parseInt(cols[6]),
            Cylinder1DischargeTemp = parseDouble(cols[7]),
            Cylinder2DischargeTemp = parseDouble(cols[8]),
            Cylinder3DischargeTemp = parseDouble(cols[9]),
            Cylinder4DischargeTemp = parseDouble(cols[10]),
            DowntimeHrsYest = parseDouble(cols[11]),
            EngineOilPressure = parseDouble(cols[12]),
            EngineOilTemp = parseDouble(cols[13]),
            FacilityDesc = cols[14],
            FacilityId = cols[15],
            FuelPressure = parseDouble(cols[16]),
            GasFlowRate = parseDouble(cols[17]),
            GasFlowRate_RAW = parseDouble(cols[18]),
            Horsepower = parseDouble(cols[19]),
            LastSuccessfulCommTime = parseDate(cols[20]),
            MaxDischargePressure = parseDouble(cols[21]),
            MaxGasFlowrate = parseDouble(cols[22]),
            MaxRPMs = parseDouble(cols[23]),
            MaxSuctionPressure = parseDouble(cols[24]),
            PctSuccessfulMsgsToday = parseDouble(cols[25]),
            RPM = parseDouble(cols[26]),
            RunStatus = cols[27],
            RuntimeHrs = parseDouble(cols[28]),
            SDStatusCode = cols[29],
            Stage1DischargePressure = parseDouble(cols[30]),
            Stage2DischargePressure = parseDouble(cols[31]),
            Stage3DischargePressure = parseDouble(cols[32]),
            SuccessfulMsgsToday = parseInt(cols[33]),
            SuctionPressure = parseDouble(cols[34]),
            SuctionTemp = parseDouble(cols[35]),
            TOWCompName = cols[36],
            UnitSize = cols[37]
        });
    }
    return results;
}
/// <summary>
/// Imports a simplified semicolon-separated CSV into the given Submodel:
/// the header must be exactly "typeName;idShort;value"; data rows create
/// nested SubmodelElementCollections and string-typed Properties.
/// </summary>
public static void ImportCSVtoSubModel(
    string inputFn, AdminShell.AdministrationShellEnv env, AdminShell.Submodel sm, AdminShell.SubmodelRef smref)
{
    AdminShell.SubmodelElementCollection[] propGroup = new AdminShell.SubmodelElementCollection[10];
    int i_propGroup = 0;
    var parser = new Microsoft.VisualBasic.FileIO.TextFieldParser(inputFn);
    parser.TextFieldType = Microsoft.VisualBasic.FileIO.FieldType.Delimited;
    parser.SetDelimiters(";");
    string[] rows = parser.ReadFields();
    if (rows == null || rows.Length < 3
        || rows[0] != "typeName" || rows[1] != "idShort" || rows[2] != "value")
    {
        return;
    }
    while (!parser.EndOfData)
    {
        rows = parser.ReadFields();
        if (rows == null || rows.Length < 1)
        {
            continue;
        }
        switch (rows[0])
        {
            case "SubmodelElementCollection":
                // BUG FIX: rows[1] was read without checking the row length,
                // throwing IndexOutOfRangeException on a bare type-name row.
                if (rows.Length < 2)
                {
                    break;
                }
                propGroup[i_propGroup] = AdminShell.SubmodelElementCollection.CreateNew(rows[1]);
                if (i_propGroup == 0)
                {
                    sm.Add(propGroup[0]);
                }
                else
                {
                    // Nest under the currently open collection.
                    propGroup[i_propGroup - 1].Add(propGroup[i_propGroup]);
                }
                i_propGroup++;
                break;
            case "End-SubmodelElementCollection":
                if (i_propGroup != 0)
                {
                    i_propGroup--;
                }
                break;
            case "Property":
                // BUG FIX: rows[1] and rows[2] were read without a length check.
                if (rows.Length < 3)
                {
                    break;
                }
                var p = AdminShell.Property.CreateNew(rows[1]);
                p.valueType = "string";
                p.value = rows[2];
                if (i_propGroup == 0)
                {
                    sm.Add(p);
                }
                else
                {
                    propGroup[i_propGroup - 1].Add(p);
                }
                break;
        }
    }
}
/// <summary>
/// Loads the hospital and vendor denial-record CSV extracts and computes,
/// in each direction, the records present in one source but not the other.
/// </summary>
public static void Main(string[] args)
{
    // HARD_CODED FOR EXAMPLE ONLY - TO BE RETRIEVED FROM APP.CONFIG IN REAL PROGRAM
    string hospPath = @"C:\\events\\inbound\\OBLEN_COB_Active_Inv_Advi_Daily_.csv";
    string vendPath = @"C:\\events\\outbound\\Advi_OBlen_Active_Inv_Ack_Daily_.csv";
    // The two extracts share a format, so one loader handles both
    // (the original duplicated the whole parse loop per file).
    List<DenialRecord> hospList = LoadDenialRecords(hospPath);
    List<DenialRecord> vendList = LoadDenialRecords(vendPath);
    // Compare the lists each way for denials not in the other source
    List<DenialRecord> hospExcpt = hospList.Except(vendList).ToList();
    List<DenialRecord> vendExcpt = vendList.Except(hospList).ToList();
}

// Reads one comma-delimited extract into DenialRecord objects, one per row
// with at least 7 fields; parse errors are logged and the row skipped.
private static List<DenialRecord> LoadDenialRecords(string path)
{
    var records = new List<DenialRecord>();
    // The explicit Close()/Dispose() calls inside the original using blocks
    // were redundant and have been removed.
    using (TextFieldParser parser = new Microsoft.VisualBasic.FileIO.TextFieldParser(path))
    {
        parser.TextFieldType = FieldType.Delimited;
        parser.SetDelimiters(",");
        parser.HasFieldsEnclosedInQuotes = false;
        parser.TrimWhiteSpace = true;
        while (!parser.EndOfData)
        {
            try
            {
                string[] row = parser.ReadFields();
                // BUG FIX: the original tested row.Length <= 7 and then read
                // row[0]..row[6], so every row SHORTER than 7 fields threw
                // IndexOutOfRangeException into the catch below; require >= 7.
                if (row != null && row.Length >= 7)
                {
                    records.Add(new DenialRecord(row[0], row[1], row[2], row[3], row[4], row[5], row[6]));
                }
            }
            catch (Exception e)
            {
                // do something
                Console.WriteLine("Error is: {0}", e.ToString());
            }
        }
    }
    return records;
}
/// <summary>
/// Parses the uploaded semicolon-delimited file into a CSVFile object:
/// the first row supplies the column names, the remaining rows the values,
/// and per-column types are inferred as "N" (numeric) or "S" (string).
/// </summary>
/// <param name="project">Project name stored on the resulting CSVFile.</param>
/// <param name="csvFile">File name under ~/App_Data/Files/.</param>
public CSVFile UploadToObject(string project, string csvFile)
{
    var nameList = new List<string>();
    var valueList = new List<List<string>>();
    var typeList = new List<string>();
    var path = Path.Combine(Server.MapPath("~/App_Data/Files/"), csvFile);
    // NOTE(review): Encoding.UTF7 is obsolete and insecure; kept because
    // changing it would alter how existing uploads decode — confirm the
    // files really are UTF-7 (UTF-8 is far more likely) before migrating.
    // BUG FIX: the parser (and its file handle) was never disposed.
    using (var parser = new Microsoft.VisualBasic.FileIO.TextFieldParser(path, System.Text.Encoding.UTF7))
    {
        parser.TextFieldType = Microsoft.VisualBasic.FileIO.FieldType.Delimited;
        // Delimiter set to ';' — this is where we could change code to
        // handle other delimiters as well.
        parser.SetDelimiters(new string[] { ";" });
        int i = 0;
        while (!parser.EndOfData)
        {
            // Get one row of data
            string[] row = parser.ReadFields();
            if (i == 0)
            {
                // First row: the column names.
                nameList = row.ToList();
            }
            else
            {
                valueList.Add(row.ToList());
                if (i == 1)
                {
                    // First data row establishes the initial per-column type
                    // guess: "N" if the cell parses as a double, else "S".
                    typeList = row.ToList();
                    int j = 0;
                    foreach (var term in row)
                    {
                        typeList[j] = double.TryParse(term, out double n) ? "N" : "S";
                        j++;
                    }
                }
                else
                {
                    // Later rows can only demote a column from numeric ("N")
                    // to string ("S"); a single non-numeric cell suffices.
                    int j = 0;
                    foreach (var term in row)
                    {
                        if (!double.TryParse(term, out double n))
                        {
                            typeList[j] = "S";
                        }
                        j++;
                    }
                }
            }
            i++;
        }
    }
    // Create CSVFile object, give the properties values and return
    CSVFile myFile = new CSVFile();
    myFile.Project = project;
    myFile.NameList = nameList;
    myFile.ValueList = valueList;
    myFile.TypeList = typeList;
    return myFile;
}