public bool Read()
{
    // Loads every record of the CSV file into dataList.
    // Returns false when the file is missing or cannot be parsed.
    if (!File.Exists(this.filename))
    {
        return false;
    }
    this.dataList.Clear();
    try
    {
        // using disposes the parser on every exit path (Close() is just a
        // wrapper around Dispose(), so behavior is unchanged).
        using (var parser = new TextFieldParser(this.filename, this.encode))
        {
            parser.TextFieldType = FieldType.Delimited;
            parser.SetDelimiters(",");
            parser.TrimWhiteSpace = false;
            while (!parser.EndOfData)
            {
                this.dataList.Add(parser.ReadFields());
            }
        }
    }
    catch
    {
        // Any parse/IO failure is reported as a simple false result.
        return false;
    }
    return true;
}
/*
 * Parses the user-specified CSV file and returns its rows as a list of
 * field arrays. Each row is cleaned via StripFront/StripBack before being
 * added to the result.
 */
public List <string[]> parseCSV(string file)
{
    List <string[]> parsedData = new List <string[]>();
    // Fix: using guarantees the parser (and its file handle) is released
    // even when ReadFields throws on malformed input — it was leaked before.
    using (TextFieldParser parser = new TextFieldParser(file))
    {
        parser.TextFieldType = FieldType.Delimited;
        parser.SetDelimiters(",");
        while (!parser.EndOfData)
        {
            string[] fields = parser.ReadFields();
            StripFront(fields);
            StripBack(fields);
            parsedData.Add(fields);
        }
    }
    return(parsedData);
}
/// <summary>
/// Loads a CSV file into a DataTable of string columns. The first record is
/// the header and defines the columns; later records become rows (fields
/// beyond the header width are ignored). Returns null for an empty file.
/// </summary>
/// <param name="csvPath">Path of the CSV file.</param>
/// <param name="separator">Field separator; defaults to ',' when null.</param>
public DataTable LoadDataTableFromCSV2(string csvPath, char?separator = null)
{
    if (MyLoadDataTableFromCSV != null)
    {
        // External override hook takes precedence.
        return(MyLoadDataTableFromCSV(csvPath, separator));
    }
    DataTable result = null;
    bool isHeader = true;
    TextFieldParser csvParser = null;
    try
    {
        try
        {
            csvParser = new TextFieldParser(csvPath, DefaultEncoding);
        }
        catch
        {
            // The file may be locked by another process: parse a copy instead.
            string newPath = FileHelper.GetTempUniqueFileName(csvPath);
            File.Copy(csvPath, newPath);
            csvParser = new TextFieldParser(newPath, DefaultEncoding);
            FileHelper.PurgeTempApplicationDirectory();
        }
        if (separator == null)
        {
            separator = ',';
        }
        //csvParser.CommentTokens = new string[] { "#" };
        csvParser.SetDelimiters(new string[] { separator.ToString() });
        csvParser.HasFieldsEnclosedInQuotes = true;
        while (!csvParser.EndOfData)
        {
            string[] fields = csvParser.ReadFields();
            if (isHeader)
            {
                // First record: create the table and its string columns.
                result = new DataTable();
                for (int i = 0; i < fields.Length; i++)
                {
                    result.Columns.Add(new DataColumn(fields[i], typeof(string)));
                }
                isHeader = false;
            }
            else
            {
                var row = result.Rows.Add();
                for (int i = 0; i < fields.Length && i < result.Columns.Count; i++)
                {
                    row[i] = fields[i];
                    // NUL characters indicate binary garbage; blank the cell.
                    if (row[i].ToString().Contains("\0"))
                    {
                        row[i] = "";
                    }
                }
            }
        }
        return(result);
    }
    finally
    {
        // Fix: the parser was previously leaked when ReadFields threw
        // mid-file; always release it.
        if (csvParser != null)
        {
            csvParser.Close();
        }
    }
}
// Reads the semicolon-delimited CSV at `path` and adds one Szamok (built
// from columns 11-15) to the kontener for every record. Lines starting
// with '#' are treated as comments.
public Kontener InReader()
{
    // Fix: using disposes the parser even when int.Parse throws on a bad
    // row (it was previously leaked in that case).
    using (TextFieldParser csvParser = new TextFieldParser(path))
    {
        csvParser.CommentTokens = new string[] { "#" };
        csvParser.SetDelimiters(new string[] { ";" });
        csvParser.HasFieldsEnclosedInQuotes = true;
        // Skip the row with the column names
        // csvParser.ReadLine();
        while (!csvParser.EndOfData)
        {
            // Read current line fields, pointer moves to the next line.
            string[] fields = csvParser.ReadFields();
            string One = fields[11];
            string Two = fields[12];
            string Three = fields[13];
            string Four = fields[14];
            string Five = fields[15];
            kontener.Hozzad(new Szamok(int.Parse(One), int.Parse(Two), int.Parse(Three), int.Parse(Four), int.Parse(Five)));
        }
    }
    return(kontener);
}
// Verifies that TextFieldParser.Close is safe in every disposal pattern over
// a real file: Close inside a using block (stream-based and path-based
// constructors), calling Close twice, and calling Close after the using
// block has already disposed the parser. None of these should throw.
public void Close() { var path = GetTestFilePath(); File.WriteAllText(path, "abc123"); using (var stream = new FileStream(path, FileMode.Open)) { using (var parser = new TextFieldParser(stream)) { parser.Close(); } } using (var parser = new TextFieldParser(path)) { parser.Close(); } { var parser = new TextFieldParser(path); parser.Close(); parser.Close(); } { TextFieldParser parser; using (parser = new TextFieldParser(path)) { } parser.Close(); } }
// Rebuilds the map palette from the MapColor.csv of the currently selected
// color set (Shift_JIS encoded; the first record is a header and is skipped).
private void LoadMapColors()
{
    Convert.Colors.mappalette.Clear();
    // Fix: using disposes the parser even when int.Parse throws on a bad
    // row (it was previously leaked in that case).
    using (TextFieldParser parser = new TextFieldParser(System.AppDomain.CurrentDomain.SetupInformation.ApplicationBase + @"\data\color\" + comboBox2.SelectedItem.ToString() + @"\MapColor.csv", Encoding.GetEncoding("Shift_JIS")))
    {
        parser.TextFieldType = FieldType.Delimited;
        parser.SetDelimiters(",");
        // Skip the header record.
        parser.ReadFields();
        while (parser.EndOfData == false)
        {
            var block = new Convert.Block();
            string[] column = parser.ReadFields();
            // Columns: 0 = block ID, 1-3 = R, G, B.
            block.ID = int.Parse(column[0]);
            block.R = int.Parse(column[1]);
            block.G = int.Parse(column[2]);
            block.B = int.Parse(column[3]);
            Convert.Colors.mappalette.Add(block);
        }
    }
}
// Loads the cleanup-exception mapping (key;value per line) from the shared
// network CSV into the `exceptions` dictionary. Shows a message box and
// leaves the dictionary partially filled when the file cannot be parsed;
// silently returns when the file does not exist.
public void FillExceptions()
{
    string sourceFilePath = @"\\mede1\partage\,FGA Front Office\02_Gestion_Actions\00_BASE\Base 2.0\NettoyageExceptions.csv";
    FileInfo myFile = new FileInfo(sourceFilePath);
    if (!myFile.Exists)
    {
        return;
    }
    exceptions = new Dictionary <string, string>();
    try
    {
        // Fix: using disposes the parser even when ReadFields or a duplicate
        // key throws (it was previously leaked in that case).
        using (TextFieldParser csvReader = new TextFieldParser(myFile.ToString()))
        {
            csvReader.SetDelimiters(new String[] { ";" });
            csvReader.HasFieldsEnclosedInQuotes = false;
            while (!csvReader.EndOfData)
            {
                String[] line = csvReader.ReadFields();
                // NOTE(review): a duplicate key or a one-field line throws and
                // lands in the catch below — confirm the file is well-formed.
                exceptions.Add(line[0].ToString(), line[1].ToString());
            }
        }
    }
    catch
    {
        MessageBox.Show("Impossible d'ouvrir le fichier:\n" + sourceFilePath);
    }
}
/// <summary>
/// Creates the list of the _cities from the .csv file, and for each city
/// with parseable coordinates reports its distance from the given epicentre
/// via CheckCityDistanceFromEarthquake.
/// </summary>
/// <param name="longitude">Epicentre longitude in degrees.</param>
/// <param name="latitude">Epicentre latitude in degrees.</param>
/// <returns>All parsed data rows of worldcities.csv (header excluded).</returns>
private List <string[]> CreateListOfCities(double longitude, double latitude)
{
    var cityData = new List <string[]>();
    // Fix: using disposes the parser even when parsing throws
    // (it was previously leaked in that case).
    using (var parser = new TextFieldParser(@"Resources\worldcities.csv"))
    {
        // offset to the second line (skip the header row)
        parser.ReadLine();
        parser.TextFieldType = FieldType.Delimited;
        parser.SetDelimiters(",");
        while (!parser.EndOfData)
        {
            var fields = parser.ReadFields();
            if (fields != null)
            {
                // Column 6 = city name; fields[7] parses as latitude and
                // fields[8] as longitude.
                var city = fields[6];
                double lon, lat;
                if (double.TryParse(fields[8], out lon) && double.TryParse(fields[7], out lat))
                {
                    var dist = CalculateDistance(longitude, latitude, lon, lat);
                    CheckCityDistanceFromEarthquake(dist, city);
                }
            }
            cityData.Add(fields);
        }
    }
    return(cityData);
}
// Verifies that TextFieldParser.Close is safe in every disposal pattern over
// an in-memory stream: Close inside a using block (nested and inline),
// calling Close twice, and calling Close after the using block has already
// disposed the parser. None of these should throw.
public void Close() { string data = @"abc123"; using (var stream = GetStream(data)) { using (var parser = new TextFieldParser(stream)) { parser.Close(); } } using (var parser = new TextFieldParser(GetStream(data))) { parser.Close(); } { var parser = new TextFieldParser(GetStream(data)); parser.Close(); parser.Close(); } { TextFieldParser parser; using (parser = new TextFieldParser(GetStream(data))) { } parser.Close(); } }
// TODO: Move to common static method to share with IMDb
/// <summary>
/// Parses a comma-delimited CSV whose first record supplies the column
/// headings; every later record becomes a heading→value dictionary. Rows
/// whose field count differs from the header are skipped.
/// </summary>
/// <param name="aFilename">Path of the CSV file.</param>
/// <param name="aParsedCSV">Receives the parsed records (empty on failure).</param>
/// <returns>False when the file is missing or parsing throws; true otherwise.</returns>
private bool ParseCSVFile(string aFilename, out List <Dictionary <string, string> > aParsedCSV)
{
    aParsedCSV = new List <Dictionary <string, string> >();
    if (!File.Exists(aFilename))
    {
        return(false);
    }
    string[] lFieldHeadings = new string[] { };
    int lRecordNumber = 0;
    try
    {
        // Fix: using disposes the parser even when an exception aborts the
        // loop (the explicit Close was previously skipped in that case).
        using (var lParser = new TextFieldParser(aFilename) { TextFieldType = FieldType.Delimited })
        {
            lParser.SetDelimiters(",");
            while (!lParser.EndOfData)
            {
                lRecordNumber++;
                // processing fields in row
                string[] lFields = lParser.ReadFields();
                // First record supplies the column headings.
                if (lRecordNumber == 1)
                {
                    lFieldHeadings = lFields;
                    continue;
                }
                // Skip malformed rows.
                if (lFields.Count() != lFieldHeadings.Count())
                {
                    continue;
                }
                // get each field value
                int lIndex = 0;
                var lExportItem = new Dictionary <string, string>();
                foreach (string field in lFields)
                {
                    lExportItem.Add(lFieldHeadings[lIndex], field);
                    lIndex++;
                }
                // add to list of items
                aParsedCSV.Add(lExportItem);
            }
        }
    }
    catch
    {
        return(false);
    }
    return(true);
}
// Yields NUnit TestCaseData for every scenario declared in the JSON scheme
// file under <root>\Scheme. Scenarios that name a CSV parameter source get
// one case per CSV record (only the first record when firstLineOnly is
// true); each case carries (Role, parameters), or just (parameters) when
// the scenario's role is Roles.None. Scenarios without a CSV source but
// with a role yield the role alone.
// NOTE(review): csvPath is always "" so CSV files resolve directly under
// dir — presumably intentional; confirm.
// NOTE(review): because this is an iterator, reader.Close() only runs if
// the caller enumerates each scenario's cases to completion.
/// <summary> /// Generic method to process the csv source file by Scheme /// </summary> /// <param name="root">the root path (e.g: Educator, Student, etc.)</param> /// <param name="jsonFile">JSON file name only, no path included</param> /// <returns>Role and array of parameters</returns> protected static IEnumerable SchemeDataSource(string root, string jsonFile, bool firstLineOnly = true) { string dir = AppDomain.CurrentDomain.BaseDirectory + root; Assert.True(FileSystem.FileExists(dir + "\\Scheme\\" + jsonFile), "JSON File not found : " + jsonFile); DataSourceScheme dataSourceScheme = JsonConvert.DeserializeObject <DataSourceScheme>( File.ReadAllText(dir + "\\Scheme\\" + jsonFile)); var url = EnvironmentReader.Base_URL; string csvPath = ""; string fullPathCsvFile = dir + "\\" + csvPath; foreach (var scenario in dataSourceScheme.Scenarios) { if (scenario.ParameterFileSource != "") { Assert.True(FileSystem.FileExists(fullPathCsvFile + scenario.ParameterFileSource + ".csv"), "CSV File source not found : " + scenario.ParameterFileSource + ".csv"); var reader = new TextFieldParser(fullPathCsvFile + scenario.ParameterFileSource + ".csv"); reader.SetDelimiters(","); bool moreLines = true; while (!reader.EndOfData && moreLines) { string[] fields = reader.ReadFields(); List <string> parameters = new List <string>(); if (fields != null) { foreach (var field in fields) { parameters.Add(field); } } if (firstLineOnly) { moreLines = false; } if (scenario.Role != Roles.None) { yield return(new TestCaseData(scenario.Role, parameters)); } else { yield return(new TestCaseData(parameters)); } } reader.Close(); } else { if (scenario.Role != Roles.None) { yield return(new TestCaseData(scenario.Role)); } } } }
// Re-reads the CSV at FilePath into a freshly sized 2-D array (one row
// larger than the file) and appends append_line as the extra row. Updates
// the `row`/`col` fields as a side effect. (Original comment, translated:
// "receives the line to append and re-allocates the array".)
public string[,] AppendLine(string[,] apTable, string[] append_line, string FilePath, Encoding encode)
{
    // First pass: count the rows and the widest record so the array can be
    // sized. Fix: the FileStream and the first parser were never disposed,
    // and the second parser called Close() after Dispose(); `using` now
    // handles all of that.
    row = 0;
    using (FileStream fs = new FileStream(FilePath, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
    using (TextFieldParser parser = new TextFieldParser(fs, encode))
    {
        parser.TextFieldType = FieldType.Delimited;
        parser.SetDelimiters(",");
        while (!parser.EndOfData)
        {
            string[] line = parser.ReadFields();
            row++;
            if (line.Length > col)
            {
                col = line.Length;
            }
        }
    }
    // Second pass: copy every field into the array, padding short rows with
    // empty strings.
    apTable = new string[row + 1, col];
    int r = 0;
    using (TextFieldParser parser = new TextFieldParser(FilePath, encode))
    {
        parser.TextFieldType = FieldType.Delimited;
        parser.SetDelimiters(",");
        while (!parser.EndOfData)
        {
            string[] line2 = parser.ReadFields();
            int c = 0;
            foreach (string tmp in line2)
            {
                apTable[r, c] = tmp;
                c++;
            }
            for (; c < this.col; c++)
            {
                apTable[r, c] = string.Empty;
            }
            r++;
        }
    }
    // Append the new line as the final row.
    for (int i = 0; i < append_line.Length; i++)
    {
        apTable[r, i] = append_line[i];
    }
    return(apTable);
}
public void Close()
{
    // Dispose the underlying parser (if any) and clear the reference so a
    // second Close is a no-op.
    _parser?.Close();
    _parser = null;
}
public void Close()
{
    // Already closed — nothing to release.
    if (_textFieldParser == null)
    {
        return;
    }
    _textFieldParser.Close();
    _textFieldParser = null;
}
// Returns the number of fields on the first record of the file at `path`,
// split by the given delimiter. Returns 0 for an empty file.
static public int FieldCount(string delimiter, string path)
{
    using TextFieldParser parser = new TextFieldParser(path);
    parser.Delimiters = new string[] { delimiter };
    // Fix: ReadFields returns null when the file is empty; report zero
    // fields instead of throwing NullReferenceException. The explicit
    // Close() was also redundant — the using declaration disposes.
    string[] parts = parser.ReadFields();
    return(parts == null ? 0 : parts.Length);
}
/// <summary>
/// Reads a CSV file into a DataTable.
/// </summary>
/// <param name="filePath">Path of the file (decoded as GB2312; tab- or comma-delimited).</param>
/// <param name="header">Whether the first line is a header row.</param>
/// <returns>The populated DataTable.</returns>
public static DataTable ReadData(String filePath, Boolean header)
{
    DataTable dt = new DataTable();
    if (File.Exists(filePath))
    {
        DataRow row = null;
        String[] rowArr = null;
        using (TextFieldParser parser = new TextFieldParser(filePath, Encoding.GetEncoding("GB2312")))
        {
            parser.Delimiters = new String[] { "\t", "," };
            parser.TrimWhiteSpace = true;
            while (!parser.EndOfData)
            {
                // LineNumber is 1 only before the first record has been
                // consumed, so this branch handles the file's first line.
                if (parser.LineNumber == 1)
                {
                    rowArr = parser.ReadFields();
                    if (!header)
                    {
                        // No header: synthesize numeric column names and keep
                        // the first line as a data row.
                        for (int i = 0; i < rowArr.Length; i++)
                        {
                            dt.Columns.Add(i.ToString());
                        }
                        row = dt.NewRow();
                        for (int i = 0; i < dt.Columns.Count; i++)
                        {
                            row[i] = rowArr[i];
                        }
                        dt.Rows.Add(row);
                    }
                    else
                    {
                        // Header present: first line supplies the column names.
                        foreach (String col in rowArr)
                        {
                            dt.Columns.Add(col);
                        }
                    }
                }
                else
                {
                    // NOTE(review): rows shorter than the column count throw
                    // IndexOutOfRangeException — assumes a rectangular file;
                    // TODO confirm inputs are rectangular.
                    rowArr = parser.ReadFields();
                    row = dt.NewRow();
                    for (int i = 0; i < dt.Columns.Count; i++)
                    {
                        row[i] = rowArr[i];
                    }
                    dt.Rows.Add(row);
                }
            }
            // Redundant with the using block, but harmless.
            parser.Close();
        }
    }
    else
    {
        throw new FileNotFoundException();
    }
    return(dt);
}
// Synchronizes the establishment-branch table with an uploaded text file:
// each line must be "[establishment code]|[branch code]"; pairs missing
// from the database are inserted, and pairs in the database that are absent
// from the file are deleted. Returns a user-facing status or error message.
// NOTE(review): the parser is not closed when the format-error branch
// returns early or when an exception is thrown mid-parse.
// NOTE(review): in the delete comparison, branch_of_study_code lower-cases
// only one side (r2's value is Trim()ed but not ToLower()ed) — presumably
// an unintended asymmetry; confirm whether codes are case-sensitive.
public string ManageEstablishmentBranchesWithTextFile(Stream stream) { try { TextFieldParser tfp = new TextFieldParser(stream); tfp.TextFieldType = FieldType.Delimited; tfp.SetDelimiters("|"); DataTable establishmentBranchesInTextFile = new DataTable(); establishmentBranchesInTextFile.Columns.Add("education_establishment_code"); establishmentBranchesInTextFile.Columns.Add("branch_of_study_code"); while (!tfp.EndOfData) { string[] fields = tfp.ReadFields(); if (fields.Count() == 2) { DataRow dr = establishmentBranchesInTextFile.NewRow(); dr.ItemArray = fields; establishmentBranchesInTextFile.Rows.Add(dr); DataTable establishmentBranch = establishmentBranchDAO.SelectEstablishmentBranch(fields[0], fields[1]); if (establishmentBranch.Rows.Count == 0) { establishmentBranchDAO.InsertEstablishmentBranch(fields[0], fields[1]); } establishmentBranch.Clear(); } else { return("Please upload a text file with the correct format. " + "Make sure to follow this format: [Education Establishment Code]|[Branch of Study Code]"); } } tfp.Close(); DataTable establishmentBranchesInDatabase = establishmentBranchDAO.SelectAllEstablishmentBranch(); IEnumerable <DataRow> dt3 = (from r in establishmentBranchesInDatabase.AsEnumerable() where !establishmentBranchesInTextFile.AsEnumerable().Any(r2 => r["education_establishment_code"].ToString().Trim().ToLower() == r2["education_establishment_code"].ToString().Trim().ToLower() && r["branch_of_study_code"].ToString().Trim().ToLower() == r2["branch_of_study_code"].ToString().Trim()) select r); if (dt3.Any()) { DataTable establishmentCertificatesToBeDeleted = dt3.CopyToDataTable(); foreach (DataRow dr in establishmentCertificatesToBeDeleted.Rows) { establishmentBranchDAO.DeleteEstablishmentBranch(dr[0].ToString(), dr[1].ToString()); } } return("Establishment Branch data successfully edited."); } catch (Exception e1) { Debug.WriteLine("Text File Parse Exception Type: " + e1.GetType() + "\nMessage: " + e1.Message + "\nStack Trace: " + e1.StackTrace); 
return("Sorry, an error occured. Please try again."); } }
// Exports the market orders via UI automation, then imports them from the
// most recently written market-log CSV into `orders` (keeping the previous
// list in `oldOrders`), and finally deletes the exported file best-effort.
public void fetchMyOrders()
{
    // Start by exporting the order data by clicking the export button.
    Automation.I.mouseClick(ExportLocation);
    Automation.I.waitAWhile();
    // Then import the orders from the newest market-log file.
    var directory = new DirectoryInfo("C:\\Users\\Stéphane\\Documents\\EVE\\logs\\Marketlogs");
    var myOrdersFile = directory.GetFiles()
                       .OrderByDescending(f => f.LastWriteTime)
                       .First();
    int index = 0;
    List <OrderElement> newOrders = new List <OrderElement>();
    // Fix: using disposes the parser even when a malformed row makes Parse
    // throw (it was previously leaked in that case).
    using (TextFieldParser parser = new TextFieldParser(myOrdersFile.FullName))
    {
        parser.TextFieldType = FieldType.Delimited;
        parser.SetDelimiters(",");
        while (!parser.EndOfData)
        {
            string[] fields = parser.ReadFields();
            if (index != 0) // skip the header row
            {
                OrderElement toAdd = new OrderElement(double.Parse(fields[10], CultureInfo.InvariantCulture), bool.Parse(fields[9]));
                toAdd.orderId = fields[0];
                toAdd.typeId = fields[1];
                toAdd.charId = fields[2];
                toAdd.charName = fields[3];
                toAdd.regionId = fields[4];
                toAdd.regionName = fields[5];
                toAdd.stationId = fields[6];
                toAdd.stationName = fields[7];
                toAdd.range = int.Parse(fields[8]);
                toAdd.volumeEntered = int.Parse(fields[11]);
                toAdd.volumeRemaining = double.Parse(fields[12], CultureInfo.InvariantCulture);
                toAdd.issueDate = DateTime.Parse(fields[13]);
                toAdd.orderState = fields[14];
                toAdd.minVolume = int.Parse(fields[15]);
                toAdd.accountId = fields[16];
                toAdd.duration = int.Parse(fields[17]);
                toAdd.isCorp = bool.Parse(fields[18]);
                toAdd.solarSystemId = fields[19];
                toAdd.solarSystemName = fields[20];
                toAdd.escrow = double.Parse(fields[21], CultureInfo.InvariantCulture);
                newOrders.Add(toAdd);
            }
            index++;
        }
    }
    oldOrders = orders;
    orders = newOrders;
    Automation.I.waitAWhile();
    try
    {
        // Best-effort cleanup of the exported file; failure is non-fatal.
        File.Delete(myOrdersFile.FullName);
    }
    catch (Exception)
    {
    }
}
/// <summary>
/// Read the csv and populate the clsCSVLine objects. Each non-empty line of
/// the file becomes one clsCSVLine; when hasheader is set, the first line
/// populates the shared header instead of fields.
/// </summary>
public void ReadCSV()
{
    if (!File.Exists(fileName))
    {
        return;
    }
    string[] nlines = File.ReadAllLines(this.fileName);
    bool headerline = this.hasheader;
    foreach (string csvline in nlines)
    {
        // There is no point in reading empty lines
        if (csvline.Trim() == "")
        {
            continue;
        }
        clsCSVLine clscsvline = new clsCSVLine();
        clscsvline.line = csvline;
        // Fix: dispose each per-line parser even if ReadFields throws
        // (it was previously leaked in that case).
        using (TextFieldParser parser = new TextFieldParser(new StringReader(csvline)))
        {
            parser.HasFieldsEnclosedInQuotes = true;
            parser.SetDelimiters(",");
            while (!parser.EndOfData)
            {
                string[] fields = parser.ReadFields();
                foreach (string field in fields)
                {
                    // if csv has headers read first line as header, set first line to false
                    if (headerline)
                    {
                        this.header.Add(field);
                        clscsvline.header.Add(field);
                    }
                    else
                    {
                        clscsvline.header = this.header;
                        clscsvline.hasheader = this.hasheader;
                        // else read as regular line
                        clscsvline.fields.Add(field);
                    }
                }
                if (headerline)
                {
                    headerline = false;
                }
                this.lines.Add(clscsvline);
            }
        }
    }
}
// Synchronizes the blood_type table with an uploaded text file: each line
// must contain exactly one field; blood types missing from the database are
// inserted, and types in the database that are absent from the file are
// deleted. Returns a user-facing status or error message.
// NOTE(review): the parser is not closed when the format-error branch
// returns early or when an exception is thrown mid-parse.
// NOTE(review): each row is added to bloodTypesInTextFile BEFORE the
// field-count check — a malformed multi-field row is recorded (its extra
// fields silently dropped by ItemArray) and then rejects the whole file.
public string ManageBloodTypesWithTextFile(Stream stream) { try { TextFieldParser tfp = new TextFieldParser(stream); tfp.TextFieldType = FieldType.Delimited; tfp.SetDelimiters("|"); DataTable bloodTypesInTextFile = new DataTable(); bloodTypesInTextFile.Columns.Add("blood_type"); while (!tfp.EndOfData) { string[] fields = tfp.ReadFields(); DataRow dr = bloodTypesInTextFile.NewRow(); dr.ItemArray = fields; bloodTypesInTextFile.Rows.Add(dr); if (fields.Count() == 1) { DataTable bloodType = bloodTypeDAO.SelectBloodType(fields[0]); if (bloodType.Rows.Count == 0) { bloodTypeDAO.InsertBloodType(fields[0]); } bloodType.Clear(); } else { return("Please upload a text file with the correct format. " + "Make sure to follow this format: [Blood Type]"); } } tfp.Close(); DataTable bloodTypesInDatabase = bloodTypeDAO.SelectAllBloodType(); IEnumerable <string> bloodTypesNotInTextFile = bloodTypesInDatabase.AsEnumerable().Select(r => r.Field <string>("blood_type")) .Except(bloodTypesInTextFile.AsEnumerable().Select(r => r.Field <string>("blood_type"))); if (bloodTypesNotInTextFile.Any()) { DataTable bloodTypesToBeDeleted = (from row in bloodTypesInDatabase.AsEnumerable() join id in bloodTypesNotInTextFile on row.Field <string>("blood_type") equals id select row).CopyToDataTable(); foreach (DataRow dr in bloodTypesToBeDeleted.Rows) { bloodTypeDAO.DeleteBloodType(dr[0].ToString()); } } return("Blood Type data successfully edited."); } catch (Exception e1) { Debug.WriteLine("Text File Parse Exception Type: " + e1.GetType() + "\nMessage: " + e1.Message + "\nStack Trace: " + e1.StackTrace); return("Sorry, an error occured. Please try again."); } }
// Builds the list of Ben records to download by regenerating and parsing
// the tab-delimited directory file produced by BenLink. Oversized records
// are logged and skipped; only records from the last 30 days that are newer
// than the last downloaded file are returned.
private List <BenRecord> GetFileList()
{
    List <BenRecord> downloadList = new List <BenRecord>();
    string dirFile = m_tempDirectoryName + "bendir.txt";
    try
    {
        // delete the existing dir file if one exists.
        if (FileSystem.FileExists(dirFile))
        {
            FileSystem.DeleteFile(dirFile);
        }
        // build new dir files.
        BuildBenLinkDirINI();
        ExecBenCommand();
        // build list of records to download
        if (FileSystem.FileExists(dirFile))
        {
            // Fix: using disposes the reader even when a malformed row makes
            // Convert.ToInt32/ToDateTime throw (it was previously leaked).
            using (TextFieldParser dirReader = FileSystem.OpenTextFieldParser(dirFile, new string[] { "\t" }))
            {
                while (!dirReader.EndOfData)
                {
                    string[] curRow = dirReader.ReadFields();
                    if (Convert.ToInt32(curRow[2]) < BENMAXFILESIZE)
                    {
                        BenRecord curRecord = new BenRecord(Convert.ToInt32(curRow[0]), Convert.ToDateTime(curRow[1]), Convert.ToInt32(curRow[2]));
                        // Only fetch recent records newer than the last download.
                        if (curRecord.DateTime > DateTime.UtcNow.AddDays(-30) && curRecord.DateTime > m_lastFileDownloaded.DateTime)
                        {
                            downloadList.Add(curRecord);
                        }
                    }
                    else
                    {
                        Program.Log("File too large Error: " + m_siteName + " - " + Convert.ToString(curRow[0]), m_tempDirectoryName);
                    }
                }
            }
        }
        else
        {
            throw new Exception("GetFileList Error: " + m_siteName + " - dir file does not exist.");
        }
    }
    catch (Exception ex)
    {
        Program.Log("GetFileList Error: " + m_siteName + " - " + ex.ToString(), m_tempDirectoryName);
        throw new Exception("GetFileList Error: " + m_siteName + " - " + ex.ToString());
    }
    return(downloadList);
}
// Collects the distinct values of CSV column `fieldNo` (excluding the header
// value `columnName`), bulk-loads them into `tempTable` via binary COPY, then
// inserts any names not already present into `dataTable`.
private static void updateItems(string fileName, string ConnectionString, string columnName, string dataTable, string tempTable, int fieldNo)
{
    using (var conn = new NpgsqlConnection(ConnectionString))
    {
        conn.Open();
        HashSet <String> nameSet = new HashSet <string>();
        // Fix: dispose the parser (it was previously only Close()d, and
        // leaked entirely if ReadFields threw).
        using (TextFieldParser reader = new TextFieldParser(fileName))
        {
            reader.HasFieldsEnclosedInQuotes = true;
            reader.SetDelimiters(",");
            while (!reader.EndOfData)
            {
                string[] fields = reader.ReadFields();
                nameSet.Add(fields[fieldNo]);
            }
        }
        // Drop the header cell so only data values remain.
        nameSet.Remove(columnName);
        // load columnName to tempTable
        using (var writer = conn.BeginBinaryImport(String.Format("COPY {0} (newname) FROM STDIN (FORMAT BINARY)", tempTable)))
        {
            foreach (var each in nameSet)
            {
                writer.StartRow();
                writer.Write(each.ToString());
            }
            // Fix: without Complete(), current Npgsql versions roll the COPY
            // back on dispose, silently discarding every row.
            writer.Complete();
        }
        Console.WriteLine("Loaded to temp table!");
        // NOTE(review): table names are interpolated into the SQL; they must
        // come from trusted configuration, never user input.
        String insertCmd = String.Format(@"insert into {0} (name) select distinct newname from {1} tmp where not exists (select * from {0} where tmp.newname = {0}.name) ", dataTable, tempTable);
        using (NpgsqlCommand cmd = new NpgsqlCommand(insertCmd, conn))
        {
            cmd.CommandType = CommandType.Text;
            // Fix: ExecuteNonQuery is the correct call for an INSERT
            // (ExecuteReader left an undisposed reader on the connection).
            cmd.ExecuteNonQuery();
        }
        Console.WriteLine(String.Format("Column {0} of {1} is updated to {2}.", columnName, fileName, dataTable));
        conn.Close();
    }
}
// Splits a comma-delimited argument string into the `arguments` list,
// trimming whitespace around each value.
private void ParseMultipleArguments(string input)
{
    // Fix: using disposes the parser (it was previously leaked when
    // ReadFields threw).
    using (var parser = new TextFieldParser(new StringReader(input)) { TextFieldType = FieldType.Delimited })
    {
        parser.SetDelimiters(",");
        // Fix: ReadFields returns null for empty input; treat that as "no
        // arguments" instead of throwing NullReferenceException.
        var fields = parser.ReadFields();
        arguments = fields == null
            ? new List<string>()
            : fields.Select(x => x.Trim()).ToList();
    }
}
// Populates listBoxSrc with the column names found on the first line of the
// given CSV file (quoted fields supported). Empty header cells get synthetic
// names ("fldN"), embedded apostrophes are doubled (for later SQL use), and
// the column count is shown in labelcolcount.
private void readheader(string filename)
{
    // TextFieldParser (from the Microsoft VB library) does the CSV tokenizing.
    // Fix: wrap it in using so the file handle is released even if reading
    // throws (it was previously leaked in that case).
    using (TextFieldParser par = new TextFieldParser(new StreamReader(@filename)))
    {
        par.HasFieldsEnclosedInQuotes = true;
        par.SetDelimiters(",");
        string[] _values = null;
        int row = 0;
        listBoxSrc.Items.Clear();
        while (!par.EndOfData)
        {
            _values = par.ReadFields();
            string fldstring;
            if (row == 0)
            {
                int x = 0;
                foreach (string field in _values)
                {
                    x++;
                    // Double embedded apostrophes so the name is SQL-safe.
                    if (field.IndexOf("'") > 0)
                    {
                        fldstring = field.Replace("'", "''");
                    }
                    else
                    {
                        fldstring = field;
                    }
                    // Blank header cell: synthesize a name from its position.
                    if (fldstring == "")
                    {
                        fldstring = string.Format("fld{0}", Convert.ToString(x));
                    }
                    listBoxSrc.Items.Add(fldstring);
                    //listBoxChng.Items.Add(fldstring);
                    //lst.Add(fldstring);
                    //listBoxChng.DataSource = lst;
                }
                string colcount = string.Format("# of column(s) : {0}", Convert.ToString(x));
                labelcolcount.Text = colcount;
            }
            else
            {
                // Only the header line is needed.
                break;
            }
            row++;
        }
    }
}
// Parses a comma-delimited CSV whose first record is a header row and
// appends one heading→value dictionary per data row to RateItems. Rows
// whose field count differs from the header are skipped.
// Returns false when the file is missing or parsing throws.
private bool ParseCSVFile(string filename)
{
    if (!File.Exists(filename))
    {
        return(false);
    }
    string[] fieldHeadings = new string[] {};
    try
    {
        // Fix: using disposes the parser even when an exception aborts the
        // loop (the explicit Close was previously skipped in that case).
        using (TextFieldParser parser = new TextFieldParser(filename))
        {
            parser.TextFieldType = FieldType.Delimited;
            parser.SetDelimiters(",");
            while (!parser.EndOfData)
            {
                // processing fields in row
                string[] fields = parser.ReadFields();
                // get header fields — LineNumber has already advanced past
                // the record just read, so == 2 marks the first record.
                if (parser.LineNumber == 2)
                {
                    fieldHeadings = fields;
                    continue;
                }
                // Skip rows that don't match the header width.
                if (fields.Count() != fieldHeadings.Count())
                {
                    continue;
                }
                // get each field value
                int index = 0;
                var rateItem = new Dictionary <string, string>();
                foreach (string field in fields)
                {
                    rateItem.Add(fieldHeadings[index], field);
                    index++;
                }
                // add to list of items
                RateItems.Add(rateItem);
            }
        }
    }
    catch
    {
        return(false);
    }
    return(true);
}
// Demo routine: first drains a CSV with TextFieldParser (NOTE(review): the
// empty-string path here throws when actually run — presumably a
// placeholder, TODO confirm), then demonstrates k-Nearest Neighbors
// classification over strings using Levenshtein distance (Accord.NET).
private static void meh()
{
    string[] delimiters = { "," };
    string[] fields;
    TextFieldParser tfp;
    tfp = new TextFieldParser("");
    tfp.HasFieldsEnclosedInQuotes = true;
    tfp.Delimiters = delimiters;
    while (!tfp.EndOfData)
    {
        fields = tfp.ReadFields();
    }
    tfp.Close();
    // The k-Nearest Neighbors algorithm can be used with
    // any kind of data. In this example, we will see how
    // it can be used to compare, for example, Strings.
    string[] inputs =
    {
        "Car",   // class 0
        "Bar",   // class 0
        "Jar",   // class 0
        "Charm", // class 1
        "Chair"  // class 1
    };
    int[] outputs =
    {
        0, 0, 0, // First three are from class 0
        1, 1,    // And next two are from class 1
    };
    // Now we will create the K-Nearest Neighbors algorithm. For this
    // example, we will be choosing k = 1. This means that, for a given
    // instance, only its nearest neighbor will be used to cast a new
    // decision.
    // In order to compare strings, we will be using Levenshtein's string distance
    KNearestNeighbors <string> knn = new KNearestNeighbors <string>(k: 1, classes: 2, inputs: inputs, outputs: outputs, distance: Distance.Levenshtein);
    // After the algorithm has been created, we can use it:
    int answer = knn.Compute("Chars"); // answer should be 1.
}
// Builds the list of Ben records to download by regenerating and parsing
// the tab-delimited directory file produced by BenLink. Oversized records
// trigger an email notification and are skipped.
private List <BenRecord> GetFileList()
{
    List <BenRecord> downloadList = new List <BenRecord>();
    string dirFile = localPath + "\\bendir.txt";
    try
    {
        //delete the existing dir file if one exists.
        //if (FileSystem.FileExists(dirFile))
        //    FileSystem.DeleteFile(dirFile);
        //build new dir files.
        BuildBenLinkDirINI();
        ExecBenCommand();
        // build list of records to download
        // todo: build an algroithm for rollover of record numbers
        if (FileSystem.FileExists(dirFile))
        {
            // Fix: using disposes the reader even when a malformed row makes
            // Convert.ToInt32/ToDateTime throw (it was previously leaked).
            using (TextFieldParser dirReader = FileSystem.OpenTextFieldParser(dirFile, new string[] { "\t" }))
            {
                while (!dirReader.EndOfData)
                {
                    string[] curRow = dirReader.ReadFields();
                    if (Convert.ToInt32(curRow[2]) < BENMAXFILESIZE)
                    {
                        BenRecord curRecord = new BenRecord(Convert.ToInt32(curRow[0]), Convert.ToDateTime(curRow[1]), Convert.ToInt32(curRow[2]));
                        downloadList.Add(curRecord);
                    }
                    else
                    {
                        // Oversized records can't be fetched; notify instead.
                        SendFileTooLargeEmailNotification("Record id: " + curRow[0]);
                    }
                }
            }
        }
        else
        {
            throw new Exception("GetFileList Error: " + siteName + " - dir file does not exist.");
        }
    }
    catch (Exception ex)
    {
        Program.Log("GetFileList Error: " + siteName + " - " + ex.ToString());
        throw new Exception("GetFileList Error: " + siteName + " - " + ex.ToString());
    }
    return(downloadList);
}
/// <summary>
/// Copies a delimited file to outputFilePath, masking the given column
/// indexes on every data row with the configured algorithm. Optionally
/// writes the header row through unchanged and skips rows for which
/// skipRow returns true.
/// </summary>
/// <param name="inputFilePath">Source file path.</param>
/// <param name="outputFilePath">Destination file path.</param>
/// <param name="columnIndexToMask">Zero-based indexes of columns to mask.</param>
/// <param name="delimiter">Field delimiter; defaults to ','.</param>
/// <param name="isFirstRowHeader">Whether the first row is an unmasked header.</param>
/// <param name="skipRow">Optional predicate; matching rows are omitted.</param>
public async Task Mask(
    string inputFilePath,
    string outputFilePath,
    IEnumerable <int> columnIndexToMask,
    char delimiter = ',',
    bool isFirstRowHeader = true,
    Func <IEnumerable <string>, bool> skipRow = null)
{
    TextFieldParser parser = null;
    StreamWriter outputFile = null;
    try
    {
        parser = ReadFileWithParser(inputFilePath, delimiter);
        outputFile = OpenFile(outputFilePath);
        while (!parser.EndOfData)
        {
            if (isFirstRowHeader && parser.LineNumber == 1)
            {
                // Pass the header row through unmasked.
                var header = parser.ReadFields();
                await WriteOutputFile(header, delimiter, outputFile);
                // Fix: re-check EndOfData before reading the next record.
                // The original fell through and called ReadFields again,
                // which returns null — and then crashed — on a file that
                // contains only a header row.
                continue;
            }
            var tokens = parser.ReadFields();
            if (skipRow?.Invoke(tokens) == true)
            {
                continue;
            }
            foreach (var col in columnIndexToMask)
            {
                var token = tokens.ElementAt(col);
                var masked = algorithm.Mask(token);
                tokens.SetValue(masked, col);
            }
            await WriteOutputFile(tokens, delimiter, outputFile);
        }
    }
    finally
    {
        if (parser != null)
        {
            parser.Close();
        }
        if (outputFile != null)
        {
            await outputFile.DisposeAsync();
        }
    }
}
// Returns true when the Tabla.txt file under My Documents\ProyectoF has no
// readable record (ReadFields returns null for an empty file).
public bool isnull()
{
    string mydocs = System.Environment.GetFolderPath(Environment.SpecialFolder.MyDocuments);
    string folder = "ProyectoF";
    string R = mydocs + '\\' + folder;
    using (TextFieldParser Datos = new TextFieldParser(R + "\\Tabla.txt"))
    {
        Datos.TextFieldType = FieldType.Delimited;
        Datos.SetDelimiters(",");
        // Fix: the explicit Close() calls in each branch were redundant —
        // the using block disposes the parser on every exit path.
        string[] Escrito = Datos.ReadFields();
        return(Escrito == null);
    }
}
/// <summary>
/// Splits one CSV row into its fields, honoring quoted values.
/// </summary>
/// <param name="row">A single comma-delimited line.</param>
/// <returns>The parsed fields, or null for an empty row.</returns>
protected static string[] GetParts(string row)
{
    // Fix: using disposes the parser even if ReadFields throws on malformed
    // input (it was previously leaked in that case).
    using (var parser = new TextFieldParser(new StringReader(row)) { HasFieldsEnclosedInQuotes = true })
    {
        parser.SetDelimiters(",");
        return(parser.ReadFields());
    }
}