/// <summary>
/// Loads the DNSCrypt resolver list from dnscrypt-resolvers.csv (located in the
/// current working directory) into providerList.
/// </summary>
public ProviderMgr()
{
    // Path.Combine instead of manual "\\" concatenation.
    string csvPath = Path.Combine(Directory.GetCurrentDirectory(), "dnscrypt-resolvers.csv");

    // using guarantees the file handle is released even if a row is malformed;
    // the original leaked the parser on any exception.
    using (TextFieldParser parser = new TextFieldParser(csvPath))
    {
        parser.TextFieldType = FieldType.Delimited;
        parser.SetDelimiters(",");

        // Skip the header row (Name, etc.) while reading instead of
        // RemoveAt(0) afterwards, which crashed on an empty file.
        bool isHeader = true;
        while (!parser.EndOfData)
        {
            string[] fields = parser.ReadFields();
            if (isHeader)
            {
                isHeader = false;
                continue;
            }

            ProviderItem providerItem = new ProviderItem();
            providerItem.setName(fields[0]);
            providerItem.setFullName(fields[1]);
            providerItem.setDescription(fields[2]);
            providerItem.setLocation(fields[3]);
            providerItem.setCoordinates(fields[4]);
            providerItem.setURL(fields[5]);
            providerItem.setVersion(fields[6]);
            providerItem.setDNSSEC(fields[7]);
            providerItem.setNoLogs(fields[8]);
            providerItem.setNamecoin(fields[9]);
            providerItem.setAddress(fields[10]);
            providerItem.setProviderName(fields[11]);
            providerItem.setPublicKey(fields[12]);
            providerItem.setPublicKeyTXT(fields[13]);
            providerList.Add(providerItem);
        }
    }
}
/// <summary>
/// Opens (creating if necessary) a report CSV and loads its header languages
/// and data rows.
/// </summary>
/// <param name="path">Path of the report CSV file.</param>
public ReportFile(string path)
{
    fileInfo = new FileInfo(path);
    if (!fileInfo.Exists)
    {
        CreateReportFile(fileInfo);
    }

    var parser = new TextFieldParser(path) { Delimiters = new[] { "," } };
    if (!parser.EndOfData)
    {
        var header = parser.ReadFields();

        // Columns 0 and 1 are the Date/Time and Word Count headers; every
        // remaining column names a language.
        var languages = new List<string>();
        for (var col = 2; col < header.Length; ++col)
        {
            languages.Add(header[col]);
            langs.Add(header[col]);
        }

        var languageArray = languages.ToArray();
        while (!parser.EndOfData)
        {
            rows.Add(new ReportRow(languageArray, parser.ReadFields()));
        }
    }
    parser.Close();
}
/// <summary>
/// Imports a CSV of (id, name, date, score) rows, routing invalid rows (null
/// score, unparsable date, or date after 2010-04-15) to the error table.
/// </summary>
/// <param name="file">Path of the CSV file to import.</param>
public static void ImportCSV(string file)
{
    // Explicit DateTime instead of Convert.ToDateTime("4/15/2010"), which is
    // culture-sensitive and throws on e.g. dd/MM cultures.
    DateTime cutoff = new DateTime(2010, 4, 15);

    // using releases the file even if an Insert throws mid-import.
    using (TextFieldParser parser = new TextFieldParser(file))
    {
        parser.TextFieldType = FieldType.Delimited;
        parser.SetDelimiters(",");

        bool header = true;
        while (!parser.EndOfData)
        {
            // We don't want to import the header row.
            if (header)
            {
                header = false;
                parser.ReadFields();
                continue;
            }

            string[] fields = parser.ReadFields();
            bool error = false;

            if (String.IsNullOrEmpty(fields[3]))
            {
                // Score is null: substitute 0 and flag the row.
                fields[3] = "0";
                error = true;
            }

            DateTime dt;
            if (!DateTime.TryParse(fields[2], out dt))
            {
                // Date is invalid: substitute the sentinel date and flag.
                fields[2] = "05/05/55";
                error = true;
            }
            // dt is DateTime.MinValue when the parse failed, so this check
            // only fires for successfully parsed dates.
            if (dt > cutoff)
            {
                error = true;
            }

            // Insert into the correct table.
            if (error)
            {
                InsertError(fields[0], fields[1], fields[2], fields[3]);
            }
            else
            {
                Insert(fields[0], fields[1], fields[2], fields[3]);
            }
        }
    }
}
/// <summary>
/// Parses a comma-delimited CSV, dropping rows that contain blank fields and
/// the header row. Errors and exclusions are reported via message boxes.
/// </summary>
/// <param name="path">Path of the CSV file.</param>
/// <returns>The remaining data rows as field arrays.</returns>
public List<string[]> parsecsv(string path)
{
    List<string[]> parsedData = new List<string[]>();
    try
    {
        //TODO add check to see if the csv file is a valid one or validate data in each field
        int numExcludedRows = 0;

        // using releases the file handle even when parsing throws; the
        // original only reached Close() on the success path.
        using (TextFieldParser parser = new TextFieldParser(path))
        {
            parser.TextFieldType = FieldType.Delimited;
            parser.SetDelimiters(",");
            parser.TrimWhiteSpace = true; // trims white space from the strings

            while (!parser.EndOfData)
            {
                string[] fields = parser.ReadFields();
                if (fields.Contains(""))
                {
                    // Row has at least one blank field: exclude it.
                    numExcludedRows++;
                }
                else
                {
                    parsedData.Add(fields);
                }
            }
        }

        if (numExcludedRows > 0)
        {
            MessageBox.Show("Some rows were incomplete, removed data for " + numExcludedRows.ToString() + " ties.", "SkyRise Canopy Creator");
        }

        // Remove the header row (Count property instead of LINQ Count()).
        if (parsedData.Count >= 2)
        {
            parsedData.RemoveAt(0);
        }
    }
    catch (Exception e)
    {
        MessageBox.Show(e.Message);
    }
    return parsedData;
}
/// <summary>
/// Reads every row of this.Filename (comma- or tab-delimited, quote-aware)
/// into mLines.
/// </summary>
public void ProcessFile()
{
    var reader = new TextFieldParser(this.Filename)
    {
        HasFieldsEnclosedInQuotes = true,
        Delimiters = new string[] { ",", "\t" }
    };
    while (!reader.EndOfData)
    {
        mLines.Add(reader.ReadFields());
    }
    reader.Close();
}
// Static initializer: builds the animation lookup tables (id <-> name,
// fallback chain) from the embedded AnimationData.csv, then seeds the
// special-playback sets.
static AnimationData()
{
    Fallback = new Dictionary<ushort, ushort>();
    NameToId = new Dictionary<string, ushort>();
    IdToName = new Dictionary<ushort, string>();
    PlayThenStop = new HashSet<ushort>();
    PlayBackwards = new HashSet<ushort>();

    // The CSV ships inside the assembly as an embedded resource.
    var assembly = Assembly.GetExecutingAssembly();
    var embeddedStream = assembly.GetManifestResourceStream("M2Lib.src.csv.AnimationData.csv");
    Debug.Assert(embeddedStream != null, "Could not open embedded ressource AnimationData");

    // Lines starting with '#' are comments in this CSV.
    var csvParser = new TextFieldParser(embeddedStream) {CommentTokens = new[] {"#"}};
    csvParser.SetDelimiters(",");
    csvParser.HasFieldsEnclosedInQuotes = true;
    csvParser.ReadLine(); // Skip first line
    while (!csvParser.EndOfData)
    {
        var fields = csvParser.ReadFields();
        Debug.Assert(fields != null);
        // Columns used: 0 = animation id, 1 = name, 3 = fallback id.
        var id = Convert.ToUInt16(fields[0]);
        var name = fields[1];
        var fallback = Convert.ToUInt16(fields[3]);
        Fallback[id] = fallback;
        NameToId[name] = id;
        IdToName[id] = name;
    }
    csvParser.Close();

    // Animations that play once and then hold (no loop).
    ushort[] playThenStopValues =
    {
        NameToId["Dead"],
        NameToId["SitGround"],
        NameToId["Sleep"],
        NameToId["KneelLoop"],
        NameToId["UseStandingLoop"],
        NameToId["Drowned"],
        NameToId["LootHold"]
    };
    foreach (var value in playThenStopValues) PlayThenStop.Add(value);

    // Animations played in reverse.
    ushort[] playBackwardsValues =
    {
        NameToId["Walkbackwards"],
        NameToId["SwimBackwards"],
        NameToId["SleepUp"],
        NameToId["LootUp"]
    };
    foreach (var value in playBackwardsValues) PlayBackwards.Add(value);

    //TODO FIXME There a loops by following the fallbacks in AnimationData.dbc. Happens with Close and FlyClose.
    Fallback[146] = 0;//Close
    Fallback[375] = 0;//FlyClose
}
/// <summary>
/// Dumps every field of a comma-delimited UTF-8 CSV to the debug output.
/// </summary>
/// <param name="fileName">Path of the CSV file.</param>
/// <returns>Always 0, including for missing or malformed files.</returns>
public static int Read(String fileName)
{
    try
    {
        // using replaces the original try/finally Close(); the unused
        // "result" list and the no-op catch(Exception){throw;} were removed.
        using (var parser = new TextFieldParser(fileName, System.Text.Encoding.UTF8))
        {
            parser.SetDelimiters(new[] { "," });
            while (!parser.EndOfData)
            {
                // TODO: records split over multiple lines by CRLF (leaving
                // blank lines) are not handled.
                var fields = parser.ReadFields();
                // TODO: validate the column count.
                Debug.WriteLine("----------------------");
                foreach (var item in fields)
                {
                    Debug.Write(item);
                    Debug.WriteLine("**");
                }
            }
        }
    }
    catch (System.IO.FileNotFoundException)
    {
        // Missing file: treated as empty input.
    }
    catch (MalformedLineException)
    {
        // Unparsable line: abort quietly, same contract as a missing file.
    }
    return 0;
}
/// <summary>
/// Reads a Shift-JIS encoded CSV file.
/// </summary>
/// <param name="csvFileName">Path of the CSV file.</param>
/// <returns>The rows of the CSV file, each row a sequence of fields.</returns>
/// <exception cref="ApplicationException">
/// Thrown when a line cannot be parsed.
/// </exception>
public static IEnumerable<IEnumerable<string>> ReadCsv(string csvFileName)
{
    var csvRecords = new List<IEnumerable<string>>();

    // Read as Shift JIS (code page 932).
    // NOTE(review): on .NET Core this needs CodePagesEncodingProvider to be
    // registered — confirm the target runtime.
    // using replaces the original try/finally Close().
    using (var tfp = new TextFieldParser(
        csvFileName,
        System.Text.Encoding.GetEncoding(932))
    {
        TextFieldType = FieldType.Delimited,   // fields are delimiter-separated
        Delimiters = new[] {","},              // delimiter is a comma
        HasFieldsEnclosedInQuotes = true,      // quoted fields may contain delimiters/newlines
        TrimWhiteSpace = true                  // strip whitespace around fields
    })
    {
        try
        {
            while (!tfp.EndOfData)
            {
                // Read one record and keep it.
                var fields = tfp.ReadFields();
                csvRecords.Add(fields);
            }
        }
        catch (MalformedLineException ex)
        {
            // ex.Message carries the offending line number.
            throw new ApplicationException("Line " + ex.Message + " is invalid. Skipping");
        }
    }

    return csvRecords;
}
/// <summary>
/// Prints every field of the AAPL stock CSV to the console.
/// </summary>
static void Main(string[] args)
{
    // NOTE(review): the original passed "/r/n" (literal slashes, not escapes)
    // as the delimiter, which made each whole line a single field.
    // TextFieldParser already splits records on line breaks; the field
    // delimiter for this CSV is the comma.
    using (TextFieldParser csvG = new TextFieldParser(@"E:\Stocks\Archive\AAPL.csv"))
    {
        csvG.TextFieldType = FieldType.Delimited;
        // set delimiter
        csvG.SetDelimiters(",");
        while (!csvG.EndOfData)
        {
            string[] fields = csvG.ReadFields();
            foreach (string field in fields)
            {
                Console.WriteLine(field);
            }
        }
    }
    Console.ReadLine();
}
/// <summary>
/// Lazily streams orders from order_data.csv. Each row is
/// (product, trader, side, price, quantity); side "0" means Buy.
/// </summary>
/// <returns>Orders parsed from the CSV, one per row.</returns>
public static IEnumerable<Order> Orders()
{
    // using (instead of a trailing Close()) ensures the file is closed even
    // when the caller abandons enumeration early — the iterator's Dispose
    // runs the using's cleanup. The original never closed the file in that case.
    using (TextFieldParser parser = new TextFieldParser(@"order_data.csv"))
    {
        parser.TextFieldType = FieldType.Delimited;
        parser.SetDelimiters(",");
        while (!parser.EndOfData)
        {
            // Processing row
            string[] fields = parser.ReadFields();
            yield return new Order(
                productMap[fields[0]],
                traderMap[fields[1]],
                fields[2] == "0" ? OrderSide.Buy : OrderSide.Sell,
                decimal.Parse(fields[3]),
                decimal.Parse(fields[4]));
        }
    }
}
/// <summary>
/// Imports a 4-column CSV, skipping the header row, inserting each data row
/// via Insert.
/// </summary>
/// <param name="file">Path of the CSV file to import.</param>
public static void ImportCSV(string file)
{
    // using releases the file even if Insert throws; the original leaked the
    // parser on any exception.
    using (TextFieldParser parser = new TextFieldParser(file))
    {
        parser.TextFieldType = FieldType.Delimited;
        parser.SetDelimiters(",");

        bool header = true;
        while (!parser.EndOfData)
        {
            // We don't want to import the header row.
            if (header)
            {
                header = false;
                parser.ReadFields();
                continue;
            }
            string[] fields = parser.ReadFields();
            Insert(fields[0], fields[1], fields[2], fields[3]);
        }
    }
}
/// <summary>
/// Validates that every row of the delimited file has a consistent column
/// count. Stops at the first inconsistency.
/// </summary>
/// <param name="fileName">Path of the file to validate.</param>
/// <param name="errors">Receives a message for the first inconsistent row, if any.</param>
/// <returns>True when all checked rows agree on column count.</returns>
private bool isFileValid(string fileName, out List<string> errors)
{
    int columnCount = 0;
    int rowCount = 0;
    bool fileValid = true;
    errors = new List<string>();

    // using replaces the original try { throw ex; } finally { Close(); }:
    // "throw ex" destroyed the stack trace, and the early-return path called
    // Close() twice (once inline, once in finally).
    using (var reader = new TextFieldParser(fileName))
    {
        reader.TextFieldType = FieldType.Delimited;
        reader.SetDelimiters(GlobalConst.IMPORT_FILE_DELIMITER);
        while (!reader.EndOfData)
        {
            string[] values = reader.ReadFields();
            // NOTE(review): comparison starts at rowCount > 1, so a mismatch
            // between the first and second rows is never reported — confirm
            // this is intended (rowCount > 0 would compare from row 2 on).
            if (rowCount > 1 && columnCount != values.Length)
            {
                fileValid = false;
                errors.Add(String.Format("Found an inconsistent column length in file at line {0}.", rowCount));
                return fileValid;
            }
            columnCount = values.Length;
            rowCount++;
        }
    }
    return fileValid;
}
/// <summary>
/// Imports ingredients from a CSV file into the database, one entity per row,
/// then redirects to the index view.
/// </summary>
/// <param name="id">Unused route parameter (kept for route compatibility).</param>
public ActionResult Upload(int? id)
{
    // TODO(review): hard-coded developer path — should come from the uploaded file.
    using (TextFieldParser parser = new TextFieldParser(@"C:\Users\Nicholas\Documents\Visual Studio 2015\Projects\Ingredients\test.csv"))
    {
        parser.TextFieldType = FieldType.Delimited;
        parser.SetDelimiters(",");
        while (!parser.EndOfData)
        {
            // Process row
            string[] fields = parser.ReadFields();

            // BUG FIX: allocate a fresh entity per row. The original reused a
            // single Ingredient instance, so EF tracked one entity and every
            // row after the first overwrote it instead of inserting a new record.
            Ingredient ingredient = new Ingredient();
            ingredient.Name = fields[0];
            ingredient.Quantity = Convert.ToDecimal(fields[1]);
            ingredient.Units = fields[2];
            ingredient.Sku = fields[3];
            ingredient.Supplier = fields[4];

            if (ModelState.IsValid)
            {
                db.Ingredients.Add(ingredient);
                db.SaveChanges();
            }
        }
    }
    return RedirectToAction("Index");
}
/// <summary>
/// Parses a Post Office CSV statement into transactions plus the closing
/// balance (the running total on the newest line).
/// </summary>
/// <param name="fileInfo">The CSV file to read.</param>
/// <param name="transactions">Receives the parsed transactions, sorted by date.</param>
/// <param name="closingBalance">Receives the running total from the first (newest) line, or 0.</param>
private static void TransactionsAndClosingBalanceFromCSVFile(FileInfo fileInfo, out List<FineAntsCore.Transaction> transactions, out int closingBalance)
{
    transactions = new List<FineAntsCore.Transaction>();
    closingBalance = 0;

    // using releases the file even if a line fails to parse.
    using (TextFieldParser parser = new TextFieldParser(fileInfo.FullName))
    {
        parser.TextFieldType = FieldType.Delimited;
        parser.SetDelimiters(",");
        while (!parser.EndOfData)
        {
            string[] fields = parser.ReadFields();

            // Post Office CSV files seem to have a line full of null characters
            // at the end. Skip it — also guarding against an empty single field,
            // which the original crashed on when indexing fields[0][0].
            if (fields.Length == 1 && (fields[0].Length == 0 || fields[0][0] == '\0'))
            {
                continue;
            }

            // Generate a transaction from the line and collect it.
            FineAntsCore.Transaction transaction = TransactionFromCSVFields(fields);
            transactions.Add(transaction);

            // The 4th column holds the running total and the file is sorted
            // newest to oldest. LineNumber is the line to be read NEXT, so 2
            // means this was the first line: its total is the closing balance.
            if (parser.LineNumber == 2)
            {
                closingBalance = AmountFromString(fields[3]);
            }
        }
    }

    // Finally, sort the transactions on date, since they're in the wrong order in the CSV.
    transactions.Sort(new FineAntsCore.TransactionDateComparer());
}
/// <summary>
/// Reads the cemetery data from data3.csv into cems / cemsList.
/// Columns: 0 id, 1 name, 2 lat, 3 lon, 4 city, 5 state, 6 country,
/// 7+ temperature series. The first two rows are headers.
/// </summary>
public static void readData()
{
    //==========================================
    // Reading in the cemetery data
    //==========================================
    // using releases the file even when a parse fails mid-file.
    using (TextFieldParser parser = new TextFieldParser(@"data3.csv"))
    {
        parser.TextFieldType = FieldType.Delimited;
        parser.SetDelimiters(",");

        int row = 0;
        while (!parser.EndOfData)
        {
            string[] fields = parser.ReadFields();

            // Skipping the first and second (header) rows.
            row++;
            if (row <= 2) continue;

            // InvariantCulture: the CSV uses '.' decimals regardless of the
            // machine locale (the original parse broke on e.g. de-DE).
            cemetery c = new cemetery();
            c.cemetery_id = int.Parse(fields[0]);
            c.name = fields[1];
            c.lat = float.Parse(fields[2], System.Globalization.CultureInfo.InvariantCulture);
            c.lon = float.Parse(fields[3], System.Globalization.CultureInfo.InvariantCulture);
            c.city = fields[4];
            c.state = fields[5];
            c.country = fields[6];

            // Columns 7+ hold the temperature series.
            List<double> temps = new List<double>();
            for (int k = 7; k < fields.Length; k++)
            {
                temps.Add(double.Parse(fields[k], System.Globalization.CultureInfo.InvariantCulture));
            }
            c.temps = temps;

            // Adding the cemetery to the lookup and the list.
            cems.Add(c.cemetery_id, c);
            cemsList.Add(c);
        }
    }
}
/// <summary>
/// Lazily parses a Process Monitor CSV export.
/// Columns: 0 Time of Day, 1 Process Name, 2 PID, 3 Operation, 4 Path,
/// 5 Result, 6 Detail — only Path and Result are currently consumed.
/// </summary>
/// <param name="path">Path of the CSV export.</param>
/// <param name="header">Whether the first line is a header row to skip.</param>
static IEnumerable<ProcessMonitorEntry> Parse(string path, bool header = true)
{
    // using (instead of a trailing Close()) closes the file even when the
    // caller stops enumerating early; the original Close() was unreachable
    // in that case.
    using (var parser = new TextFieldParser(path))
    {
        parser.TextFieldType = FieldType.Delimited;
        parser.SetDelimiters(",");
        if (header)
        {
            parser.ReadLine(); // discard the column headers
        }
        while (!parser.EndOfData)
        {
            var fields = parser.ReadFields();
            yield return new ProcessMonitorEntry
            {
                Path = fields[4],
                Result = fields[5],
            };
        }
    }
}
/// <summary>
/// Parses a CSV statement (header line first) into transactions sorted by date.
/// </summary>
/// <param name="fileInfo">The CSV file to read.</param>
/// <param name="transactions">Receives the parsed transactions, date-sorted.</param>
private static void TransactionsFromCSVFile(FileInfo fileInfo, out List<FineAntsCore.Transaction> transactions)
{
    transactions = new List<FineAntsCore.Transaction>();

    // using releases the file even if a line fails to parse.
    using (TextFieldParser parser = new TextFieldParser(fileInfo.FullName))
    {
        parser.TextFieldType = FieldType.Delimited;
        parser.SetDelimiters(",");

        // Skip the first line, as it is just the headers.
        parser.ReadLine();

        while (!parser.EndOfData)
        {
            string[] fields = parser.ReadFields();
            // Generate a transaction from the line and add it to the list.
            FineAntsCore.Transaction transaction = TransactionFromCSVFields(fields);
            transactions.Add(transaction);
        }
    }

    // Finally, sort the transactions on date, since they're in the wrong order in the CSV.
    transactions.Sort(new FineAntsCore.TransactionDateComparer());
}
// Use the VB parser to read the CSV data because we have "value;value" columns.
/// <summary>
/// Reads a delimited CSV file into a list of field arrays. Errors are shown
/// to the user and an empty/partial list is returned.
/// </summary>
/// <param name="tiedosto">Path of the CSV file.</param>
/// <param name="separator">Field delimiter character.</param>
private static List<string[]> LueCSVDataa(string tiedosto, char separator)
{
    var csvData = new List<string[]>();
    try
    {
        // using releases the file even on a parse error; the original only
        // closed the parser on the success path.
        using (TextFieldParser parser = new TextFieldParser(tiedosto))
        {
            parser.HasFieldsEnclosedInQuotes = true;
            // BUG FIX: honor the caller-supplied separator; the original
            // hard-coded ";" and silently ignored this parameter.
            parser.SetDelimiters(separator.ToString());
            while (!parser.EndOfData)
            {
                csvData.Add(parser.ReadFields());
            }
        }
    }
    catch (Exception e)
    {
        // Finnish UI message: "Error while reading the csv file: ..."
        MessageBox.Show("Virhe csv-tiedostoa luettaessa: " + e.Message);
    }
    return csvData;
}
/// <summary>
/// Loads the hospital and vendor denial CSVs and computes the records present
/// in one source but not the other.
/// </summary>
public static void Main(string[] args)
{
    // HARD_CODED FOR EXAMPLE ONLY - TO BE RETRIEVED FROM APP.CONFIG IN REAL PROGRAM
    // (verbatim strings don't need doubled backslashes; Windows tolerated the
    // original @"C:\\events\\..." form, but single separators are canonical)
    string hospPath = @"C:\events\inbound\OBLEN_COB_Active_Inv_Advi_Daily_.csv";
    string vendPath = @"C:\events\outbound\Advi_OBlen_Active_Inv_Ack_Daily_.csv";

    // The two files are parsed identically; the duplicated loop was extracted
    // into ReadDenialFile.
    List<DenialRecord> hospList = ReadDenialFile(hospPath);
    List<DenialRecord> vendList = ReadDenialFile(vendPath);

    // Compare the lists each way for denials not in the other source.
    List<DenialRecord> hospExcpt = hospList.Except(vendList).ToList();
    List<DenialRecord> vendExcpt = vendList.Except(hospList).ToList();
}

// Reads one comma-delimited denial CSV into memory. Rows that throw while
// being read or indexed are logged to the console and skipped, preserving the
// original per-row error handling.
private static List<DenialRecord> ReadDenialFile(string path)
{
    var records = new List<DenialRecord>();
    using (TextFieldParser parser = new TextFieldParser(path))
    {
        parser.TextFieldType = FieldType.Delimited;
        parser.SetDelimiters(",");
        parser.HasFieldsEnclosedInQuotes = false;
        parser.TrimWhiteSpace = true;
        while (!parser.EndOfData)
        {
            try
            {
                string[] row = parser.ReadFields();
                // NOTE(review): "<= 7" admits rows with fewer than 7 columns,
                // which then throw on row[6] and land in the catch below —
                // confirm whether "== 7" was intended.
                if (row.Length <= 7)
                {
                    records.Add(new DenialRecord(row[0], row[1], row[2], row[3], row[4], row[5], row[6]));
                }
            }
            catch (Exception e)
            {
                Console.WriteLine("Error is: {0}", e.ToString());
            }
        }
    }
    return records;
}
/// <summary>
/// Drag-drop handler: converts the dropped "name,description" CSV into a
/// constants file (C++ / C# / Java per mOutputType) plus a companion ".sid"
/// id-to-description map.
/// </summary>
private void FormMain_DragDrop(object sender, DragEventArgs e)
{
    string[] files = (string[])e.Data.GetData(DataFormats.FileDrop);

    // Path.ChangeExtension replaces only the extension; the original
    // files[0].Replace("csv", ...) also corrupted any "csv" occurring
    // elsewhere in the path.
    string outputFileName = "";
    switch (mOutputType)
    {
        case TYPE_CPP:
            outputFileName = Path.ChangeExtension(files[0], "h");
            break;
        case TYPE_CS:
            outputFileName = Path.ChangeExtension(files[0], "cs");
            break;
        case TYPE_JAVA:
            outputFileName = Path.ChangeExtension(files[0], "java");
            break;
    }

    // using guarantees both writers and the parser are flushed/closed on
    // every path; the original leaked all three on any exception.
    using (StreamWriter writer = new StreamWriter(outputFileName))
    using (StreamWriter writerSid = new StreamWriter(Path.ChangeExtension(files[0], "sid")))
    using (TextFieldParser parser = new TextFieldParser(files[0]))
    {
        // File prologue per target language.
        switch (mOutputType)
        {
            case TYPE_CPP:
                writer.WriteLine("#pragma once");
                writer.WriteLine("");
                break;
            case TYPE_CS:
                writer.WriteLine("namespace Slog");
                writer.WriteLine("{");
                writer.WriteLine("\tclass Id");
                writer.WriteLine("\t{");
                break;
            case TYPE_JAVA:
                writer.WriteLine("public class Id");
                writer.WriteLine("{");
                break;
        }

        parser.TextFieldType = FieldType.Delimited;
        parser.SetDelimiters(",");

        int id = 1;
        while (parser.EndOfData == false)
        {
            string[] row = parser.ReadFields();
            // Only well-formed "name,description" rows are emitted.
            if (row.GetLength(0) != 2) continue;

            string output = "";
            switch (mOutputType)
            {
                case TYPE_CPP:
                    output = "#define " + row[0] + " " + id;
                    break;
                case TYPE_CS:
                    output = "\t\tpublic const int " + row[0] + " = " + id + ";";
                    break;
                case TYPE_JAVA:
                    output = "\tpublic static final int " + row[0] + " = " + id + ";";
                    break;
            }
            writer.WriteLine(output);
            writerSid.WriteLine(id + "," + row[1]);
            id++;
        }

        // File epilogue per target language.
        switch (mOutputType)
        {
            case TYPE_CPP:
                break;
            case TYPE_CS:
                writer.WriteLine("\t}");
                writer.WriteLine("}");
                break;
            case TYPE_JAVA:
                writer.WriteLine("}");
                break;
        }
    }
}
/// <summary>
/// Reads an email-list CSV, keeping rows with a syntactically valid email and
/// resolving first/last names from the given column indices
/// (-1 = no name columns; equal indices = both names in one column).
/// </summary>
private List<EmailFileRecord> readFile(string fileName, int firstNameIndex, int lastNameIndex, int emailIndex)
{
    int skipCount = 0;
    int rowCount = 0;
    List<EmailFileRecord> lines = new List<EmailFileRecord>();
    Regex emailRegex = new Regex(@"^([0-9a-zA-Z]([-.\w]*[0-9a-zA-Z])*@([0-9a-zA-Z][-\w]*[0-9a-zA-Z]\.)+[a-zA-Z]{2,9})$", RegexOptions.IgnoreCase);

    // using replaces the original try { throw ex; } finally { Close(); } —
    // "throw ex" destroyed the stack trace.
    using (var reader = new TextFieldParser(fileName))
    {
        reader.TextFieldType = FieldType.Delimited;
        reader.SetDelimiters(GlobalConst.IMPORT_FILE_DELIMITER);
        while (!reader.EndOfData)
        {
            string[] values = reader.ReadFields();
            string email = values[emailIndex];
            bool emailValid = true;
            if (email == null || !emailRegex.IsMatch(email))
            {
                emailValid = false;
                skipCount++;
            }
            if (emailValid)
            {
                string fname = "";
                string lname = "";
                if (firstNameIndex == -1)
                {
                    // No name columns: use placeholder names.
                    fname = "Fashionista";
                    lname = "Jones";
                }
                else if (firstNameIndex == lastNameIndex)
                {
                    // One column holds both names; split it.
                    // NOTE(review): splitting an already-parsed field on the file
                    // delimiter looks odd — confirm the intended name separator.
                    string[] nameVals = values[firstNameIndex].Split(GlobalConst.IMPORT_FILE_DELIMITER[0]);
                    fname = nameVals[0];
                    // BUG FIX: the original required Length > 2 (and contained a
                    // redundant nested assignment), so a two-token "First Last"
                    // value lost its surname and fell back to "Jones".
                    lname = (nameVals.Length > 1) ? nameVals[1] : "Jones";
                }
                else
                {
                    fname = values[firstNameIndex];
                    lname = values[lastNameIndex];
                }
                EmailFileRecord record = new EmailFileRecord(fname, lname, email, "TODO: ADD This");
                lines.Add(record);
                rowCount++;
            }
            log(String.Format("PR {0}, VR {1}", (rowCount + skipCount), rowCount));
        }
    }
    log(String.Format("SKIPPED {0} Records", skipCount));
    return lines;
}
// Start button: walks the Twitter CSV log, generates one HTML file per tweet
// that has an attached twitpic image, and accumulates a Movable-Type-style
// export that is flushed to numbered "postNN.txt" files whenever it exceeds
// postsizelimit.
private void buttonStart_Click(object sender, EventArgs e)
{
    // Guard: a twitpic folder must have been chosen.
    if (twitpicfiles == null)
    {
        MessageBox.Show("please select twitpic folder ", "error", MessageBoxButtons.OK, MessageBoxIcon.Error);
        return;
    }
    // Guard: a twitter CSV log must have been chosen.
    if (textBoxTwitlog.Text == "")
    {
        MessageBox.Show("please select twitter csv file ", "error", MessageBoxButtons.OK, MessageBoxIcon.Error);
        return;
    }
    string exportstr = "";   // accumulated export text, flushed in chunks
    int htmlcount = 0;       // number of per-tweet HTML files written
    int exportcount = 0;     // number of postNN.txt chunks written
    textBoxLog.AppendText("twitter log csv file:" + textBoxTwitlog.Text + Environment.NewLine);
    TextFieldParser parser = new TextFieldParser(textBoxTwitlog.Text, Encoding.GetEncoding("UTF-8"));
    parser.TextFieldType = FieldType.Delimited;
    parser.SetDelimiters(",");
    while (parser.EndOfData == false)
    {
        string[] tweetdata = parser.ReadFields();
        // Only non-retweets that reference a twitpic image are exported.
        if ((!CheckRT(tweetdata)) && (CheckTwitpic(tweetdata)))
        {
            string[] picstr = CheckPicDict(tweetdata, twitpicdic, twitpicfiles);
            if (picstr.Length > 0)
            {
                // Render the tweet page and write it as "<tweet id>.htm".
                string htm = GenHtml(tweetdata, textBoxpicurl.Text, textBoxTwitterurl.Text, textBoxTwilogurl.Text, textBoxSearchdomain.Text, textBoxSearchname.Text, twitpicdic, twitpicfiles);
                System.IO.StreamWriter sw = new System.IO.StreamWriter(tweetdata[tweet_id] + ".htm", false, System.Text.Encoding.GetEncoding("UTF-8"));
                sw.Write(htm);
                sw.Close();
                htmlcount++;
                // Append this tweet's entry to the pending export text.
                exportstr += GenExport(textBoxAuthor.Text, textBoxTitle.Text, textBoxStatus.Text, textBoxAllowComments.Text, textBoxConvertBreaks.Text, textBoxAllowPings.Text, textBoxCategory.Text, tweetdata[timestamp], htm, "", "", textBoxKeywords.Text);
            }
        }
        // Every 100 written pages: flush the export if it exceeds the size
        // limit, then refresh the progress label.
        if ((htmlcount % 100) == 0)
        {
            System.Text.Encoding utfenc = new System.Text.UTF8Encoding(false);
            int utfsize = utfenc.GetByteCount(exportstr);
            if (utfsize > postsizelimit)
            {
                System.IO.StreamWriter swexportdivided = new System.IO.StreamWriter("post" + exportcount.ToString("00") + ".txt", false, utfenc);
                swexportdivided.Write(exportstr);
                swexportdivided.Close();
                exportstr = "";
                exportcount++;
            }
            labelline.Text = htmlcount + " html files " + exportcount + " text files";
            Application.DoEvents();
        }
    }
    parser.Close();
    // Flush whatever export text remains into a final chunk file.
    System.Text.Encoding enc = new System.Text.UTF8Encoding(false); // without BOM
    System.IO.StreamWriter swexport = new System.IO.StreamWriter("post" + exportcount.ToString("00") + ".txt", false, enc);
    swexport.Write(exportstr);
    swexport.Close();
    exportcount++;
    labelline.Text = htmlcount + " html files " + exportcount + " text files";
    textBoxLog.Text += htmlcount + " html files " + exportcount + " text files" + Environment.NewLine;
}
/// <summary>
/// Loads OrdersTest.csv into a typed DataTable and bulk-copies it into
/// [dbo].[Orders]. Rewritten into valid C#: the original was a literal VB
/// translation (`using (cn == ...)`, `Columns(i)`, `dr.Item(i)`, undeclared
/// `currColumn`) and did not compile.
/// </summary>
private void Button6_Click(System.Object sender, System.EventArgs e)
{
    // Define the column definition.
    DataTable dt = new DataTable();
    dt.Columns.Add("OrderID", typeof(int));
    dt.Columns.Add("CustomerID", typeof(string));
    dt.Columns.Add("EmployeeID", typeof(int));
    dt.Columns.Add("OrderDate", typeof(System.DateTime));
    dt.Columns.Add("RequiredDate", typeof(System.DateTime));
    dt.Columns.Add("ShippedDate", typeof(System.DateTime));
    dt.Columns.Add("ShipVia", typeof(int));
    dt.Columns.Add("Freight", typeof(decimal));
    dt.Columns.Add("ShipName", typeof(string));
    dt.Columns.Add("ShipAddress", typeof(string));
    dt.Columns.Add("ShipCity", typeof(string));
    dt.Columns.Add("ShipRegion", typeof(string));
    dt.Columns.Add("ShipPostalCode", typeof(string));
    dt.Columns.Add("ShipCountry", typeof(string));

    using (SqlConnection cn = new SqlConnection("Server='Server_Name';Database='Database_Name';Trusted_Connection=True;"))
    {
        cn.Open();

        using (var reader = new Microsoft.VisualBasic.FileIO.TextFieldParser("C:\\Users\\Excel\\Desktop\\OrdersTest.csv"))
        {
            reader.TextFieldType = Microsoft.VisualBasic.FileIO.FieldType.Delimited;
            reader.Delimiters = new string[] { "," };
            while (!reader.EndOfData)
            {
                try
                {
                    string[] currentRow = reader.ReadFields();
                    DataRow dr = dt.NewRow();
                    for (int currColumn = 0; currColumn <= dt.Columns.Count - 1; currColumn++)
                    {
                        string sqlColumnDataType = dt.Columns[currColumn].DataType.Name;
                        // NOTE(review): no "Int32" case — the int columns
                        // (OrderID, EmployeeID, ShipVia) are never populated;
                        // confirm whether that is intended.
                        switch (sqlColumnDataType)
                        {
                            case "String":
                                dr[currColumn] = string.IsNullOrEmpty(currentRow[currColumn])
                                    ? ""
                                    : Convert.ToString(currentRow[currColumn]);
                                break;
                            case "Decimal":
                                dr[currColumn] = string.IsNullOrEmpty(currentRow[currColumn])
                                    ? 0m
                                    : Convert.ToDecimal(currentRow[currColumn]);
                                break;
                            case "DateTime":
                                // "" cannot be stored in a DateTime column
                                // (the original's assignment would throw);
                                // use DBNull for missing dates.
                                if (string.IsNullOrEmpty(currentRow[currColumn]))
                                {
                                    dr[currColumn] = DBNull.Value;
                                }
                                else
                                {
                                    dr[currColumn] = Convert.ToDateTime(currentRow[currColumn]);
                                }
                                break;
                        }
                    }
                    dt.Rows.Add(dr);
                }
                catch (Microsoft.VisualBasic.FileIO.MalformedLineException ex)
                {
                    // ex.Message carries the offending line number.
                    Interaction.MsgBox("Line " + ex.Message + "is not valid." + Constants.vbCrLf + "Terminating Read Operation.");
                    return;
                }
            }
        }

        using (SqlBulkCopy copy = new SqlBulkCopy(cn))
        {
            copy.DestinationTableName = "[dbo].[Orders]";
            copy.WriteToServer(dt);
        }
    }
}
/// <summary>
/// Splits one CSV line into fields, honoring quoted fields.
/// </summary>
/// <param name="line">The raw CSV line.</param>
/// <returns>The parsed fields, or null when the line is empty or malformed.</returns>
private static string[] GetFields(string line)
{
    try
    {
        // Dispose (equivalent to Close) runs via using on every path.
        using (var parser = new TextFieldParser(new StringReader(line)) { HasFieldsEnclosedInQuotes = true })
        {
            parser.SetDelimiters(",");
            return parser.EndOfData ? null : parser.ReadFields();
        }
    }
    catch (Exception)
    {
        // Malformed input is signalled with null rather than an exception.
        return null;
    }
}
/// <summary>
/// Loads the ranking screen's fonts, textures and the ranking CSV for the
/// selected stage size.
/// </summary>
private void LoadContent()
{
    #region Font loading
    font = content.Load<SpriteFont>("Ranking/RankFont");
    fontmath = content.Load<SpriteFont>("Ranking/RankFontMath");
    #endregion

    #region Texture loading
    background = content.Load<Texture2D>("Ranking/background");
    ranking = content.Load<Texture2D>("Ranking/ranking");
    rankingS = content.Load<Texture2D>("Ranking/rankingS");
    rankingR = content.Load<Texture2D>("Ranking/rankingR");
    rankingB = content.Load<Texture2D>("Ranking/rankingB");
    rankbar1 = content.Load<Texture2D>("Ranking/rankbar1");
    rankbar2 = content.Load<Texture2D>("Ranking/rankbar2");
    rankbar3 = content.Load<Texture2D>("Ranking/rankbar3");
    rankbarnow = content.Load<Texture2D>("Ranking/rankbarnow");
    name = content.Load<Texture2D>("Ranking/name");
    time = content.Load<Texture2D>("Ranking/time");
    newrecord = content.Load<Texture2D>("Ranking/newrecord");
    #endregion

    #region CSV file loading
    // Pick the Shift_JIS-encoded ranking file matching the selected stage size.
    TextFieldParser parser;
    switch (selectStage)
    {
        case StageSelect.SelectStage.Small:
            parser = new TextFieldParser(@"../../../../MazeEscaperContent/Ranking/RankerS.csv", System.Text.Encoding.GetEncoding("Shift_JIS"));
            break;
        case StageSelect.SelectStage.Regular:
            parser = new TextFieldParser(@"../../../../MazeEscaperContent/Ranking/RankerR.csv", System.Text.Encoding.GetEncoding("Shift_JIS"));
            break;
        case StageSelect.SelectStage.Big:
            parser = new TextFieldParser(@"../../../../MazeEscaperContent/Ranking/RankerB.csv", System.Text.Encoding.GetEncoding("Shift_JIS"));
            break;
        default:
            parser = new TextFieldParser(@"../../../../MazeEscaperContent/Ranking/Ranker.csv", System.Text.Encoding.GetEncoding("Shift_JIS"));
            break;
    }
    parser.TextFieldType = FieldType.Delimited;
    // The delimiter is a comma.
    parser.SetDelimiters(",");
    // Read until the end of the file.
    // NOTE(review): each outer iteration reads exactly 3 rows of 2 columns
    // into csvstr; a file whose row count is not a multiple of 3 makes
    // ReadFields() return null and this throws — confirm the CSV always has
    // exactly 3 rows.
    while (!parser.EndOfData)
    {
        for (int i = 0; i < 3; i++)
        {
            string[] row = parser.ReadFields();
            for (int j = 0; j < 2; j++)
            {
                csvstr[i, j] = row[j];
            }
        }
    }
    parser.Close();
    #endregion
}
// Verifies that closing an already-closed parser is harmless (no exception).
public void CloseTest()
{
    var parser = new TextFieldParser(new System.IO.MemoryStream());
    parser.Close();
    parser.Close();
}
/// <summary>
/// Fills CSVTable (whose columns define the expected types) from a CSV file.
/// Rewritten into valid C#: the original was a literal VB translation
/// (`My.Computer...`, `Columns(i)`, `dr.Item(i)`, undeclared `currColumn`)
/// and did not compile.
/// </summary>
/// <param name="CSVFileName">Path of the CSV file.</param>
/// <param name="CSVTable">Table with a predefined schema; receives the rows.</param>
/// <returns>False when a malformed line terminates the read; true otherwise.</returns>
public static bool GetCsvData(string CSVFileName, ref DataTable CSVTable)
{
    using (var reader = new Microsoft.VisualBasic.FileIO.TextFieldParser(CSVFileName))
    {
        reader.TextFieldType = Microsoft.VisualBasic.FileIO.FieldType.Delimited;
        reader.Delimiters = new string[] { "," };
        while (!reader.EndOfData)
        {
            try
            {
                string[] currentRow = reader.ReadFields();
                DataRow dr = CSVTable.NewRow();
                for (int currColumn = 0; currColumn <= CSVTable.Columns.Count - 1; currColumn++)
                {
                    string sqlColumnDataType = CSVTable.Columns[currColumn].DataType.Name;
                    // NOTE(review): no "Int32" case — integer columns are left
                    // unset; confirm whether that is intended.
                    switch (sqlColumnDataType)
                    {
                        case "String":
                            dr[currColumn] = string.IsNullOrEmpty(currentRow[currColumn])
                                ? ""
                                : Convert.ToString(currentRow[currColumn]);
                            break;
                        case "Decimal":
                            dr[currColumn] = string.IsNullOrEmpty(currentRow[currColumn])
                                ? 0m
                                : Convert.ToDecimal(currentRow[currColumn]);
                            break;
                        case "DateTime":
                            // "" cannot be stored in a DateTime column (the
                            // original's assignment would throw); use DBNull.
                            if (string.IsNullOrEmpty(currentRow[currColumn]))
                            {
                                dr[currColumn] = DBNull.Value;
                            }
                            else
                            {
                                dr[currColumn] = Convert.ToDateTime(currentRow[currColumn]);
                            }
                            break;
                    }
                }
                CSVTable.Rows.Add(dr);
            }
            catch (Microsoft.VisualBasic.FileIO.MalformedLineException ex)
            {
                // ex.Message carries the offending line number.
                Interaction.MsgBox("Line " + ex.Message + "is not valid." + Constants.vbCrLf + "Terminating Read Operation.");
                return (false);
            }
        }
    }
    return (true);
}
/// <summary>
/// Parses a comma-delimited CSV whose first line is a header row, adding one
/// heading-to-value dictionary per data row to RateItems.
/// </summary>
/// <param name="filename">Path of the CSV file.</param>
/// <returns>False when the file is missing or unparsable; true otherwise.</returns>
private bool ParseCSVFile(string filename)
{
    if (!File.Exists(filename)) return false;

    string[] fieldHeadings = new string[]{};
    try
    {
        // using releases the file handle even when a row throws; the
        // original only reached Close() on the success path.
        using (TextFieldParser parser = new TextFieldParser(filename))
        {
            parser.TextFieldType = FieldType.Delimited;
            parser.SetDelimiters(",");
            while (!parser.EndOfData)
            {
                // processing fields in row
                string[] fields = parser.ReadFields();

                // get header fields
                // line number increments after first read
                if (parser.LineNumber == 2)
                {
                    fieldHeadings = fields;
                    continue;
                }

                // skip rows whose field count disagrees with the header
                if (fields.Count() != fieldHeadings.Count()) continue;

                // map heading -> value for this row
                int index = 0;
                var rateItem = new Dictionary<string, string>();
                foreach (string field in fields)
                {
                    rateItem.Add(fieldHeadings[index], field);
                    index++;
                }

                // add to list of items
                RateItems.Add(rateItem);
            }
        }
    }
    catch
    {
        // any parse failure is reported as an unparsable file
        return false;
    }
    return true;
}
// Imports trades from an XLS stream (converted to CSV in memory), parses each
// row via FromString, then aggregates "equal" trades into single positions.
// Equality depends on product type: swaps/FRAs compare currency, maturity and
// the description prefix before '('; FX options compare the DPE description;
// everything else compares the full description.
public IList<Trade> Import(Stream stream, Market market, IList<Exception> exceps)
{
    var trades = new List<Trade>();
    // need to convert stream first
    var CsvStream = new MemoryStream();
    DocumentConvererHelper.XlsToExt(stream, CsvStream, "csv", sheetIndex: 1);
    CsvStream.Position = 0;
    var parser = new TextFieldParser(CsvStream) { HasFieldsEnclosedInQuotes = true };
    parser.SetDelimiters(Separator);
    var emptyline = parser.ReadLine(); // skip first line
    var titles = parser.ReadFields();
    if (titles == null) return trades;
    //Build Positions
    // Header lookup: lower-cased column title -> column index.
    var idx = 0;
    var headers = new Dictionary<string, int>();
    foreach (var s in titles) headers[s.ToLowerInvariant()] = idx++;
    var cultInfo = GetCultureInfo(_params);
    // As-of date defaults to today, overridable via the "AsOfDate" parameter.
    var asOfDate = SimpleDate.Today;
    if (_params != null && _params.ContainsKey("AsOfDate"))
    {
        var ss = _params["AsOfDate"];
        asOfDate = SimpleDate.Parse(ss);
    }
    // Parse every data row; FromString returns null for rows it rejects.
    while (!parser.EndOfData)
    {
        var items = parser.ReadFields();
        var trade = FromString(items, headers, exceps, market, cultInfo, asOfDate);
        if (trade != null) trades.Add(trade);
    }
    parser.Close();
    //Aggregate trades
    var allTrades = new List<Trade>();
    foreach (var tr in trades)
    {
        bool found = false;
        foreach (var vv in allTrades)
        {
            // Candidate must at least share the book.
            if (vv.BookId != tr.BookId) continue;
            var vvDpeDesc = vv.GetProperty(DPEDescription);
            var trDpeDesc = tr.GetProperty(DPEDescription);
            if (vv.Product.PricingType.Equals("Swap") || vv.Product.PricingType.Equals("MTMCurrencySwap") || vv.Product.PricingType.Equals("FRA"))
            {
                // Swaps/FRAs: match on currency, maturity and the description
                // prefix before the first '('.
                if (vv.Product.Currency != tr.Product.Currency) continue;
                if (vv.Product.ContractMaturity != tr.Product.ContractMaturity) continue;
                var splits = vv.Product.Description.Split('(');
                var osplits = tr.Product.Description.Split('(');
                if (splits.Length > 0 && osplits.Length > 0)
                {
                    if (splits[0] != osplits[0]) continue;
                }
                else continue;
            }
            else if (vv.Product is FXOption && vvDpeDesc != null && trDpeDesc != null)
            {
                // FX options: match on the DPE description when both have one.
                if (!vvDpeDesc.Equals(trDpeDesc)) continue;
            }
            else if (vv.Product.Description != tr.Product.Description) continue;
            // Match found: fold tr's amounts into the existing position.
            vv.Quantity += tr.Quantity;
            vv.SettleAmount += tr.SettleAmount;
            if (vv.Product is SymmetryProduct && tr.Product is SymmetryProduct)
            {
                // NOTE(review): adds vv's detail quantity into tr (the trade
                // being discarded), not the other way round — confirm intended.
                (tr.Product as SymmetryProduct).DetailQuantity += (vv.Product as SymmetryProduct).DetailQuantity;
            }
            if (vv.Product is FX)
            {
                var fx1 = vv.Product as FX;
                var fx2 = tr.Product as FX;
                fx1.PrimaryAmount += fx2.PrimaryAmount;
                fx1.QuotingAmount += fx2.QuotingAmount;
            }
            found = true;
            break;
        }
        if (!found) allTrades.Add(tr);
    }
    return allTrades;
}
// TODO: Move to common static method to share with IMDb
/// <summary>
/// Parses a comma-delimited CSV whose first record is a header row into a
/// list of heading-to-value dictionaries, one per data record.
/// </summary>
/// <param name="aFilename">Path of the CSV file.</param>
/// <param name="aParsedCSV">Receives the parsed records.</param>
/// <returns>False when the file is missing or unparsable; true otherwise.</returns>
private bool ParseCSVFile(string aFilename, out List<Dictionary<string, string>> aParsedCSV)
{
    aParsedCSV = new List<Dictionary<string, string>>();

    if (!File.Exists(aFilename)) return false;

    string[] lFieldHeadings = new string[] { };
    int lRecordNumber = 0;
    try
    {
        // using releases the file handle even when a record throws; the
        // original only reached Close() on the success path.
        using (var lParser = new TextFieldParser(aFilename) { TextFieldType = FieldType.Delimited })
        {
            lParser.SetDelimiters(",");
            while (!lParser.EndOfData)
            {
                lRecordNumber++;

                // processing fields in row
                string[] lFields = lParser.ReadFields();

                // the first record carries the column headings
                if (lRecordNumber == 1)
                {
                    lFieldHeadings = lFields;
                    continue;
                }

                // skip records whose field count disagrees with the header
                if (lFields.Count() != lFieldHeadings.Count()) continue;

                // map heading -> value for this record
                int lIndex = 0;
                var lExportItem = new Dictionary<string, string>();
                foreach (string field in lFields)
                {
                    lExportItem.Add(lFieldHeadings[lIndex], field);
                    lIndex++;
                }

                // add to list of items
                aParsedCSV.Add(lExportItem);
            }
        }
    }
    catch
    {
        // any parse failure is reported as an unparsable file
        return false;
    }
    return true;
}
// Handles an uploaded multipart request containing one or more waypoint CSV files.
// Each part is parsed for the columns WP_ (id), Latitude, Longitude, x_proj and
// y_proj; every data row becomes a {lat, long, x, y} record keyed by the integer
// waypoint id. The merged dictionary is returned to the caller as JSON.
// Returns the error() response when the content is not multipart, a required
// column is missing, or a data row fails to parse.
public Task<HttpResponseMessage> HandleWaypoints()
{
    var provider = new MultipartMemoryStreamProvider();
    var task = Request.Content.ReadAsMultipartAsync(provider).ContinueWith(o =>
    {
        // NOTE(review): this multipart check runs inside the continuation, i.e.
        // after ReadAsMultipartAsync has already been started on the content --
        // consider checking before reading. Confirm intent.
        if (!Request.Content.IsMimeMultipartContent())
            return error("Uploaded filed does not look like a waypoints file");

        var data = new Dictionary<string, Dictionary<string, string>>();
        foreach (var contents in provider.Contents)
        {
            // NOTE(review): blocks synchronously on the async stream read
            // (csvData.Wait()/.Result) inside a continuation -- works here but
            // is a known deadlock/starvation hazard on sync-context frameworks.
            var csvData = contents.ReadAsStreamAsync();
            csvData.Wait();
            var res = csvData.Result;
            var parser = new TextFieldParser(res);
            var readingHeaderRow = true;
            // Column indices for the fields we need; -1 means "not found yet".
            var id = -1;
            var lat = -1;
            var lng = -1;
            var x = -1;
            var y = -1;
            parser.TextFieldType = Microsoft.VisualBasic.FileIO.FieldType.Delimited;
            parser.SetDelimiters(",");
            while (!parser.EndOfData)
            {
                var fields = parser.ReadFields();
                // Figure out which fields we want
                if (readingHeaderRow)
                {
                    // Locate each required column by its exact header name.
                    var ctr = 0;
                    foreach (var f in fields)
                    {
                        if (f == "WP_") id = ctr;
                        else if (f == "Longitude") lng = ctr;
                        else if (f == "Latitude") lat = ctr;
                        else if (f == "x_proj") x = ctr;
                        else if (f == "y_proj") y = ctr;
                        ctr++;
                    }
                    // Any missing column aborts the whole upload with an error.
                    if (id == -1) return error("Could not find waypoint id column");
                    if (lat == -1) return error("Could not find lat column");
                    if (lng == -1) return error("Could not find long column");
                    if (x == -1) return error("Could not find projected x column");
                    if (y == -1) return error("Could not find projected y column");
                    readingHeaderRow = false;
                }
                else // Parse a data row
                {
                    try
                    {
                        var dict = new Dictionary<string, string>();
                        dict["lat"] = fields[lat];
                        dict["long"] = fields[lng];
                        dict["x"] = fields[x];
                        dict["y"] = fields[y];
                        // Ids look like 1.00000.... sanitize it to 1
                        data[int.Parse(fields[id], NumberStyles.Any).ToString()] = dict; // Will clobber data if ids are reused
                    }
                    catch
                    {
                        // Any malformed row (bad id, short row) fails the upload.
                        return error("Could not parse waypoint file");
                    }
                }
            }
            parser.Close();
        }
        // Serialize the accumulated waypoints as JSON (text/plain content type).
        var resp = new HttpResponseMessage(HttpStatusCode.OK);
        resp.Content = new StringContent(JsonConvert.SerializeObject(data), System.Text.Encoding.UTF8, "text/plain");
        return resp;
    });
    return task;
}
// Validates every record of the input data file against the zip-to-region lookup
// table. Records whose zip+region pair exists in the table are copied to an
// "_EDITED" version of the input file; mismatches are written to a separate
// mismatch file. Afterwards the surviving-record percentage is compared to the
// mismatch threshold: at or below it the job is aborted and a HIGHMARK email is
// sent. On success, strInputDataName is repointed at the edited file so
// downstream steps consume the filtered data.
// Returns false on any error, missing Zip column, or threshold breach.
static bool VerifyZipAndRegion()
{
    //////////////////////////////////////////////////////////////////////////////////////////
    /////////////////////////////////////////////////////////////////// INITIALIZING VARIABLES
    //////////////////////////////////////////////////////////////////////////////////////////
    string[] aryTableParsedLine;
    string[] aryDataParsedLine;
    string strDataLine;
    string strTableLine;
    string strEditedDataFile = strInputDataName.ToUpper().Replace(".CSV", "_EDITED.CSV");
    string strCurrentZip;
    string strDataZipAndRegion;
    string strTableZipAndRegion;
    int iZipFieldIndex = 0;
    int iDataFields;
    bool bolFoundRegionMatch;
    int iInputRecords;
    int iEditedRecords;
    int iMismatchRecords;
    bool bolZipFieldFound = false;

    strRegionErrorsRemoved = strWorkingJobFolder + strJobNumber + " - Region Mismatches Removed.csv";

    // Two independent readers over the input: streamInitialFileScan for the header
    // scan, parseDataFile for the record loop.
    StreamReader streamInitialFileScan = new StreamReader(strInputDataName);
    StreamReader streamTableFile = new StreamReader(strZipToRegionTable);
    StreamWriter streamEditedDataFile = new StreamWriter(strEditedDataFile);
    StreamWriter streamRegionMismatches = new StreamWriter(strRegionErrorsRemoved);
    TextFieldParser parseDataFile = new TextFieldParser(strInputDataName);
    parseDataFile.TextFieldType = FieldType.Delimited;
    parseDataFile.SetDelimiters(",");

    try
    {
        //////////////////////////////////////////////////////////////////////////////////////////
        ////////////////////////// DETERMINING WHICH FIELD IN THE INPUT DATA CONTAINS THE ZIP CODE
        //////////////////////////////////////////////////////////////////////////////////////////
        // First header cell containing "ZIP" (case-insensitive) wins; the header
        // line is copied to the edited file as soon as it is found.
        strDataLine = streamInitialFileScan.ReadLine();
        aryDataParsedLine = strDataLine.Split(',');
        iDataFields = aryDataParsedLine.Length;
        for (int j = 0; j < iDataFields; j++)
        {
            if (aryDataParsedLine[j].ToString().ToUpper().Contains("ZIP"))
            {
                bolZipFieldFound = true;
                streamEditedDataFile.WriteLine(strDataLine);
                iZipFieldIndex = j;
                break;
            }
        }
        streamInitialFileScan.Close();
        streamInitialFileScan.Dispose();

        // Verifying that a zip code field exists in the input data file.
        if (!bolZipFieldFound)
        {
            LogFile("A Zip field is not included in the input data file.", true);
            return false;
        }

        //////////////////////////////////////////////////////////////////////////////////////////
        ///////////////////////////////////////// TESTING EACH RECORD AGAINST THE ZIP-REGION TABLE
        //////////////////////////////////////////////////////////////////////////////////////////
        // NOTE(review): parseDataFile starts at the top of the file, so its first
        // iteration re-reads the HEADER row and runs it through the region match
        // (likely landing it in the mismatch file) -- confirm this is intended.
        // NOTE(review): the lookup table is re-scanned from the start for every
        // record (O(records x table rows)); preloading it into a HashSet would
        // avoid the repeated rewinds below.
        while (!parseDataFile.EndOfData)
        {
            bolFoundRegionMatch = false;
            // PeekChars captures the raw line so it can be echoed verbatim to the
            // output files; ReadFields then consumes and parses the same line.
            strDataLine = parseDataFile.PeekChars(Int32.MaxValue);
            aryDataParsedLine = parseDataFile.ReadFields();

            // Capturing the zip and region combination from the current record.
            strCurrentZip = aryDataParsedLine[iZipFieldIndex].ToString().Trim();
            // Trim ZIP+4 style values down to the leading 5-digit zip.
            if (strCurrentZip.Length > 5)
            {
                strCurrentZip = strCurrentZip.Substring(0,5);
            }
            strDataZipAndRegion = strCurrentZip + strRegionCode;

            // Looping through the Zip and Region Lookup Table to see if a zip is within a valid region.
            while (!streamTableFile.EndOfStream)
            {
                strTableLine = streamTableFile.ReadLine();
                aryTableParsedLine = strTableLine.Split(',');
                // Table layout: column 0 = zip, column 2 = region code (assumed
                // from usage; confirm against the table file).
                strTableZipAndRegion = aryTableParsedLine[0].ToString() + aryTableParsedLine[2].ToString().ToUpper().Trim();
                if (strDataZipAndRegion == strTableZipAndRegion)
                {
                    bolFoundRegionMatch = true;
                    break;
                }
            }

            // Matched records survive into the edited file; others are quarantined.
            if (bolFoundRegionMatch)
            {
                streamEditedDataFile.WriteLine(strDataLine);
            }
            else
            {
                streamRegionMismatches.WriteLine(strDataLine);
            }

            // Rewind the lookup table for the next record.
            streamTableFile.DiscardBufferedData();
            streamTableFile.BaseStream.Position = 0;
        }
    }
    catch (Exception exception)
    {
        LogFile(exception.ToString(), true);
        return false;
    }
    finally
    {
        // Always release every stream, success or failure.
        streamEditedDataFile.Close();
        streamEditedDataFile.Dispose();
        streamTableFile.Close();
        streamTableFile.Dispose();
        streamRegionMismatches.Close();
        streamRegionMismatches.Dispose();
        parseDataFile.Close();
        parseDataFile.Dispose();
    }

    //////////////////////////////////////////////////////////////////////////////////////////
    /////////////////////////////////// DETERMINING IF THE HIGHMARK THRESHOLD HAS BEEN REACHED
    //////////////////////////////////////////////////////////////////////////////////////////
    try
    {
        // Record counts exclude the header line (hence the -1).
        // Calculating total number of input records.
        iInputRecords = File.ReadAllLines(strInputDataName).Length - 1;
        // Calculating total number of edited records.
        iEditedRecords = File.ReadAllLines(strEditedDataFile).Length - 1;
        // Calculating total number of mismatch records.
        iMismatchRecords = File.ReadAllLines(strRegionErrorsRemoved).Length - 1;

        // NOTE(review): this compares the percentage of SURVIVING (edited)
        // records against the mismatch threshold -- verify the <= direction
        // matches the intent stated in the log message below.
        if ((((decimal)iEditedRecords / (decimal)iInputRecords) * 100) <= iZipToRegionMismatchThreshold)
        {
            bolRegionMismatchThresholdReached = true;
            SendEmail("HIGHMARK");
            LogFile("At least " + (100 - iZipToRegionMismatchThreshold).ToString() + "% of records submitted for processing were removed as Region-Zip mismatches.", true);
            return false;
        }
        else
        {
            // Below threshold: still email when more than one mismatch was
            // removed (NOTE(review): condition is > 1, not > 0 -- confirm).
            if (iMismatchRecords > 1)
            {
                SendEmail("HIGHMARK");
            }
        }
    }
    catch (Exception exception)
    {
        LogFile(exception.ToString(), true);
        return false;
    }

    // Downstream processing now consumes the filtered (edited) file.
    strInputDataName = strEditedDataFile;
    return true;
}
// Reads a delimited CSV file into a list of header-keyed records.
// The first row supplies the column names; any row whose field count differs
// from the header is skipped. Every record is additionally tagged with the
// provider marker before being collected.
// Returns false when the file is missing or parsing throws; true otherwise.
private bool ParseCSVFile(string filename, out List<Dictionary<string, string>> parsedCSV)
{
    parsedCSV = new List<Dictionary<string, string>>();

    if (!File.Exists(filename))
        return false;

    var headings = new string[] { };
    var rowNumber = 0;
    try
    {
        var csvReader = new TextFieldParser(filename) { TextFieldType = FieldType.Delimited };
        csvReader.SetDelimiters(",");

        while (!csvReader.EndOfData)
        {
            rowNumber++;
            var rowFields = csvReader.ReadFields();

            // The first row carries the column headings.
            if (rowNumber == 1)
            {
                headings = rowFields;
                continue;
            }

            // Drop malformed rows whose width disagrees with the header.
            if (rowFields.Length != headings.Length)
                continue;

            // Pair each value with its heading.
            var record = new Dictionary<string, string>();
            for (var col = 0; col < rowFields.Length; col++)
            {
                record.Add(headings[col], rowFields[col]);
            }

            // Set provider to web or csv
            record.Add(IMDbFieldMapping.cProvider, "csv");

            parsedCSV.Add(record);
        }
        csvReader.Close();
    }
    catch
    {
        return false;
    }
    return true;
}