/// <summary>
/// Loads a simple CSV file entirely as strings; the DataDimension layer must handle
/// conversion of data types (rows must share a type in order to be extracted).
/// </summary>
/// <param name="path">Path of the CSV file.</param>
/// <param name="tableName">Key under which the table is stored in the data dictionary.</param>
/// <param name="delimiter">Separator between cells.</param>
/// <param name="firstRowHeader">True if the first row contains the headers.</param>
private void LoadDataServer(string path, string tableName, char delimiter, bool firstRowHeader = true)
{
    // FIX: GenericParserAdapter is IDisposable; the original never disposed it,
    // leaking the underlying file handle.
    using (var parser = new GenericParsing.GenericParserAdapter(path)
    {
        ColumnDelimiter = delimiter,
        FirstRowHasHeader = firstRowHeader
    })
    {
        data[tableName] = parser.GetDataTable();
    }
}
/// <summary>
/// Reads the CSV named in the filename text box row by row, builds a backslash-joined
/// path from each row's cells, resolves it via the support service, and reports
/// progress on the UI label.
/// </summary>
private void DoImport()
{
    if (String.IsNullOrEmpty(txtFilename.Text))
    {
        ErrorMessage.Show("You must select a file before proceeding!");
        return;
    }

    int rowCount = 0;
    var service = new SupportService(User);
    using (var parser = new GenericParserAdapter(txtFilename.Text))
    {
        parser.ColumnDelimiter = ',';
        parser.FirstRowHasHeader = chkFirstRowHeaders.IsChecked.GetValueOrDefault(false);
        parser.TextQualifier = '\"';
        parser.FirstRowSetsExpectedColumnCount = true;

        // NOTE(review): columnNames is populated but never read afterwards —
        // kept for parity with the sibling CreateRowSource implementation.
        var columnNames = new List<String>();
        var values = new List<string>();
        while (parser.Read())
        {
            if (rowCount == 0)
            {
                for (int i = 0; i < parser.ColumnCount; ++i)
                {
                    if (parser.FirstRowHasHeader)
                    {
                        columnNames.Add(parser.GetColumnName(i));
                    }
                    else
                    {
                        columnNames.Add("Column" + i);
                    }
                }
            }

            values.Clear();
            for (int i = 0; i < parser.ColumnCount; ++i)
            {
                values.Add(parser[i]);
            }

            // BUG FIX: the original read values[0] when values.Count == 0, which is a
            // guaranteed IndexOutOfRangeException. A single cell is used as-is;
            // multiple cells are joined into a backslash-separated path.
            String strFullPath;
            if (values.Count == 1)
            {
                strFullPath = values[0];
            }
            else
            {
                strFullPath = values.Join("\\");
            }

            if (!String.IsNullOrWhiteSpace(strFullPath))
            {
                service.GetDistributionIdFromPath(strFullPath);
                lblProgress.InvokeIfRequired(() =>
                {
                    lblProgress.Content = strFullPath;
                    lblProgress.UpdateLayout();
                });
                rowCount++;
            }
        }
    }

    lblProgress.InvokeIfRequired(() =>
    {
        lblProgress.Content = String.Format("{0} rows processed.", rowCount);
    });
    btnCancel.Content = "_Close";
}
/// <summary>
/// Reads a CSV file into a raw DataTable and converts it to the batch-summary shape.
/// </summary>
/// <param name="filename">Path of the CSV file to read.</param>
/// <returns>The converted DataTable produced by <see cref="BatchSummaryConverter"/>.</returns>
public DataTable ReadCsvAndConvertToDataTable(string filename)
{
    // FIX: dispose the adapter (it holds the file open); the original leaked it.
    using (var adapter = new GenericParsing.GenericParserAdapter(filename))
    {
        DataTable originalDataTable = adapter.GetDataTable();
        var batchSummaryConverter = new BatchSummaryConverter();
        return batchSummaryConverter.Convert(originalDataTable);
    }
}
/// <summary>
/// Reads a CSV file and returns a DataTable.
/// </summary>
/// <param name="path">Path of the CSV file (relative paths are resolved).</param>
/// <param name="isFirstRowHeader">True if the first row contains the column headers.</param>
/// <returns>The parsed table.</returns>
public static DataTable GetDataTableFromCsv(string path, bool isFirstRowHeader)
{
    path = Path.GetFullPath(path);
    // BUG FIX: the original hard-coded FirstRowHasHeader = true and ignored the
    // isFirstRowHeader parameter. Also dispose the adapter (it was leaked).
    using (var adapter = new GenericParsing.GenericParserAdapter(path, Encoding.ASCII))
    {
        adapter.FirstRowHasHeader = isFirstRowHeader;
        return adapter.GetDataTable();
    }
}
/// <summary>
/// Runs the configured R script to completion, then parses the CSV it produced
/// and logs the first cell as a sanity check.
/// </summary>
private void Start()
{
    process = GetProcess(configData.RScriptExexutablePath, configData.ScriptPath, Application.dataPath);
    process.Start();
    // Block until the script has written its output file.
    process.WaitForExit();

    // FIX: dispose the parser; the original leaked the file handle.
    using (var parser = new GenericParsing.GenericParserAdapter(configData.DataSavePath)
    {
        ColumnDelimiter = configData.DataDelimiter
    })
    {
        System.Data.DataTable dsResult = parser.GetDataTable();
        Debug.Log(dsResult.Rows[0][0]);
    }
}
/// <summary>
/// Ensures dtData has a column for every parsed column name, prepending the
/// synthetic file-line-number column exactly once when that option is enabled.
/// Advances m_intCreatedColumns to track how many columns have been materialized.
/// </summary>
private void CreateDataTableColumns(DataTable dtData)
{
    // The line-number column is only added before any data column exists.
    if (this.m_blnIncludeFileLineNumber && (this.m_intCreatedColumns < 1))
    {
        dtData.Columns.Add(GenericParserAdapter.FILE_LINE_NUMBER);
    }

    // Append every column name not yet turned into a DataColumn.
    while (this.m_intCreatedColumns < this.m_lstColumnNames.Count)
    {
        GenericParserAdapter.AddColumnToTable(dtData, this.m_lstColumnNames[this.m_intCreatedColumns]);
        ++this.m_intCreatedColumns;
    }
}
/// <summary>
/// Streams the CSV named in the options into a freshly created import staging table
/// (one transaction for the whole load) and returns a row source over it.
/// </summary>
/// <param name="progress">Progress observer (currently unused by this implementation).</param>
/// <returns>A row source wrapping the staging service and the number of rows loaded.</returns>
/// <exception cref="InvalidOperationException">Thrown when no options were supplied.</exception>
public override ImportRowSource CreateRowSource(IProgressObserver progress)
{
    if (_options == null)
    {
        // FIX: throw a specific exception type instead of bare Exception;
        // existing callers catching Exception still work.
        throw new InvalidOperationException("Null or incorrect options type received!");
    }

    ImportRowSource rowsource = null;
    using (var parser = new GenericParserAdapter(_options.Filename))
    {
        parser.ColumnDelimiter = _options.Delimiter[0];
        parser.FirstRowHasHeader = _options.FirstRowContainsNames;
        parser.TextQualifier = '\"';
        parser.FirstRowSetsExpectedColumnCount = true;

        var service = new ImportStagingService();
        var columnNames = new List<String>();
        int rowCount = 0;
        service.BeginTransaction();
        var values = new List<string>();
        while (parser.Read())
        {
            // The staging table is created lazily from the first row's columns.
            if (rowCount == 0)
            {
                for (int i = 0; i < parser.ColumnCount; ++i)
                {
                    if (_options.FirstRowContainsNames)
                    {
                        columnNames.Add(parser.GetColumnName(i));
                    }
                    else
                    {
                        columnNames.Add("Column" + i);
                    }
                }
                service.CreateImportTable(columnNames);
            }

            values.Clear();
            for (int i = 0; i < parser.ColumnCount; ++i)
            {
                values.Add(parser[i]);
            }
            service.InsertImportRow(values);
            rowCount++;
        }
        service.CommitTransaction();
        rowsource = new ImportRowSource(service, rowCount);
    }

    return rowsource;
}
/// <summary>
/// Lets the user pick a file, then parses it using the prediction-format layout
/// definition and publishes the first result table for display.
/// </summary>
private void Button_Click(object sender, RoutedEventArgs e)
{
    var dialog = new System.Windows.Forms.OpenFileDialog();
    if (dialog.ShowDialog() != System.Windows.Forms.DialogResult.OK)
    {
        return;
    }

    tbFileName.Text = dialog.FileName;
    using (var parser = new GenericParserAdapter(tbFileName.Text))
    {
        // Column layout comes from the saved parser configuration file.
        parser.Load("prediction-format.xml");
        System.Data.DataSet dsResult = parser.GetDataSet();
        DisplayData = dsResult.Tables[0];
        PopulateFromAndTo();
    }
}
/// <summary>
/// Loads the CSV bin profile, transforms each row's L*a*b* coordinate through the
/// navigation matrix, and fills the matching bin in dProfile with the row's RGB,
/// measured Lab and measured XYZ values.
/// </summary>
/// <param name="dProfile">3-D bin grid to populate.</param>
/// <param name="navigationMatrix">Transform applied to each Lab point before bin lookup.</param>
private static void PopulateProfile(Bin[, ,] dProfile, Matrix3D navigationMatrix)
{
    DataTable dataFile = new DataTable();
    try
    {
        // add the csv bin file
        using (GenericParserAdapter parser = new GenericParserAdapter(PATH_TO_BIN_PROFILE))
        {
            System.Data.DataSet dsResult = parser.GetDataSet();
            dataFile = dsResult.Tables[0];
        }
    }
    catch (Exception ex)
    {
        // FIX: the original swallowed the exception silently; log it like the
        // sibling CorrectColour routine does. Load stays best-effort — on failure
        // the empty table simply yields no rows below.
        Console.WriteLine(ex);
    }

    // Row 0 is skipped — presumably a header row; TODO confirm against the CSV.
    for (int i = 1; i < dataFile.Rows.Count; i++)
    {
        // Lab value as read from the profile (columns 0-2).
        Point3D labBin = new Point3D();
        labBin.X = Convert.ToDouble(dataFile.Rows[i][0].ToString());
        labBin.Y = Convert.ToDouble(dataFile.Rows[i][1].ToString());
        labBin.Z = Convert.ToDouble(dataFile.Rows[i][2].ToString());

        // Transformed point used to locate the destination bin.
        Point3D labCoordinate = navigationMatrix.Transform(labBin);
        Bin actualBin = GetProfileBin(dProfile, labCoordinate);

        // Bin RGB value (columns 9-11).
        actualBin.binRGB.X = Convert.ToByte(dataFile.Rows[i][9].ToString());
        actualBin.binRGB.Y = Convert.ToByte(dataFile.Rows[i][10].ToString());
        actualBin.binRGB.Z = Convert.ToByte(dataFile.Rows[i][11].ToString());

        // Measured Lab values (columns 3-5).
        actualBin.measuredLAB.X = Convert.ToDouble(dataFile.Rows[i][3].ToString());
        actualBin.measuredLAB.Y = Convert.ToDouble(dataFile.Rows[i][4].ToString());
        actualBin.measuredLAB.Z = Convert.ToDouble(dataFile.Rows[i][5].ToString());

        // Measured XYZ values (columns 6-8).
        actualBin.measuredXYZ.X = Convert.ToDouble(dataFile.Rows[i][6].ToString());
        actualBin.measuredXYZ.Y = Convert.ToDouble(dataFile.Rows[i][7].ToString());
        actualBin.measuredXYZ.Z = Convert.ToDouble(dataFile.Rows[i][8].ToString());

        // Mark the bin as populated.
        actualBin.isEmpty = false;
    }
}
/// <summary>
/// Returns the decimal coordinate from the source column; when it is empty, falls
/// back to the matching verbatim column, parsing and rounding it to 2 decimal places.
/// </summary>
public override string ExtractValue(BVPImportColumnDefinition coldef, GenericParserAdapter row, Dictionary<string, List<string>> extraData)
{
    var value = row[coldef.SourceColumnName];
    if (!String.IsNullOrEmpty(value))
    {
        return value;
    }

    // Pick the verbatim fallback column for the coordinate being extracted.
    String alternate;
    switch (coldef.SourceColumnName)
    {
        case "decimalLatitude":
            alternate = row["verbatimLatitude"];
            break;
        case "decimalLongitude":
            alternate = row["verbatimLongitude"];
            break;
        default:
            alternate = null;
            break;
    }

    if (!String.IsNullOrEmpty(alternate))
    {
        double? coord = GeoUtils.ParseCoordinate(alternate);
        if (coord.HasValue)
        {
            value = Math.Round(coord.Value, 2).ToString();
        }
    }
    return value;
}
/// <summary>
/// Parses a comma-delimited text file (first row = headers, quote-qualified cells)
/// into a DataSet and names its first table.
/// </summary>
/// <param name="path">Path of the delimited text file.</param>
/// <param name="name">Name assigned to the resulting table.</param>
/// <returns>The populated DataSet.</returns>
public static DataSet loadTxtToDataset(string path, string name)
{
    DataSet ds;

    // Use Generic Txt Parser to load csv to dataset
    using (var parser = new GenericParserAdapter())
    {
        parser.SetDataSource(path);
        parser.ColumnDelimiter = ',';
        parser.FirstRowHasHeader = true;
        parser.TextQualifier = '\"';
        ds = parser.GetDataSet();
    }

    ds.Tables[0].TableName = name;
    return ds;
}
/// <summary>
/// Demo entry point: parses a fixed CSV path into a DataTable.
/// </summary>
static void Main(string[] args)
{
    var pathOfCsvFile = @"C:\MyFile.csv";
    // FIX: dispose the adapter so the file handle is released; the original leaked it.
    using (var adapter = new GenericParsing.GenericParserAdapter(pathOfCsvFile))
    {
        DataTable data = adapter.GetDataTable();
    }
}
/// <summary>
/// Extracts the default value for the column, then maps it through the configured
/// value map using a case-insensitive (lower-cased) key when a mapping exists.
/// </summary>
public override string ExtractValue(BVPImportColumnDefinition coldef, GenericParserAdapter row, Dictionary<string, List<string>> extraData)
{
    var value = Default(coldef, row, extraData);
    if (!String.IsNullOrEmpty(value))
    {
        // FIX: single TryGetValue lookup instead of ContainsKey + indexer,
        // and lower-case the key once instead of twice.
        string mapped;
        if (_valueMap.TryGetValue(value.ToLower(), out mapped))
        {
            value = mapped;
        }
    }
    return value;
}
/// <summary>
/// Extracts the raw cell for the column and normalises it through the
/// ANIC name-format converter.
/// </summary>
public override string ExtractValue(BVPImportColumnDefinition coldef, GenericParserAdapter row, Dictionary<string, List<string>> extraData)
{
    return ANICUtils.ConvertNameFormat(row[coldef.SourceColumnName]);
}
/// <summary>
/// Generates a <see cref="DataTable"/> based on the data stored within
/// the entire data source after it was parsed.
/// </summary>
/// <returns>
/// The <see cref="DataTable"/> containing all of the data in the data
/// source.
/// </returns>
/// <exception cref="InvalidOperationException">
/// Attempting to read without properly setting up the <see cref="GenericParserAdapter"/>.
/// </exception>
/// <exception cref="ParsingException">
/// Thrown in the situations where the <see cref="GenericParserAdapter"/> cannot continue
/// due to a conflict between the setup and the data being parsed.
/// </exception>
/// <example>
/// <code lang="C#" escaped="true">
/// using (GenericParserAdapter p = new GenericParserAdapter(@"C:\MyData.txt"))
///   DataTable dtResults = p.GetDataTable();
/// </code>
/// </example>
public DataTable GetDataTable()
{
    DataRow drRow;
    DataTable dtData;
    int intCreatedColumns, intSkipRowsAtEnd;

    dtData = new DataTable();
    // Suspend constraints/index maintenance while bulk-loading rows.
    dtData.BeginLoadData();

    intCreatedColumns = 0;

    while (this.Read())
    {
        // See if we have the appropriate number of columns.
        // Columns are created lazily: the parser may discover new column names
        // mid-stream, so grow the table whenever more names than columns exist.
        if (this.m_lstColumnNames.Count > intCreatedColumns)
        {
            // Add in our column to store off the file line number.
            // Only done before any data column exists, so it ends up first.
            if (this.m_blnIncludeFileLineNumber && (intCreatedColumns < 1))
            {
                dtData.Columns.Add(GenericParserAdapter.FILE_LINE_NUMBER);
            }

            for (int intColumnIndex = intCreatedColumns; intColumnIndex < this.m_lstColumnNames.Count; ++intColumnIndex, ++intCreatedColumns)
            {
                GenericParserAdapter.AddColumnToTable(dtData, this.m_lstColumnNames[intColumnIndex]);
            }
        }

        // Append the row unless it is empty and empty rows are being skipped.
        if (!this.IsCurrentRowEmpty || !this.SkipEmptyRows)
        {
            drRow = dtData.NewRow();

            if (this.m_blnIncludeFileLineNumber)
            {
                // Column 0 is the file line number; data shifts right by one.
                drRow[0] = this.FileRowNumber;

                // Now, add in the data retrieved from the current row.
                for (int intColumnIndex = 0; intColumnIndex < this.m_lstData.Count; ++intColumnIndex)
                {
                    drRow[intColumnIndex + 1] = this.m_lstData[intColumnIndex];
                }
            }
            else
            {
                // Since we don't have to account for the row number, just place the value right into the data row.
                drRow.ItemArray = this.m_lstData.ToArray();
            }

            dtData.Rows.Add(drRow);
        }
    }

    intSkipRowsAtEnd = this.m_intSkipEndingDataRows;

    // Remove any rows at the end that need to be skipped.
    while ((intSkipRowsAtEnd-- > 0) && (dtData.Rows.Count > 0))
    {
        dtData.Rows.RemoveAt(dtData.Rows.Count - 1);
    }

    dtData.EndLoadData();

    return(dtData);
}
/// <summary>
/// Default extraction: returns the raw cell addressed by the column definition,
/// unmodified.
/// </summary>
protected String Default(BVPImportColumnDefinition columnDef, GenericParserAdapter row, Dictionary<String, List<String>> extraData)
{
    return row[columnDef.SourceColumnName];
}
/// <summary>
/// Builds an AFI (automatic file interchange) banking file plus a CSV of bank notes
/// from the stale-cheque source CSV. Cheques under the "all to fees" threshold are
/// retained entirely as admin fees; larger cheques are returned to IRD minus the
/// standard admin fee, each producing one type-2 transaction record. A type-1 header
/// and type-3 trailer (with an 11-digit hash total) bracket the records.
/// </summary>
private void btnRunAFIGeneration_Click(object sender, EventArgs e)
{
    decimal allToFeesThreshold = nudAllToFeeThreshold.Value;
    decimal stdAdminFee = nudStdAdminFee.Value;
    string originatorName = tbOriginatorName.Text;

    //Read in the source file
    GenericParserAdapter myParser = new GenericParserAdapter();
    myParser.SetDataSource(tbAFISource.Text);
    myParser.FirstRowHasHeader = true;
    myParser.ColumnDelimiter = ',';
    System.Data.DataTable myDataTable = myParser.GetDataTable();

    //Begin constructing the AFI files
    FileStream afiStream = File.Create(tbAFIOutput.Text);
    FileStream csvStreamBankNotes = File.Create(tbBankNotesFile.Text);

    //Add the Header record to our output
    // NOTE(review): header carries hard-coded account and date fields (120330) —
    // TODO confirm these should not be derived from the current run.
    AddText(afiStream, "1,,,,0208280053460003,7,120330,120330,\r\n");

    decimal txnCount = 0.00M;
    string irdBankAccount = "0300490001100027";
    // The hash total is computed from 11 digits of the IRD account (offset 2, length 11).
    decimal hashSectionOfIRDAccount = decimal.Parse(irdBankAccount.Substring(2, 11));
    decimal sumDollars = 0.00M;
    decimal sumAdminDollars = 0.00M;
    decimal sumReturnedDollars = 0.00M;

    //Loop over each row, converting into the required format and writing to file
    foreach (DataRow currRow in myDataTable.Rows)
    {
        decimal amtInDollars = decimal.Parse((string)currRow["Amount"]);
        decimal returnedDollars = 0.00M;
        decimal adminDollars = 0.00M;

        //If under the "all to fees" threshold, the whole amount goes towards admin fees
        if (amtInDollars < allToFeesThreshold)
        {
            returnedDollars = 0.00M;
            adminDollars = amtInDollars;
            sumAdminDollars += adminDollars;
            sumReturnedDollars += returnedDollars;
            sumDollars += amtInDollars;

            //Make appropriate notes to go on file
            // No AFI transaction record is written in this branch — the cheque is
            // too small to return, so only a bank note is emitted.
            AddText(csvStreamBankNotes, currRow["FoundIRD"] + ", [Imported Note] Cheque for $" + String.Format("{0:0.00}", amtInDollars) + " issued on " + currRow["ChequeDate"] + " has gone stale. Is not being returned to IRD as amount is too small. Full amount of $" + String.Format("{0:0.00}", amtInDollars) + " will be retained for admin work.\r\n");
        }
        else //If over the "all to fees" threshold, standard admin charges apply
        {
            adminDollars = stdAdminFee;
            returnedDollars = (amtInDollars - stdAdminFee);
            sumAdminDollars += adminDollars;
            sumReturnedDollars += returnedDollars;
            sumDollars += amtInDollars;

            //Build the transaction
            StringBuilder currTxn = new StringBuilder();
            currTxn.Append("2,");                                                               //Transaction record
            currTxn.Append(irdBankAccount + ",");                                               //Other party bank acct
            currTxn.Append("50,");                                                              //Transaction Code
            currTxn.Append(String.Format("{0:0.00}", returnedDollars).Replace(".", "") + ",");  //Amount to be returned to IRD in cents, as a string
            currTxn.Append("IRD,");                                                             //Other party name
            currTxn.Append(currRow["FoundIRD"] + ",");                                          //Other Party Reference
            currTxn.Append(",");                                                                //Other Party Code
            currTxn.Append(",");                                                                //Other Party Alpha Reference
            currTxn.Append("INC 31032012,");                                                    //Other Party Particulars
            currTxn.Append(originatorName + ",");                                               //Originator Name
            currTxn.Append(",");                                                                //Originator Code
            currTxn.Append(",");                                                                //Originator Reference
            currTxn.Append("");                                                                 //Originator Particulars = blank, last column, so no comma follows
            currTxn.Append("\r\n");                                                             //Terminate the record with forced CrLf, no deferring to Environment

            //Record constructed, write it out
            AddText(afiStream, currTxn.ToString());

            //Make appropriate notes to be imported
            AddText(csvStreamBankNotes, currRow["FoundIRD"] + ", [Imported Note] Cheque for $" + String.Format("{0:0.00}", amtInDollars) + " issued on " + currRow["ChequeDate"] + " is being returned to IRD tonight as has gone stale. $" + String.Format("{0:0.00}", returnedDollars) + " will be returned to IRD and $" + String.Format("{0:0.00}", adminDollars) + " will be retained for admin work.\r\n");

            txnCount += 1;
            currTxn = null;
        }
    }

    //Calculate the hash
    // Hash total = (account-section * transaction count), truncated/padded to 11 digits.
    string hashTotal = String.Format("{0:0}",(hashSectionOfIRDAccount * txnCount));
    if (hashTotal.Length < 11)
        hashTotal = hashTotal.PadLeft(11, '0');
    else if (hashTotal.Length > 11)
        hashTotal = hashTotal.Substring(hashTotal.Length - 11);

    //Add the Trailer record to our output
    AddText(afiStream, "3," + String.Format("{0:0.00}", sumReturnedDollars).Replace(".", "") + "," + String.Format("{0:0}",txnCount) + "," + hashTotal + "\r\n");

    //Close the file, we're done :)
    // NOTE(review): streams are not in using/finally blocks — an exception above
    // leaks both handles. TODO confirm and wrap in using.
    afiStream.Close();
    afiStream = null;
    csvStreamBankNotes.Close();
    csvStreamBankNotes = null;

    //Verify the transactions sum to correct amounts
    MessageBox.Show("Transaction total = $" + String.Format("{0:0.00}",sumDollars) + " = Returned amount + Admin fees?: " + (sumDollars == (sumAdminDollars + sumReturnedDollars)));
    MessageBox.Show("Write this number down and give to Accounts. This is admin fees: $" + sumAdminDollars);
}
/// <summary>
/// GPU colour correction (Cudafy): loads the colour profile CSV, builds a 21x41x45
/// L*a*b* profile grid on the host, copies everything to the device, launches the
/// "Snake" kernel over one thread per pixel of a 960x540 image, and returns the
/// corrected foreground pixels plus the kernel's elapsed time.
/// </summary>
/// <param name="foregorungRGB_CPU">Foreground pixels to correct (one per image pixel).</param>
/// <param name="BackgroundXYZ_CPU">Matching background XYZ values.</param>
/// <returns>The corrected image buffer and the GPU time taken, in ms.</returns>
public static TestOutput CorrectColour(ForeGroundStrucuture[] foregorungRGB_CPU, BackGroundStrucuture[] BackgroundXYZ_CPU)
{
    //rgb = System.Drawing.Color.FromArgb(69, 77, 217);
    //X = 0.0630982813175294;
    //Y = 0.616476271122916;
    //Z = 0.667048468232457;

    // One entry per pixel of a 960x540 frame.
    const int image_size = 960 * 540;

    //cuda intializer
    // Reuse a previously serialized module when its checksums still match;
    // otherwise re-translate the kernels and cache the result.
    CudafyModule km = CudafyModule.TryDeserialize();
    if (km == null || !km.TryVerifyChecksums())
    {
        // km = CudafyTranslator.Cudafy((typeof(ForeGroundStrucuture)), (typeof(BackGroundStrucuture)), typeof(Color));
        km = CudafyTranslator.Cudafy((typeof(ProfileStrucuture)), (typeof(ForeGroundStrucuture)), (typeof(BackGroundStrucuture)), (typeof(SampleStructure)), typeof(snake));
        km.TrySerialize();
    }

    CudafyTranslator.GenerateDebug = true;

    // cuda or emulator
    GPGPU gpu = CudafyHost.GetDevice(CudafyModes.Target, CudafyModes.DeviceId);
    //GPGPU gpu = CudafyHost.GetDevice(eGPUType.Emulator);
    Console.WriteLine("Running quick correction using {0}", gpu.GetDeviceProperties(false).Name);
    gpu.LoadModule(km);

    ForeGroundStrucuture[] distance_CPU = new ForeGroundStrucuture[image_size];

    // allocate memory on the GPU for the bitmap (same size as ptr)
    #region
    DataTable profile = new DataTable();
    try
    {
        // add the csv bin file
        using (GenericParserAdapter parser = new GenericParserAdapter(@"C:\lev\STColorCorrection\Data\PROFILE\p3700.csv"))
        {
            System.Data.DataSet dsResult = parser.GetDataSet();
            profile = dsResult.Tables[0];
        }
    }
    catch (Exception ex)
    {
        // Best-effort load: on failure the profile table stays empty and the
        // fill loop below simply does nothing.
        Console.WriteLine(ex);
    }
    #endregion

    // allocate temp memory, initialize it, copy to constant memory on the GPU
    // L 0-21 A 0-41 B 0-45
    ProfileStrucuture[, ,] profiles_CPU = new ProfileStrucuture[21, 41, 45];
    SampleStructure[,] samples_CPU = new SampleStructure[image_size, 6];

    //profile inicialization
    // Every cell starts as an "empty" sentinel (distance/weight -1, isempty TRUE);
    // CSV rows then overwrite the cells they map to.
    #region
    for (int indexL = 0; indexL < 21; indexL++)
    {
        for (int indexA = 0; indexA < 41; indexA++)
        {
            for (int indexB = 0; indexB < 45; indexB++)
            {
                profiles_CPU[indexL, indexA, indexB].L = indexL;
                profiles_CPU[indexL, indexA, indexB].A = indexA;
                profiles_CPU[indexL, indexA, indexB].B = indexB;
                profiles_CPU[indexL, indexA, indexB].Given_R = 0;
                profiles_CPU[indexL, indexA, indexB].Given_G = 0;
                profiles_CPU[indexL, indexA, indexB].Given_B = 0;
                profiles_CPU[indexL, indexA, indexB].ML = 0;
                profiles_CPU[indexL, indexA, indexB].MA = 0;
                profiles_CPU[indexL, indexA, indexB].MB = 0;
                profiles_CPU[indexL, indexA, indexB].MX = 0;
                profiles_CPU[indexL, indexA, indexB].MY = 0;
                profiles_CPU[indexL, indexA, indexB].MZ = 0;
                profiles_CPU[indexL, indexA, indexB].distance = -1.0;
                profiles_CPU[indexL, indexA, indexB].weight = -1.0;
                profiles_CPU[indexL, indexA, indexB].isempty = TRUE;
                profiles_CPU[indexL, indexA, indexB].isMoreAccurateThanOrigin = FALSE;
            }
        }
    }

    int lvalue, avalue, bvalue;
    try
    {
        // Row 0 is skipped — presumably a header row; TODO confirm against the CSV.
        for (int i = 1; i < profile.Rows.Count; i++)
        {
            lvalue = Convert.ToInt32(profile.Rows[i][0].ToString());
            avalue = Convert.ToInt32(profile.Rows[i][1].ToString());
            bvalue = Convert.ToInt32(profile.Rows[i][2].ToString());

            // Map raw Lab values onto grid indices: scale by 0.2 and shift a/b
            // so the (negative) ranges land in [0,41) / [0,45).
            lvalue = (int)(lvalue * 0.2);
            avalue = (int)(avalue * 0.2) + 20;
            bvalue = (int)(bvalue * 0.2) + 22;

            profiles_CPU[lvalue, avalue, bvalue].L = lvalue;
            profiles_CPU[lvalue, avalue, bvalue].A = avalue;
            profiles_CPU[lvalue, avalue, bvalue].B = bvalue;

            // Columns 9-11: given RGB; 3-5: measured Lab; 6-8: measured XYZ.
            profiles_CPU[lvalue, avalue, bvalue].Given_R = (byte)Convert.ToByte(profile.Rows[i][9].ToString());
            profiles_CPU[lvalue, avalue, bvalue].Given_G = (byte)Convert.ToByte(profile.Rows[i][10].ToString());
            profiles_CPU[lvalue, avalue, bvalue].Given_B = (byte)Convert.ToByte(profile.Rows[i][11].ToString());

            profiles_CPU[lvalue, avalue, bvalue].ML = (double)Convert.ToDouble(profile.Rows[i][3].ToString());
            profiles_CPU[lvalue, avalue, bvalue].MA = (double)Convert.ToDouble(profile.Rows[i][4].ToString());
            profiles_CPU[lvalue, avalue, bvalue].MB = (double)Convert.ToDouble(profile.Rows[i][5].ToString());

            profiles_CPU[lvalue, avalue, bvalue].MX = (double)Convert.ToDouble(profile.Rows[i][6].ToString());
            profiles_CPU[lvalue, avalue, bvalue].MY = (double)Convert.ToDouble(profile.Rows[i][7].ToString());
            profiles_CPU[lvalue, avalue, bvalue].MZ = (double)Convert.ToDouble(profile.Rows[i][8].ToString());

            profiles_CPU[lvalue, avalue, bvalue].isempty = FALSE;
        }
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex);
    }
    #endregion

    //grab the colors
    ProfileStrucuture[, ,] profile_GPU = gpu.CopyToDevice(profiles_CPU);
    SampleStructure[,] samples_GPU = gpu.CopyToDevice(samples_CPU);

    //begin execution
    // capture the start time
    gpu.StartTimer();

    ForeGroundStrucuture[] foregorungRGB_GPU = gpu.CopyToDevice(foregorungRGB_CPU);
    BackGroundStrucuture[] BackgroundXYZ_GPU = gpu.CopyToDevice(BackgroundXYZ_CPU);

    //out put
    ForeGroundStrucuture[] distance_GPU = gpu.Allocate(distance_CPU);

    // generate a bitmap from our sphere data
    //Image size: 1024 x 768
    // Launch geometry: (24*8) x (675*4) = 192 x 2700 threads = 518,400 = 960*540,
    // i.e. exactly one thread per pixel.
    dim3 grids = new dim3(24, 675);
    dim3 threads = new dim3(8, 4);

    //dim3 grids = new dim3(1, 1);
    //dim3 threads = new dim3(1, 1);

    //quick_correct
    //gpu.Launch(grids, threads, ((Action<GThread, ProfileStrucuture[, ,], ForeGroundStrucuture[], BackGroundStrucuture[], ProfileStrucuture[], SampleStructure[,]>)QuickCorr), profile_GPU, foregorungRGB_GPU, BackgroundXYZ_GPU, distance_GPU, samples_GPU);

    //quick correct - testing
    gpu.Launch(grids, threads, ((Action<GThread, ProfileStrucuture[, ,], ForeGroundStrucuture[], BackGroundStrucuture[], ForeGroundStrucuture[], SampleStructure[,]>)Snake), profile_GPU, foregorungRGB_GPU, BackgroundXYZ_GPU, distance_GPU, samples_GPU);

    // copy our bitmap back from the GPU for display
    gpu.CopyFromDevice(distance_GPU, distance_CPU);

    // get stop time, and display the timing results
    double elapsedTime = gpu.StopTimer();
    TestOutput to_return = new TestOutput();
    to_return.output_image = distance_CPU;
    to_return.timeTaken = elapsedTime;

    Console.WriteLine("Time to generate: {0} ms", elapsedTime);

    // Release all device allocations before returning.
    gpu.Free(foregorungRGB_GPU);
    gpu.Free(BackgroundXYZ_GPU);
    gpu.Free(distance_GPU);
    gpu.FreeAll();

    return to_return;
}
/// <summary>
/// Validates the selected input file and extracts the column headers into
/// ColumnNames by reading at most the first two rows.
/// </summary>
/// <returns>True when the file exists and column names were extracted; otherwise false.</returns>
private bool Validate()
{
    if (string.IsNullOrWhiteSpace(txtFilename.Text))
    {
        ErrorMessage.Show("You must specify an input file!");
        return false;
    }

    if (!File.Exists(txtFilename.Text))
    {
        ErrorMessage.Show("File does not exist!");
        return false;
    }

    // Extract Column Headers...
    ColumnNames = new List<String>();

    // FIX: removed the redundant try/finally that called GC.SuppressFinalize on the
    // parser — the using block already disposes it, and suppressing finalization
    // without disposing would be wrong anyway.
    using (GenericParser parser = new GenericParserAdapter(Filename))
    {
        parser.ColumnDelimiter = Delimiter.ToCharArray()[0];
        parser.FirstRowHasHeader = IsFirstRowContainNames;
        parser.MaxRows = 2;
        parser.TextQualifier = _textQualifier;

        if (parser.Read())
        {
            for (int i = 0; i < parser.ColumnCount; ++i)
            {
                if (IsFirstRowContainNames)
                {
                    ColumnNames.Add(parser.GetColumnName(i));
                }
                else
                {
                    ColumnNames.Add("Column" + i);
                }
            }
        }
        else
        {
            ErrorMessage.Show("Failed to extract column names from data source!");
            return false;
        }
    }

    return true;
}
/// <summary>
/// Opens a CSV entry inside a zip archive and invokes the callback once per parsed
/// row, stopping early when the callback returns false.
/// </summary>
/// <param name="entry">The zip entry containing the CSV data.</param>
/// <param name="func">Per-row callback; return false to stop iteration. May be null.</param>
protected void IterateOverCSVZipEntry(ZipEntry entry, Func<GenericParserAdapter, bool> func)
{
    using (var reader = new StreamReader(entry.OpenReader()))
    using (var parser = new GenericParserAdapter(reader))
    {
        parser.ColumnDelimiter = ',';
        parser.FirstRowHasHeader = true;
        parser.TextQualifier = '\"';
        parser.FirstRowSetsExpectedColumnCount = true;

        while (parser.Read())
        {
            // Stop as soon as the callback signals it has seen enough.
            if (func != null && !func(parser))
            {
                return;
            }
        }
    }
}
/// <summary>
/// Reads the shipping CSV and, for each order (keyed by the Reference2 column),
/// sends a print-confirm request followed by a ship request to the fulfilment
/// service. Stops at the first failure and reports it, with the 1-based line number.
/// </summary>
private void shipOrders_Click(object sender, EventArgs e)
{
    using (GenericParserAdapter parser = new GenericParserAdapter())
    {
        try
        {
            parser.SetDataSource(tbShippingFile.Text);
            parser.ColumnDelimiter = ',';
            parser.FirstRowHasHeader = true;
            DataTable parsedData = parser.GetDataTable();

            string inventory = "Mylan - Epipen.com";
            int currentLine = 0;
            foreach (DataRow row in parsedData.Rows)
            {
                string error = "";
                currentLine++;

                // Step 1: confirm the order print before attempting to ship.
                XmlDocument orderPrintConfirmXMLDoc = new XmlDocument();
                bool bOrderPrintConfirmXmlSuccess = BuildOrderPrintConfirmXmlDoc(row["Reference2"].ToString(), ref orderPrintConfirmXMLDoc);
                if (bOrderPrintConfirmXmlSuccess)
                {
                    bool bOrderPrintConfirmRequestSuccess = GetDirectResponseOrderPrintConfirmWebRequest(orderPrintConfirmXMLDoc, inventory, row["Reference2"].ToString(), ref error);
                    if (bOrderPrintConfirmRequestSuccess)
                    {
                        // Step 2: build and send the ship request for today's date.
                        XmlDocument orderXMLDoc = new XmlDocument();
                        bool bShipOrderXmlSuccess = BuildOrderShipXmlDoc(row["Reference2"].ToString(), DateTime.Today, ref orderXMLDoc);
                        if (bShipOrderXmlSuccess)
                        {
                            // NOTE(review): the ship request result is never checked —
                            // a failed ship silently continues. TODO confirm intent.
                            bool bShipOrderRequestSuccess = GetDirectResponseOrderShipWebRequest(orderXMLDoc, inventory, row["Reference2"].ToString(), ref error);
                        }
                        else
                        {
                            MessageBox.Show("Error occurred building ship order XML request document for order " + row["Reference2"].ToString() + ". Line number " + currentLine);
                            break;
                        }
                    }
                    else
                    {
                        MessageBox.Show(error + Environment.NewLine + "Line number " + currentLine);
                        break;
                    }
                }
                else
                {
                    MessageBox.Show("Error occurred building order print confirm XML request document for order " + row["Reference2"].ToString() + ". Line number " + currentLine);
                    break;
                }
            }

            // NOTE(review): shown even when the loop broke early on a failure.
            MessageBox.Show("All orders are shipped.");
        }
        catch (Exception ex)
        {
            MessageBox.Show(ex.ToString());
        }
    }
}
/// <summary>
/// Returns the first media item recorded for the source column in the extra-data
/// map, or null when none exists.
/// </summary>
public override string ExtractValue(BVPImportColumnDefinition coldef, GenericParserAdapter row, Dictionary<string, List<string>> extraData)
{
    if (extraData != null)
    {
        // FIX: single TryGetValue lookup instead of ContainsKey + indexer.
        List<string> mediaList;
        if (extraData.TryGetValue(coldef.SourceColumnName, out mediaList) && mediaList != null && mediaList.Count > 0)
        {
            return mediaList[0];
        }
    }
    return null;
}
/// <summary>
/// Reads the order-list CSV, groups rows by the Description column, and writes one
/// distro-list CSV per unique description to the configured output folder, named
/// with today's date and the description.
/// </summary>
private void createDistroLists_Click(object sender, EventArgs e)
{
    using (GenericParserAdapter parser = new GenericParserAdapter())
    {
        try
        {
            parser.SetDataSource(tbOrderList.Text);
            parser.ColumnDelimiter = ',';
            parser.FirstRowHasHeader = true;
            DataTable parsedData = parser.GetDataTable();

            // Distinct Description values drive one output file each.
            DataTable uniqueKits = parsedData.DefaultView.ToTable(true, "Description");
            foreach (DataRow row in uniqueKits.Rows)
            {
                // NOTE(review): the filter string embeds the description verbatim —
                // a description containing an apostrophe breaks the Select expression.
                DataTable kits = parsedData.Select("[Description] = '" + row["Description"].ToString() + "'").CopyToDataTable();
                DataTable distro = new DistroList().CreateDistroTable();

                // Map each order row onto the distro-table schema.
                foreach (DataRow kitRecord in kits.Rows)
                {
                    DataRow newRecord = distro.NewRow();
                    newRecord["Address1"] = kitRecord["Address Line 1"];
                    newRecord["Address2"] = kitRecord["Address Line 2"];
                    newRecord["Address3"] = kitRecord["Address Line 3"];
                    newRecord["Attention"] = kitRecord["Ship To"];
                    newRecord["Company"] = kitRecord["Company"];
                    newRecord["City"] = kitRecord["City"];
                    newRecord["State"] = kitRecord["State"];
                    // NOTE(review): Substring(0, 5) throws on zips shorter than
                    // five characters — TODO confirm input is always ZIP/ZIP+4.
                    newRecord["Zip"] = kitRecord["Zip"].ToString().Substring(0, 5);
                    newRecord["Telephone"] = kitRecord["Ship Phone"];
                    newRecord["Reference1"] = kitRecord["Item #"];
                    newRecord["Reference2"] = kitRecord["Order #"];
                    distro.Rows.Add(newRecord);
                }

                WriteDistroCSV(ConfigurationManager.AppSettings["OutputPath"] + DateTime.Today.ToString("MM-dd-yy") + "_" + row["Description"].ToString() + ".csv", distro);
            }
            MessageBox.Show("Distro lists successfully created.");
        }
        catch (Exception ex)
        {
            MessageBox.Show(ex.ToString());
        }
    }
}
/// <summary>
/// Finds the first file under filePath whose name contains CollectionName
/// (case-insensitive) and parses it into a DataTable (first row = headers).
/// </summary>
/// <param name="CollectionName">Substring to match against file names.</param>
/// <param name="filePath">Directory to search.</param>
/// <param name="order">NOTE(review): unused — TODO confirm whether ordering was intended.</param>
/// <returns>The parsed table.</returns>
protected DataTable LoadWithGenericParser(string CollectionName, string filePath, string order)
{
    DataTable dt = new DataTable();
    string wantedCSV = "";
    foreach (string fileName in Directory.GetFiles(filePath))
    {
        CultureInfo culture = new CultureInfo("en-US");
        // NOTE(review): the "not found" branch below only runs when the match is at
        // index 0 of the full path (== 0), which is unlikely for an absolute path;
        // and if no file matches at all, wantedCSV stays "" and the parser
        // constructor below will fail. TODO confirm the intended matching rule.
        if (culture.CompareInfo.IndexOf(fileName, CollectionName, CompareOptions.IgnoreCase) == -1)
        {
            continue;
        }
        else if (culture.CompareInfo.IndexOf(fileName, CollectionName, CompareOptions.IgnoreCase) > 0)
        {
            wantedCSV = fileName;
            break;
        }
        else
        {
            Logger.Instance.Info("File Not Found. Press Any Key");
            Console.ReadKey();
        }
    }

    //return CSVReader.instance.GetData(wantedCSV, '\n', ',');
    using (GenericParserAdapter parser = new GenericParserAdapter(wantedCSV))
    {
        parser.FirstRowHasHeader = true;
        dt = parser.GetDataTable();
    }

    // NOTE(review): this grouping (by sessionid, ordered by keen.timestamp) is
    // computed but never used — dead work unless a later change consumes it.
    Dictionary<string, List<DataRow>> dict = dt.AsEnumerable()
        .OrderBy(x => x.Field<string>("keen.timestamp"))
        .GroupBy(x => x.Field<string>("sessionid"))
        .ToDictionary(x => x.Key, y => y.ToList());

    return dt;
}
/// <summary>
/// BackgroundWorker body: loads a profanity word list and a CSV to check, then
/// splits the checked file's rows into ".clean.csv" and ".dirty.csv" outputs.
/// Dirty rows are prefixed with the matched profanity and the offending cell.
/// Progress is reported as a percentage after each row.
/// </summary>
private void ProfanityCheck(object sender, DoWorkEventArgs e)
{
    CheckForIllegalCrossThreadCalls = false; //bad practice, but don't care :)
    BackgroundWorker worker = sender as BackgroundWorker;

    //Set up the profanity checker
    GenericParserAdapter profanityFileParser = new GenericParserAdapter();
    profanityFileParser.SetDataSource(tbProfanityList.Text);
    profanityFileParser.FirstRowHasHeader = true;
    profanityFileParser.ColumnDelimiter = ',';
    System.Data.DataTable profanityFileTable = profanityFileParser.GetDataTable();
    // First column of each row is a banned word.
    List<string> profanityList = new List<string>();
    foreach(DataRow currentRow in profanityFileTable.Rows)
    {
        profanityList.Add((string)currentRow[0]);
    }
    Censor myProfanityCensor = new Censor(profanityList);

    //Set up the file to be checked for profanity
    GenericParserAdapter checkFileParser = new GenericParserAdapter();
    checkFileParser.SetDataSource(tbCheckFile.Text);
    checkFileParser.FirstRowHasHeader = true;
    checkFileParser.ColumnDelimiter = ',';
    System.Data.DataTable checkFileTable = checkFileParser.GetDataTable();

    //Set up various ouput files
    // NOTE(review): these streams are only closed at the end of the method — an
    // exception mid-run leaks both handles. TODO wrap in using/finally.
    FileStream nonProfaneStream = File.Create(tbCheckFile.Text + ".clean.csv");
    FileStream profaneStream = File.Create(tbCheckFile.Text + ".dirty.csv");

    //Run the profanity check, outputting data as we go
    int totalSteps = checkFileTable.Rows.Count;
    int iterations = 0;
    foreach (DataRow currentRow in checkFileTable.Rows)
    {
        iterations += 1;
        bool taintedRow = false;
        String profanity = "";
        String profanityContext = "";

        //Check for profanity
        foreach (String currItem in currentRow.ItemArray)
        {
            //if profane, break the loop and proceed to file output
            if (myProfanityCensor.ContainsProfanity(currItem,ref profanity))
            {
                taintedRow = true;
                profanityContext = currItem;
                break;
            }
        }

        //Output to appropriate file;
        if (taintedRow)
        {
            // Dirty rows get two extra leading columns: the matched word
            // and the cell it was found in.
            AddText(profaneStream, profanity + ",");
            AddText(profaneStream, profanityContext + ",");
            for (int i = 0; i < currentRow.ItemArray.Length - 1; i++)
            {
                AddText(profaneStream, EncapsulateSpeech(currentRow.ItemArray[i].ToString()) + ",");
            }
            // Last cell is written without a trailing comma, ending the line.
            if (currentRow.ItemArray.Length >= 1)
                AddText(profaneStream, EncapsulateSpeech(currentRow.ItemArray[currentRow.ItemArray.Length-1].ToString()) + Environment.NewLine);
        }
        else
        {
            for (int i = 0; i < currentRow.ItemArray.Length - 1; i++)
            {
                AddText(nonProfaneStream, EncapsulateSpeech(currentRow.ItemArray[i].ToString()) + ",");
            }
            if (currentRow.ItemArray.Length >= 1)
                AddText(nonProfaneStream, EncapsulateSpeech(currentRow.ItemArray[currentRow.ItemArray.Length - 1].ToString()) + Environment.NewLine);
        }

        //Report progress to UI
        worker.ReportProgress((int)(((double)iterations / (double)totalSteps) * 100));
    }

    //Tidy up
    profaneStream.Close();
    nonProfaneStream.Close();
}
/// <summary>
/// Extracts the default value, strips literal "\n" sequences, and reduces it to the
/// first capture of the initials pattern, falling back to the full-word pattern.
/// </summary>
public override string ExtractValue(BVPImportColumnDefinition coldef, GenericParserAdapter row, Dictionary<string, List<string>> extraData)
{
    var value = Default(coldef, row, extraData);
    // BUG FIX: the original called value.Replace unconditionally, throwing a
    // NullReferenceException when the source cell was null.
    if (value == null)
    {
        return null;
    }

    value = value.Replace("\\n", " ");
    var m = _initals.Match(value);
    if (m.Success)
    {
        value = m.Groups[1].Value;
    }
    else
    {
        m = _fullword.Match(value);
        if (m.Success)
        {
            value = m.Groups[1].Value;
        }
    }
    return value;
}
/// <summary>
/// Main "Run" handler for the client distribution tool. Pipeline: (1) load the Alpha DB
/// extract, the MYOB client dump, and the list of target IRD numbers into DataTables;
/// (2) for each target client, look up and clean their contact details and bucket them
/// into post/email/text tables according to the user's priority settings (or record an
/// error row); (3) dump an error CSV and an Alpha-notes CSV and open them in Excel;
/// (4) send emails and SMS-gateway emails from templates; (5) run a Word mail merge for
/// the postal list. Priority value 4 means "Do Not Send" for that channel.
/// NOTE(review): this method is very long and order-dependent; documented in place
/// rather than restructured.
/// </summary>
private void btnRun_Click(object sender, EventArgs e)
{
    //Column header references for Alpha extract file, 0-indexed. Update as needed.
    int alphaDBIRDCol = 0;
    int alphaDBFirstNameCol = 1;
    int alphaDBSurnameCol = 2;
    int alphaDBMobileCol = 6;
    int alphaDBEmailCol = 7;
    int alphaDBStatusCol = 8;
    int alphaDBAddress1Col = 10;
    int alphaDBAddress2Col = 11;
    int alphaDBSuburbCol = 12;
    int alphaDBCityCol = 13;
    int alphaDBPostcodeCol = 14;
    //Column header references for MYOB extract file, 0-indexed. Update as needed.
    int myobIRDCol = 0;
    int myobNameCol = 1;
    int myobClientIDCol = 2;
    //Column header references for Client List file, 0-indexed. These are the Mail Merge field names. Update as needed
    string clientListIRD = "IRDNum";
    string clientListFirstName = "FirstName";
    string clientListSurname = "Surname";
    string clientListEmail = "Email";
    string clientListCellphone = "Cellphone";
    string clientListAddress1 = "Address1";
    string clientListAddress2 = "Address2";
    string clientListSuburb = "Suburb";
    string clientListCity = "City";
    string clientListPostcode = "Postcode";
    //Record our sending priorities (SelectedIndex + 1, so 1..3 = priority, 4 = "Do Not Send")
    int letterPriority = cmbLetterPriorities.SelectedIndex + 1;
    int emailPriority = cmbEmailPriorities.SelectedIndex + 1;
    int cellPriority = cmbTxtPriorities.SelectedIndex + 1;
    if (!(letterPriority == 4 && emailPriority == 4 && cellPriority == 4))
    {
        //Stash all Alpha extract info in a DataTable, and clean the IRD numbers. Don't bother cleaning anything else till later - we'll only clean if we want that client's info
        GenericParserAdapter alphaDBParser = new GenericParserAdapter();
        alphaDBParser.SetDataSource(tbAlphaDB.Text);
        alphaDBParser.FirstRowHasHeader = false;
        alphaDBParser.ColumnDelimiter = ',';
        System.Data.DataTable alphaDBTable = alphaDBParser.GetDataTable();
        //This column gets a name because we need to do SELECT statements using this later
        alphaDBTable.Columns[alphaDBIRDCol].ColumnName = clientListIRD;
        //Stash all MYOB (NZAccountant) extract info in a DataTable
        GenericParserAdapter myobDBParser = new GenericParserAdapter();
        System.Reflection.Assembly.GetExecutingAssembly().GetManifestResourceNames();
        //MYOB dump is shipped alongside the executable, not user-selected
        myobDBParser.SetDataSource((new DirectoryInfo(Assembly.GetExecutingAssembly().Location).Parent.FullName) + "\\Resources\\20120309_myob_client_dump.csv");
        myobDBParser.FirstRowHasHeader = true;
        myobDBParser.ColumnDelimiter = ',';
        System.Data.DataTable myobDBTable = myobDBParser.GetDataTable();
        //This column gets a name because we need to do SELECT statements using this later
        myobDBTable.Columns[myobIRDCol].ColumnName = clientListIRD;
        //Normalise IRD numbers: strip non-digits, then strip leading zeros via int round-trip when parseable
        foreach (DataRow currRow in alphaDBTable.Rows)
        {
            string irdNumString = Regex.Replace((string)currRow[alphaDBIRDCol], "\\D", "");
            //TODO: This can cause exceptions. Really should handle this
            int extractedIRDNum = 0;
            bool extractedIRDBool = Int32.TryParse(irdNumString, out extractedIRDNum);
            currRow[alphaDBIRDCol] = extractedIRDBool ? extractedIRDNum.ToString() : irdNumString;
        }
        //Put all the supplied ird numbers for clients we're distributing to into a DataTable
        GenericParserAdapter clientListParser = new GenericParserAdapter();
        clientListParser.SetDataSource(tbClientNums.Text);
        clientListParser.FirstRowHasHeader = false;
        clientListParser.ColumnDelimiter = ',';
        clientListParser.ExpectedColumnCount = 1;
        System.Data.DataTable clientListTable = clientListParser.GetDataTable();
        clientListTable.Columns[0].ColumnName = clientListIRD;
        clientListTable.Columns.Add(clientListFirstName);
        clientListTable.Columns.Add(clientListSurname);
        clientListTable.Columns.Add(clientListEmail);
        clientListTable.Columns.Add(clientListCellphone);
        clientListTable.Columns.Add(clientListAddress1);
        clientListTable.Columns.Add(clientListAddress2);
        clientListTable.Columns.Add(clientListSuburb);
        clientListTable.Columns.Add(clientListCity);
        clientListTable.Columns.Add(clientListPostcode);
        //We're going to need a DataTable for reporting errors too
        System.Data.DataTable errorTable = new System.Data.DataTable();
        errorTable.Columns.Add("IRD Number");
        errorTable.Columns.Add("Error Description");
        //For each client we're distributing to, retrieve (and clean) their information from the alphaDbTable
        Regex emailRegex = new Regex(@"^(([^<>()[\]\\.,;:\s@\""]+" + @"(\.[^<>()[\]\\.,;:\s@\""]+)*)|(\"".+\""))@" + @"((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}" + @"\.[0-9]{1,3}\])|(([a-zA-Z\-0-9]+\.)+" + @"[a-zA-Z]{2,}))$"); //http://geekswithblogs.net/VROD/archive/2007/03/16/109007.aspx
        System.Data.DataTable postListTable = clientListTable.Clone();
        System.Data.DataTable emailListTable = clientListTable.Clone();
        System.Data.DataTable cellListTable = clientListTable.Clone();
        foreach (DataRow currClient in clientListTable.Rows)
        {
            //Check if it's an MYOB client. If so, this is an error
            if (myobDBTable.Select(clientListIRD + " = \'" + ((string)currClient[clientListIRD]) + "\'").Length == 0)
            {
                DataRow[] alphaClientDetailsArray;
                alphaClientDetailsArray = alphaDBTable.Select(clientListIRD + " = \'" + ((string)currClient[clientListIRD]) + "\'"); //"IRDNum = [search term]"
                DataColumnCollection test = alphaDBTable.Columns;
                //If we successfully retrieved the client's details, clean and record them. If not, mark this as an error client
                if (alphaClientDetailsArray.Length > 0)
                {
                    //Ignore multiple results, just take the first match.
                    DataRow alphaClientDetails = alphaClientDetailsArray[0];
                    //If successfully retrieved data, check if Address Line 1 is a dud. If so, mark this as an error client
                    currClient[clientListFirstName] = EncapsulateSpeech(ToTitleCase((string)alphaClientDetails[alphaDBFirstNameCol]));
                    currClient[clientListSurname] = EncapsulateSpeech(ToTitleCase((string)alphaClientDetails[alphaDBSurnameCol]));
                    currClient[clientListEmail] = emailRegex.IsMatch((string)alphaClientDetails[alphaDBEmailCol]) ? EncapsulateSpeech((string)alphaClientDetails[alphaDBEmailCol]) : "";
                    string tempCellphone = EncapsulateSpeech((string)alphaClientDetails[alphaDBMobileCol]);
                    currClient[clientListCellphone] = cleanCellphone(ref tempCellphone) ? EncapsulateSpeech((string)tempCellphone) : "";
                    currClient[clientListAddress1] = EncapsulateSpeech(ToTitleCase((string)alphaClientDetails[alphaDBAddress1Col]));
                    currClient[clientListAddress2] = EncapsulateSpeech(ToTitleCase((string)alphaClientDetails[alphaDBAddress2Col]));
                    //If the city is blank (after removing whitespace), but there's a postcode, then put the postcode on the end of the suburb
                    string tempCity = Regex.Replace((string)alphaClientDetails[alphaDBCityCol], @"\s+", "");
                    string tempPostCode = (string)alphaClientDetails[alphaDBPostcodeCol];
                    try
                    {
                        //Pad short postcodes to 4 digits; a postcode of 0 means "none"
                        tempPostCode = Int32.Parse(tempPostCode) == 0 ? tempPostCode = "" : ((tempPostCode).Length < 4 && (tempPostCode).Length > 0 ? (tempPostCode).PadLeft(4, '0') : tempPostCode); //TRAP! If inside an if :)
                    }
                    catch (Exception postCodeFailed) //catch all :)
                    {
                        tempPostCode = "";
                    }
                    string tempSuburb = (string)alphaClientDetails[alphaDBSuburbCol];
                    if (tempCity == "" && tempPostCode.Length > 0)
                    {
                        tempSuburb = tempSuburb + " " + tempPostCode;
                        tempCity = "";
                        tempPostCode = "";
                    }
                    currClient[clientListSuburb] = EncapsulateSpeech(ToTitleCase(tempSuburb));
                    currClient[clientListCity] = EncapsulateSpeech(ToTitleCase(tempCity));
                    currClient[clientListPostcode] = EncapsulateSpeech(tempPostCode);
                    //Decide whether this is a mail client, an email client, or a cell client - then put into appropo DataTable
                    //
                    //TODO: This whole section is hideous and needs rewriting
                    //A "failure" at level N means the channel assigned priority N has no usable contact detail
                    bool firstLevelFailure = false;
                    bool secondLevelFailure = false;
                    bool thirdLevelFailure = false;
                    string tempTestAddress1 = (string)currClient[clientListAddress1];
                    string tempTestEmail = (string)currClient[clientListEmail];
                    string tempTestCellphone = (string)currClient[clientListCellphone];
                    switch (letterPriority)
                    {
                        case 1: firstLevelFailure = ((tempTestAddress1 == "" || tempTestAddress1 == "\"\"" || tempTestAddress1 == "0") && firstLevelFailure != true); break;
                        case 2: secondLevelFailure = ((tempTestAddress1 == "" || tempTestAddress1 == "\"\"" || tempTestAddress1 == "0") && secondLevelFailure != true); break;
                        case 3: thirdLevelFailure = ((tempTestAddress1 == "" || tempTestAddress1 == "\"\"" || tempTestAddress1 == "0") && thirdLevelFailure != true); break;
                        default: break;
                    }
                    switch (emailPriority)
                    {
                        case 1: firstLevelFailure = ((tempTestEmail == "" || tempTestEmail == "\"\"" || tempTestEmail == "0") && firstLevelFailure != true); break;
                        case 2: secondLevelFailure = ((tempTestEmail == "" || tempTestEmail == "\"\"" || tempTestEmail == "0") && secondLevelFailure != true); break;
                        case 3: thirdLevelFailure = ((tempTestEmail == "" || tempTestEmail == "\"\"" || tempTestEmail == "0") && thirdLevelFailure != true); break;
                        default: break;
                    }
                    switch (cellPriority)
                    {
                        case 1: firstLevelFailure = ((tempTestCellphone == "" || tempTestCellphone == "\"\"" || tempTestCellphone == "0") && firstLevelFailure != true); break;
                        case 2: secondLevelFailure = ((tempTestCellphone == "" || tempTestCellphone == "\"\"" || tempTestCellphone == "0") && secondLevelFailure != true); break;
                        case 3: thirdLevelFailure = ((tempTestCellphone == "" || tempTestCellphone == "\"\"" || tempTestCellphone == "0") && thirdLevelFailure != true); break;
                        default: break;
                    }
                    //Send via every channel at the highest priority level that didn't fail
                    bool actuallySentSomething = false;
                    if (firstLevelFailure == false)
                    {
                        if (letterPriority == 1) { postListTable.ImportRow(currClient); actuallySentSomething = true; }
                        if (emailPriority == 1) { emailListTable.ImportRow(currClient); actuallySentSomething = true; }
                        if (cellPriority == 1) { cellListTable.ImportRow(currClient); actuallySentSomething = true; }
                    }
                    else if (secondLevelFailure == false)
                    {
                        if (letterPriority == 2) { postListTable.ImportRow(currClient); actuallySentSomething = true; }
                        if (emailPriority == 2) { emailListTable.ImportRow(currClient); actuallySentSomething = true; }
                        if (cellPriority == 2) { cellListTable.ImportRow(currClient); actuallySentSomething = true; }
                    }
                    else if (thirdLevelFailure == false)
                    {
                        if (letterPriority == 3) { postListTable.ImportRow(currClient); actuallySentSomething = true; }
                        if (emailPriority == 3) { emailListTable.ImportRow(currClient); actuallySentSomething = true; }
                        if (cellPriority == 3) { cellListTable.ImportRow(currClient); actuallySentSomething = true; }
                    }
                    else //Whelp, we've completely failed.
                    {
                        DataRow newErrorRow = errorTable.NewRow();
                        newErrorRow["IRD Number"] = currClient[clientListIRD];
                        newErrorRow["Error Description"] = "No valid contact details that match with what you want to send";
                        errorTable.Rows.Add(newErrorRow);
                    }
                    //Just in case we didn't find anything we could send to, but didn't have a failure at each level. Not sure this actually reachable, but being safe :)
                    if (!actuallySentSomething)
                    {
                        DataRow newErrorRow = errorTable.NewRow();
                        newErrorRow["IRD Number"] = currClient[clientListIRD];
                        newErrorRow["Error Description"] = "No valid contact details that match with what you want to send. You should contact the client to update details or try and send your message by a different method.";
                        errorTable.Rows.Add(newErrorRow);
                    }
                }
                else
                {
                    DataRow newErrorRow = errorTable.NewRow();
                    newErrorRow["IRD Number"] = currClient[clientListIRD];
                    newErrorRow["Error Description"] = "Could not find the client in the database. Please check you entered this correctly. If you did then check with NZAccountant and PBA. Tell developer/manager/team leader if not a client of either of them.";
                    errorTable.Rows.Add(newErrorRow);
                }
            }
            else
            {
                DataRow newErrorRow = errorTable.NewRow();
                newErrorRow["IRD Number"] = currClient[clientListIRD];
                newErrorRow["Error Description"] = "This appears to be an NZAccountant Client; check with them. Tell developer/manager/team leader if not a client of NZA.";
                errorTable.Rows.Add(newErrorRow);
            }
        }
        //Create a temp file for the Error data, dump it out, and open it in Excel
        String errorFileLocation = System.IO.Path.GetTempFileName() + ".csv";
        FileStream errorOutputStream = File.Create(errorFileLocation);
        if (errorTable.Rows.Count > 0)
        {
            //write headers
            int errorColumnCount = errorTable.Columns.Count;
            for (int i = 0; i < errorColumnCount; i++)
            {
                AddText(errorOutputStream, errorTable.Columns[i].ColumnName);
                if (i != errorColumnCount - 1) AddText(errorOutputStream, ",");
            }
            AddText(errorOutputStream, Environment.NewLine);
            //write data
            foreach (DataRow currClient in errorTable.Rows)
            {
                for (int i = 0; i < errorColumnCount; i++)
                {
                    AddText(errorOutputStream, (string)currClient[i]);
                    if (i != errorColumnCount - 1) AddText(errorOutputStream, ",");
                }
                AddText(errorOutputStream, Environment.NewLine);
            }
            //NOTE(review): stream only closed when there were error rows; leaks otherwise
            errorOutputStream.Close();
        }
        //Create a temp file for the Alpha import data, dump it out, and open it in Excel
        String alphaFileLocation = System.IO.Path.GetTempFileName() + ".csv";
        FileStream alphaOutputStream = File.Create(alphaFileLocation);
        //write data: one note row per client, per channel
        foreach (DataRow currClient in postListTable.Rows)
        {
            AddText(alphaOutputStream, (string)currClient[clientListIRD] + ",");
            AddText(alphaOutputStream, EncapsulateSpeech(tbAlphaNotes.Text + ". Sent by Post"));
            AddText(alphaOutputStream, Environment.NewLine);
        }
        foreach (DataRow currClient in emailListTable.Rows)
        {
            AddText(alphaOutputStream, (string)currClient[clientListIRD] + ",");
            AddText(alphaOutputStream, EncapsulateSpeech(tbAlphaNotes.Text + ". Sent by Email"));
            AddText(alphaOutputStream, Environment.NewLine);
        }
        foreach (DataRow currClient in cellListTable.Rows)
        {
            AddText(alphaOutputStream, (string)currClient[clientListIRD] + ",");
            AddText(alphaOutputStream, EncapsulateSpeech(tbAlphaNotes.Text + ". Sent by Text Message"));
            AddText(alphaOutputStream, Environment.NewLine);
        }
        alphaOutputStream.Close();
        //Make the error file visible
        Excel._Application oExcel = new Excel.Application();
        oExcel.Visible = true;
        oExcel.ScreenUpdating = true;
        if (errorTable.Rows.Count > 0)
        {
            oExcel.Workbooks.Open(errorFileLocation, Missing.Value, Missing.Value, Missing.Value, Missing.Value, Missing.Value, Missing.Value, Missing.Value, Missing.Value, Missing.Value, Missing.Value, Missing.Value, Missing.Value, Missing.Value, Missing.Value);
        }
        //Make the notes file visible
        oExcel.Workbooks.Open(alphaFileLocation, Missing.Value, Missing.Value, Missing.Value, Missing.Value, Missing.Value, Missing.Value, Missing.Value, Missing.Value, Missing.Value, Missing.Value, Missing.Value, Missing.Value, Missing.Value, Missing.Value);
        //Send emails.
        //COMMENTED OUT BEHAVIOUR: If no emails, let the user know
        if (emailListTable.Rows.Count > 0)
        {
            int columnCount = emailListTable.Columns.Count;
            StreamReader myEmailStream = new System.IO.StreamReader(@tbEmailTemplate.Text);
            string myEmailString = myEmailStream.ReadToEnd();
            myEmailStream.Close();
            foreach (DataRow currClient in emailListTable.Rows)
            {
                //Substitute placeholders, HTML-encode, then wrap in the shared email shell
                string tempEmailString = myEmailString;
                tempEmailString = tempEmailString.Replace("[[[[FIRST_NAME]]]]", RemoveSpeech((string)currClient[clientListFirstName]));
                tempEmailString = tempEmailString.Replace("[[[[IRD_NUMBER]]]]", RemoveSpeech((string)currClient[clientListIRD]));
                tempEmailString = tempEmailString.Replace(Environment.NewLine, "<br/>");
                tempEmailString = HTMLEncodeSpecialChars(tempEmailString);
                //myEmailString = myEmailString.Replace("[[[[FULL_NAME]]]]", (string)currClient[clientListIRD]);
                tempEmailString = Resources.EmailContent.Replace("[[[[EMAIL_CONTENT]]]]", tempEmailString);
                SendEmail(RemoveSpeech((string)currClient[clientListEmail]), tempEmailString, tbSubject.Text.Replace("<FirstName>", RemoveSpeech((string)currClient[clientListFirstName])).Replace("<LastName>", RemoveSpeech((string)currClient[clientListSurname])), BodyType.HTML);
            }
        }
        else
        {
            //MessageBox.Show("No emails were sent");
        }
        //Send texts (via the TNZ email-to-SMS gateway).
        //COMMENTED OUT BEHAVIOUR: If no emails, let the user know
        if (cellListTable.Rows.Count > 0)
        {
            int columnCount = cellListTable.Columns.Count;
            StreamReader myEmailStream = new System.IO.StreamReader(tbTextTemplate.Text);
            string myEmailString = myEmailStream.ReadToEnd();
            myEmailStream.Close();
            foreach (DataRow currClient in cellListTable.Rows)
            {
                string tempEmailString = myEmailString;
                tempEmailString = tempEmailString.Replace("[[[[FIRST_NAME]]]]", RemoveSpeech((string)currClient[clientListFirstName]));
                tempEmailString = tempEmailString.Replace("[[[[IRD_NUMBER]]]]", RemoveSpeech((string)currClient[clientListIRD]));
                SendEmail(RemoveSpeech((string)currClient[clientListCellphone] + "@sms.tnz.co.nz"), tempEmailString, "", BodyType.Text);
            }
        }
        else
        {
            //MessageBox.Show("No emails were sent");
        }
        //Create a temp file for the mail merge data, and dump it all out.
        //COMMENTED OUT BEHAVIOUR: If no letters, let the user know
        if (postListTable.Rows.Count > 0)
        {
            String mergeFileLocation = System.IO.Path.GetTempFileName() + ".csv";
            FileStream mergeOutputStream = File.Create(mergeFileLocation);
            //write headers
            int columnCount = postListTable.Columns.Count;
            for (int i = 0; i < columnCount; i++)
            {
                AddText(mergeOutputStream, postListTable.Columns[i].ColumnName);
                if (i != columnCount - 1) AddText(mergeOutputStream, ",");
            }
            AddText(mergeOutputStream, Environment.NewLine);
            //write data
            foreach (DataRow currClient in postListTable.Rows)
            {
                for (int i = 0; i < columnCount; i++)
                {
                    AddText(mergeOutputStream, (string)currClient[i]);
                    if (i != columnCount - 1) AddText(mergeOutputStream, ",");
                }
                AddText(mergeOutputStream, Environment.NewLine);
            }
            mergeOutputStream.Close();
            //Run the mail merge using the Word letter template and the CSV just written
            Object oMailMergeFile = tbLetterTemplate.Text;
            Object oMissing = System.Reflection.Missing.Value;
            Object oFalse = false;
            Object oTrue = true;
            Object oSql = "Select * from [Table1$]";
            Word._Application oWord = new Word.Application();
            oWord.Visible = false;
            oWord.ScreenUpdating = false;
            Word._Document myDoc = oWord.Documents.Add(ref oMailMergeFile, ref oMissing, ref oMissing, ref oMissing);
            myDoc.MailMerge.MainDocumentType = Word.WdMailMergeMainDocType.wdFormLetters;
            myDoc.MailMerge.OpenDataSource(@mergeFileLocation, ref oMissing, ref oMissing, ref oMissing, ref oMissing, ref oMissing, ref oMissing, ref oMissing, ref oMissing, ref oMissing, ref oMissing, ref oMissing, ref oSql, ref oMissing, ref oMissing, ref oMissing);
            myDoc.MailMerge.Destination = Word.WdMailMergeDestination.wdSendToNewDocument;
            myDoc.MailMerge.Execute(ref oFalse);
            saveMergeDocs(oWord);
            myDoc.Close(ref oFalse, ref oMissing, ref oMissing);
            //Make the merge visible
            oWord.Visible = true;
            oWord.ScreenUpdating = true;
            oWord.Activate();
            File.Delete(mergeFileLocation);
        }
        else
        {
            //MessageBox.Show("No letters were printed");
        }
        //DEBUG: comment the following out for production
        //oExcel.Workbooks.Open(mergeFileLocation, Missing.Value, Missing.Value, Missing.Value, Missing.Value, Missing.Value, Missing.Value, Missing.Value, Missing.Value, Missing.Value, Missing.Value, Missing.Value, Missing.Value, Missing.Value, Missing.Value);
    }
    else
    {
        MessageBox.Show("Please set either email, text, or letter to something other than \"Do Not Send\"");
    }
}
private void Preview() { GenericParserAdapter parser = null; try { using (parser = new GenericParserAdapter(Filename)) { parser.ColumnDelimiter = Delimiter.ToCharArray()[0]; parser.FirstRowHasHeader = IsFirstRowContainNames; parser.MaxRows = 50; parser.TextQualifier = '\"'; var ds = parser.GetDataSet(); previewGrid.AutoGenerateColumns = true; previewGrid.ItemsSource = ds.Tables[0].DefaultView; System.GC.SuppressFinalize(parser); } } finally { if (parser != null) { GC.SuppressFinalize(parser); } } }
/// <summary>
/// BackgroundWorker handler: loads a stale-cheque CSV (columns "ChequeNum", "FoundIRD",
/// "Amount") into a SortedList keyed by cheque number, then opens every file under the
/// root cheque folder in Excel and searches each worksheet for either the cheque's amount
/// or its IRD number. Matches are appended to the output CSV along with the source file
/// and the entire matching worksheet row. Unmatched cheques are dumped to
/// "&lt;output&gt;.faileddump.csv"; parse/Excel failures go to "&lt;output&gt;.errors.csv".
/// NOTE(review): Excel-interop error recovery here is order-sensitive (the Application
/// object is deliberately recreated after a failure); documented in place, not restructured.
/// </summary>
private void MatchChequesAlt(object sender, DoWorkEventArgs e)
{
    CheckForIllegalCrossThreadCalls = false; //bad practice, but don't care :)
    BackgroundWorker worker = sender as BackgroundWorker;
    string delimiter = ",";
    GenericParserAdapter myParser = new GenericParserAdapter();
    myParser.SetDataSource(tbStaleChequeList.Text);
    myParser.FirstRowHasHeader = true;
    myParser.ColumnDelimiter = ',';
    System.Data.DataTable myDataTable = myParser.GetDataTable();
    SortedList<double,Tuple<double,float>> chequeList = new SortedList<double,Tuple<double,float>>(); //<Cheque, <IRDNumber, amount>>
    FileStream errorStream = File.Create(tbOutputLocation.Text + ".errors.csv");
    int errorCount = 0;
    //Loop over each row/cheque, first checking if stale and if so, adding it to a list to find
    foreach (DataRow currentRow in myDataTable.Rows)
    {
        try
        {
            chequeList.Add(Double.Parse((string)currentRow["ChequeNum"]), new Tuple<double, float>(Double.Parse((string)currentRow["FoundIRD"]), float.Parse((string)currentRow["Amount"])));
        }
        catch (Exception staleParseFailure) //bad number formats or duplicate cheque numbers land here
        {
            AddText(errorStream, "Shit went wrong. Here's some useful data: " + currentRow["FoundIRD"] + "; " +currentRow["Amount"] + ". Exception: \"" + staleParseFailure.ToString() + "\"" + Environment.NewLine);
        }
    }
    //Open each file which potentially contains what we want, and search for each cheque number. If found, dump the line and filename into a csv
    FileStream outputFileStream = File.Create(tbOutputLocation.Text);
    /*AddText(outputFileStream, "\"ChequeNum\"" + delimiter); AddText(outputFileStream, "\"Date\"" + delimiter); AddText(outputFileStream, "\"Amount\"" + Environment.NewLine);*/
    var filesToCheck = System.IO.Directory.EnumerateFiles(@tbRootCHQFolder.Text, "*", SearchOption.AllDirectories);
    int totalSteps = filesToCheck.Count();
    Excel._Application oExcel = new Excel.Application();
    oExcel.Visible = false;
    oExcel.ScreenUpdating = false;
    oExcel.DisplayAlerts = false;
    int iterations = 0;
    foreach (var currentFile in filesToCheck)
    {
        lock (thisLock) //one Excel operation at a time
        {
            iterations += 1;
            try
            {
                Excel.Workbook myWorkBook = oExcel.Workbooks.Open(currentFile);
                //Check each worksheet in the book
                foreach (Excel.Worksheet currWorksheet in myWorkBook.Worksheets)
                {
                    Excel.Range lastCell = currWorksheet.Cells.SpecialCells(Excel.XlCellType.xlCellTypeLastCell, Type.Missing);
                    //if there's something in the worksheet then try and find the cheque
                    if (lastCell.Column > 1)
                    {
                        Excel.Range firstCell = currWorksheet.get_Range("A1", Type.Missing);
                        Excel.Range entireSheetRange = currWorksheet.get_Range(firstCell, lastCell);
                        //foreach (var currCheque in chequeList) //I want to remove items as I'm going so am using the iterate backwards method, and using an enumerator causes errors!
                        for (int currChequeIndex = chequeList.Count - 1; currChequeIndex > -1; currChequeIndex--)
                        {
                            double currChequeKey = chequeList.Keys[currChequeIndex];
                            Tuple<double,float> currChequeValues = chequeList.Values[currChequeIndex];
                            Excel.Range currentFind = null;
                            //find the $ value of the cheque in the sheet, if poss
                            currentFind = entireSheetRange.Find(currChequeValues.Item2, Type.Missing, Excel.XlFindLookIn.xlValues, Excel.XlLookAt.xlWhole, Excel.XlSearchOrder.xlByRows, Excel.XlSearchDirection.xlNext, false, Type.Missing, Type.Missing);
                            //Found the cheque, write out the info we want
                            if (currentFind != null)
                            {
                                //Remove the cheque from our list, as we're now wasting time looking for it
                                //chequeList.Remove(currChequeKey);
                                AddText(outputFileStream, "\"" + currChequeKey.ToString() + "\"" + delimiter);
                                AddText(outputFileStream, "\"" + currChequeValues.Item1 + "\"" + delimiter);
                                AddText(outputFileStream, "\"" + currChequeValues.Item2 + "\"" + delimiter);
                                AddText(outputFileStream, "\"" + currentFile + "\"" + delimiter);
                                //dump out the whole line we found the match on
                                int numCols = currentFind.EntireRow.SpecialCells(Excel.XlCellType.xlCellTypeLastCell, Type.Missing).Column;
                                for (int i = 1; i <= numCols; i++)
                                {
                                    if (i != numCols)
                                    {
                                        AddText(outputFileStream, "\"" + currentFind.EntireRow.Cells[1, i].Text + "\"" + delimiter);
                                    }
                                    else
                                    {
                                        AddText(outputFileStream, "\"" + currentFind.EntireRow.Cells[1, i].Text + "\"" + Environment.NewLine);
                                    }
                                }
                            }
                            currentFind = null;
                            //find the IRD# in the sheet, if poss, as well
                            currentFind = entireSheetRange.Find(currChequeValues.Item1, Type.Missing, Excel.XlFindLookIn.xlValues, Excel.XlLookAt.xlWhole, Excel.XlSearchOrder.xlByRows, Excel.XlSearchDirection.xlNext, false, Type.Missing, Type.Missing);
                            //Found the cheque, write out the info we want
                            if (currentFind != null)
                            {
                                //Remove the cheque from our list, as we're now wasting time looking for it
                                //chequeList.Remove(currChequeKey);
                                AddText(outputFileStream, "\"" + currChequeKey.ToString() + "\"" + delimiter);
                                AddText(outputFileStream, "\"" + currChequeValues.Item1 + "\"" + delimiter);
                                AddText(outputFileStream, "\"" + currChequeValues.Item2 + "\"" + delimiter);
                                AddText(outputFileStream, "\"" + currentFile + "\"" + delimiter);
                                //dump out the whole line we found the match on
                                int numCols = currentFind.EntireRow.SpecialCells(Excel.XlCellType.xlCellTypeLastCell, Type.Missing).Column;
                                for (int i = 1; i <= numCols; i++)
                                {
                                    if (i != numCols)
                                    {
                                        AddText(outputFileStream, "\"" + currentFind.EntireRow.Cells[1, i].Text + "\"" + delimiter);
                                    }
                                    else
                                    {
                                        AddText(outputFileStream, "\"" + currentFind.EntireRow.Cells[1, i].Text + "\"" + Environment.NewLine);
                                    }
                                }
                            }
                        }
                    }
                }
                myWorkBook.Close(false, false, Type.Missing);
                System.Runtime.InteropServices.Marshal.ReleaseComObject(myWorkBook); //http://support.microsoft.com/kb/317109
            }
            catch (Exception excelOpenFailure)
            {
                AddText(errorStream, "\"Excel operation error occurred. Trying to continue. Exception message: " + excelOpenFailure.Message + "\"" + Environment.NewLine);
                errorCount += 1;
                lblErrorCount.Text = ("Excel Error Count: " + errorCount);
                /*//if we fail, try and close the Excel instance if poss, and recreate it! If this completely fails, crash inelegantly :( try { oExcel.Visible = false; oExcel.ScreenUpdating = false; oExcel.DisplayAlerts = false; oExcel.Quit(); System.Runtime.InteropServices.Marshal.ReleaseComObject(oExcel); //http://support.microsoft.com/kb/317109 } catch (Exception doublecrash) { //Do nothing }*/
                //Recreate the Excel instance after a failure so subsequent files can still be processed
                oExcel = null;
                oExcel = new Excel.Application();
                oExcel.Visible = false;
                oExcel.ScreenUpdating = false;
                oExcel.DisplayAlerts = false;
            }
            //Report progress
            worker.ReportProgress((int)(((double)iterations / (double)totalSteps) * 100));
        }
    }
    //Dump unfound cheques
    FileStream outputFileStream2 = File.Create(tbOutputLocation.Text + ".faileddump.csv");
    if (chequeList.Count != 0)
    {
        foreach (var currCheque in chequeList)
        {
            AddText(outputFileStream2, "\"" + currCheque.Key.ToString() + "\"" + Environment.NewLine);
        }
    }
    //Shut down everything
    outputFileStream.Close();
    outputFileStream = null;
    outputFileStream2.Close();
    outputFileStream2 = null;
    errorStream.Close();
    errorStream = null;
    oExcel.Visible = true;
    oExcel.ScreenUpdating = true;
    oExcel.DisplayAlerts = true;
    oExcel.Quit();
    System.Runtime.InteropServices.Marshal.ReleaseComObject(oExcel); //http://support.microsoft.com/kb/317109
}
protected String RegexCapture(BVPImportColumnDefinition colDef, GenericParserAdapter row, Dictionary<String, List<String>> extra) { return row[colDef.SourceColumnName]; }
public override string ExtractValue(BVPImportColumnDefinition coldef, GenericParserAdapter row, Dictionary<string, List<string>> extraData) { var value = Default(coldef, row, extraData); var dates = value.Split('/'); return ExtractDate(dates, value); }
public override string ExtractValue(BVPImportColumnDefinition columnDef, GenericParserAdapter row, Dictionary<string, List<string>> extraData) { return Default(columnDef, row, extraData); }
public override string ExtractValue(BVPImportColumnDefinition columnDef, GenericParserAdapter row, Dictionary<string, List<string>> extraData) { var value = Default(columnDef, row, extraData); if (!String.IsNullOrEmpty(value)) { value = value.Trim(); foreach (Regex regex in Regexes) { var matcher = regex.Match(value); if (matcher.Success) { if (matcher.Groups.Count > 1) { return matcher.Groups[1].Value; } } } } return value; }
private void btnBrowseForDataFile_Click(object sender, EventArgs e) { diagOpenFile.ShowDialog(); if (!String.IsNullOrEmpty(diagOpenFile.FileName)) { txtDataFile.Text = diagOpenFile.FileName; using (GenericParserAdapter parser = new GenericParserAdapter(diagOpenFile.FileName)) { parser.FirstRowHasHeader = true; parser.Read(); sourceData = parser.GetDataTable(); } foreach (DataColumn column in sourceData.Columns) { cboColumnNames.Items.Add(column.ColumnName); } } }
public abstract String ExtractValue(BVPImportColumnDefinition coldef, GenericParserAdapter row, Dictionary<String, List<String>> extraData);
public override string ExtractValue(BVPImportColumnDefinition coldef, GenericParserAdapter row, Dictionary<string, List<string>> extraData) { if (extraData != null && extraData.ContainsKey(coldef.SourceColumnName)) { var values = extraData[coldef.SourceColumnName]; if (ValueFormatter != null) { var formattedValues = values.Select((value) => ValueFormatter(value)); return String.Join(Delimiter, formattedValues); } else { return String.Join(Delimiter, values); } } return ""; }
/// <summary> /// Loads the data set. /// </summary> private static void LoadDataSet() { try { _stopwatch.Start(); using (var parser = new GenericParserAdapter(_options.Source)) { parser.ColumnDelimiter = _options.Delimiter; parser.FirstRowHasHeader = _options.Header; _dataSet = parser.GetDataSet(); } } catch (Exception e) { ExitFailure(e.Message); } }