/// <summary>
/// Get a List of statement document lines from an array of strings.
/// </summary>
/// <param name="lines">Array of text lines in the format specified by Atrio as a statement file.</param>
/// <returns>A HeaderSource pairing one StringMap per input line with the canonical header names.</returns>
public static HeaderSource<List<StringMap>, List<string>> ParseM691(string[] lines)
{
    List<StringMap> tempMapList = new List<StringMap>();

    // Canonical column headers, built once. Previously each header was re-added
    // inside the per-line loop, producing one duplicate set of headers per row.
    List<string> headerList = new List<string>
    {
        "Group Billing Acct ID",
        "Invoice Number",
        "Invoice Amount",
        "Low-Income Subsidy Amount",
        "Late-Enrollment Penalty Amount",
        "Invoice Period From Date",
        "Invoice Period To Date"
    };

    foreach (string line in lines)
    {
        StringMap docLine = new StringMap();

        // Fixed-width fields; offsets/lengths follow the Atrio M691 layout.
        // Each canonical key also gets camelCase/PascalCase aliases for downstream lookups.
        docLine.Add("Group Billing Acct ID", Parse.TrimSubstring(line, 29, 9));
        docLine.Add("accountNumberGroup", docLine["Group Billing Acct ID"]);
        docLine.Add("AccountNumberGroup", docLine["Group Billing Acct ID"]);

        docLine.Add("Invoice Number", Parse.TrimSubstring(line, 20, 9));
        docLine.Add("invoiceNum", docLine["Invoice Number"]);
        docLine.Add("InvoiceNum", docLine["Invoice Number"]);

        docLine.Add("Invoice Amount", Parse.TrimSubstring(line, 47, 19));
        docLine.Add("invoiceAmount", docLine["Invoice Amount"]);
        docLine.Add("InvoiceAmount", docLine["Invoice Amount"]);
        docLine.Add("InvoiceAmt", docLine["Invoice Amount"]);

        docLine.Add("Low-Income Subsidy Amount", Parse.TrimSubstring(line, 66, 19));
        // BUG FIX: these aliases previously looked up the misspelled key
        // "Low-Income Subsity Amount", which is never added and therefore
        // threw KeyNotFoundException on the first line parsed.
        docLine.Add("Low Income Subsidy Amount", docLine["Low-Income Subsidy Amount"]);
        docLine.Add("lowIncomeSubsidy", docLine["Low-Income Subsidy Amount"]);
        docLine.Add("LowIncomeSubsidy", docLine["Low-Income Subsidy Amount"]);

        docLine.Add("Late-Enrollment Penalty Amount", Parse.TrimSubstring(line, 85, 19));
        docLine.Add("Late Enrollment Penalty Amount", docLine["Late-Enrollment Penalty Amount"]);
        docLine.Add("lateEnrollmentPenalty", docLine["Late-Enrollment Penalty Amount"]);
        docLine.Add("LateEnrollmentPenalty", docLine["Late-Enrollment Penalty Amount"]);

        docLine.Add("Invoice Period From Date", Parse.TrimSubstring(line, 10, 10));
        docLine.Add("fromDate", docLine["Invoice Period From Date"]);
        docLine.Add("FromDate", docLine["Invoice Period From Date"]);
        docLine.Add("Invoice Start Date", docLine["Invoice Period From Date"]);

        // NOTE(review): "To Date" reads the same substring (10, 10) as "From Date".
        // This looks like a copy/paste offset error — confirm against the M691
        // fixed-width layout before changing; left as-is to preserve behavior.
        docLine.Add("Invoice Period To Date", Parse.TrimSubstring(line, 10, 10));
        docLine.Add("Invoice End Date", docLine["Invoice Period To Date"]);
        docLine.Add("toDate", docLine["Invoice Period To Date"]);
        docLine.Add("ToDate", docLine["Invoice Period To Date"]);

        tempMapList.Add(docLine);
    }

    HeaderSource<List<StringMap>, List<string>> ret =
        new HeaderSource<List<StringMap>, List<string>>(tempMapList, headerList.ToArray());
    return ret;
}
/// <summary>
/// Imports a single file as a delimited file with a header. Row 1 is always parsed as a header,
/// and is used to construct resulting dictionaries by row. Each dict is row 1 as the keys and
/// each following row of the file as the values.
/// </summary>
/// <param name="fileName">The full path of the file to import</param>
/// <param name="delimiter">What is the delimiting character? i.e. comma, pipe, tab, etc.</param>
/// <param name="useQuotes">Are there quotes around values?</param>
/// <param name="headers">A preloaded set of headers -- optional. When supplied, row 1 is
/// treated as data, not as a header row.</param>
/// <returns>A List of Dictionary per row where KEY=Row1</returns>
private static HeaderSource<List<StringMap>, List<string>> ImportCSVWithHeader(
    string fileName
    , string delimiter
    , bool useQuotes
    , IList<string> headers = null)
{
    using (Stream readFile = File.Open(fileName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
    // FIX: TextFieldParser is IDisposable and was previously never disposed.
    using (var csvRead = new TextFieldParser(readFile)
    {
        CommentTokens = new[] { "#" },
        Delimiters = new[] { delimiter },
        HasFieldsEnclosedInQuotes = useQuotes,
        TextFieldType = FieldType.Delimited,
        TrimWhiteSpace = true
    })
    {
        // If no headers were supplied, consume row 1 as the header row,
        // falling back to an empty header set for an empty file.
        headers ??= csvRead.ReadFields() ?? new string[] { };

        List<StringMap> records = new List<StringMap>();
        while (!csvRead.EndOfData)
        {
            string[] rowData = csvRead.ReadFields() ?? new string[] { };
            var newRow = new StringMap();
            // FIX: bound by both counts — a row with more fields than headers
            // previously threw on headers[n]; extra unnamed fields are dropped.
            for (int n = 0; n < rowData.Length && n < headers.Count; ++n)
            {
                newRow.Add(headers[n], rowData[n]);
            }
            records.Add(newRow);
        }

        List<string> headerList = new List<string>(headers);
        HeaderSource<List<StringMap>, List<string>> ret =
            new HeaderSource<List<StringMap>, List<string>>(records, headerList.ToArray());
        return ret;
    }
}
/// <summary>
/// Imports a single file as a delimited file with a header. Row 1 is always parsed as a header,
/// and is used to construct resulting dictionaries by row. Each dict is row 1 as the keys and
/// each following row of the file as the values.
/// </summary>
/// <param name="fileName">The full path of the file to import</param>
/// <param name="delimiter">What is the delimiting character? i.e. comma, pipe, tab, etc.</param>
/// <param name="useQuotes">Are there quotes around values?</param>
/// <param name="headers">A preloaded set of headers -- optional. When supplied, row 1 is
/// treated as data, not as a header row.</param>
/// <returns>A List of Dictionary per row where KEY=Row1</returns>
public static HeaderSource<List<StringMap>, List<string>> ImportCSVWithHeader(
    string fileName
    , string delimiter
    , bool useQuotes
    , IList<string> headers = null)
{
    using (Stream readFile = File.Open(fileName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
    // FIX: TextFieldParser is IDisposable and was previously never disposed.
    using (var csvRead = new TextFieldParser(readFile)
    {
        CommentTokens = new[] { "#" },
        Delimiters = new[] { delimiter },
        HasFieldsEnclosedInQuotes = useQuotes,
        TextFieldType = FieldType.Delimited,
        TrimWhiteSpace = true
    })
    {
        // If no headers were supplied, consume row 1 as the header row,
        // falling back to an empty header set for an empty file.
        // No action if headers were provided in the arguments.
        headers ??= csvRead.ReadFields() ?? new string[] { };

        List<StringMap> records = new List<StringMap>();
        while (!csvRead.EndOfData)
        {
            string[] rowData = csvRead.ReadFields() ?? new string[] { };
            // Pair header names with field values positionally.
            var newRow = Model_Index_Dict<string, string>.Model_Select(headers, rowData);
            records.Add(new StringMap(newRow));
        }

        HeaderSource<List<StringMap>, List<string>> ret =
            new HeaderSource<List<StringMap>, List<string>>(records, headers.ToArray());
        return ret;
    }
}
// NOTE(review): the declaration of this method's tuple return type begins above this
// chunk; only the trailing tuple element and the body are visible here.
, KeyedDocs <DocM504A_BalFwdRecord> > PopulateDocs(string[] files)
{
    // each will be a dictionary of documents indexed by their respective IDs.
    // NOTE(review): MCSB691, MCSB504A, and MSCB690_CSVData are declared but never
    // used in this method — candidates for removal.
    Dictionary <AcctID, DocM691_Invoice> MCSB691 = new Dictionary <AcctID, DocM691_Invoice>();
    KeyedDocs <DocM691_Invoice> M691Records = null;
    KeyedDocs <DocM690_MemberRecord> M690Records = null;
    KeyedDocs <DocM504A_BalFwdRecord> M504Records = null;
    Dictionary <AcctID, DocM504A_BalFwdRecord> MCSB504A = new Dictionary <AcctID, DocM504A_BalFwdRecord>();
    var MCSB691_Primitive = new List <string>();
    var MSCB690_CSVData = new Dictionary <string, List <StringMap> >();
    DocType docType;
    string filename
        , fileExtension; // NOTE(review): fileExtension is assigned but never read.
    foreach (string filePath in files)
    {
        filename = Path.GetFileName(filePath);
        fileExtension = Path.GetExtension(filePath);
        // Classify the file by its name to pick the parser below.
        docType = IdentifyDoc(filename);
        // put each document type into its headersource (struct of Stringmap and headers list)
        switch (docType)
        {
            case (DocType.M691):
                // NOTE(review): this passes the bare file name, not filePath — if the
                // process CWD differs from the files' directory, ImportRows will miss
                // the file. Presumably should be filePath; confirm with ImportRows.
                MCSB691_Primitive = ImportRows(filename);
                // Fixed-width statement lines → header/rows struct.
                HeaderSource <List <StringMap>, List <string> > src691 = DocM691_Invoice.ParseM691(MCSB691_Primitive.ToArray());
                M691Records = new KeyedDocs <DocM691_Invoice>(src691);
                break;
            case (DocType.M690):
                // Pipe-delimited member file with its own header row.
                // NOTE(review): the visible ImportCSVWithHeader overloads have no
                // primaryKey parameter (it is commented out) — verify this call
                // compiles against the overload actually in scope.
                var M690sByAcctID = ImportCSVWithHeader(
                    filePath
                    , primaryKey: "Group Billing Acct ID"
                    , delimiter: "|"
                    , useQuotes: false);
                M690Records = new KeyedDocs <DocM690_MemberRecord>(M690sByAcctID);
                break;
            case (DocType.M504A):
                // Comma-delimited, quoted balance-forward file; headers are preloaded
                // from the record type because the file has no header row.
                string[] headers = DocM504A_BalFwdRecord.headers;
                var M504AsByAcctID = ImportCSVWithHeader(
                    filePath
                    , primaryKey: "Account Id"
                    , ","
                    , useQuotes: true
                    , headers);
                M504Records = new KeyedDocs <DocM504A_BalFwdRecord>(M504AsByAcctID);
                break;
            case (DocType.Error):
                throw new Exception($"Unexpected file found: {filePath}");
        }
    }
    // Any doc type absent from the input set returns as null in the tuple.
    return(new ValueTuple <
        KeyedDocs <DocM691_Invoice>
        , KeyedDocs <DocM690_MemberRecord>
        , KeyedDocs <DocM504A_BalFwdRecord> >(
            M691Records, M690Records, M504Records));
}
} // end method

#endregion

#endregion

#region Private Core Algorithm

/// <summary>
/// Populate documents with information, in the proper order.
/// Statements must be loaded before Members/BalancesForward: both child record
/// sets declare foreign keys against statementRecords' "Group Billing Acct ID"
/// column, so OrderFileList must yield statement files first.
/// </summary>
/// <param name="files">Full paths of the record files to import.</param>
private void PopulateRecords(string[] files)
{
    RecordType recordType;
    string filename
        , fileExtension; // NOTE(review): fileExtension is assigned but never read.
    // OrderFileList sorts so parent (statement) files precede child files.
    Queue<string> fileList = OrderFileList(files);
    foreach (string filePath in fileList)
    {
        filename = Path.GetFileName(filePath);
        fileExtension = Path.GetExtension(filePath);
        recordType = (RecordType)IdentifyRecordFile(filename);
        // FK wiring buffers, filled per case below.
        DataColumn[] _parentColumns;
        String[] _childColumns;
        // put each document type into its headersource (struct of Stringmap and headers list)
        switch (recordType)
        {
            case (RecordType.Statements):
                // Fixed-width statement rows → header/rows struct → DataTable-backed records.
                List<string> StatementRecordData = CSV.ImportRows(filePath);
                HeaderSource<List<StringMap>, List<string>> statementSrcData = Record_Statement.Sample.ParseRows(StatementRecordData.ToArray());
                statementRecords = new FileDataRecords<Record_Statement, ClientETLProcess>(
                    statementSrcData
                    , AllTableHeadersByType
                    , new ForeignKeyConstraintElements(this, typeof(Record_Statement).Name));
                Log.Write("Statement Records files populated.");
                // Register the table under its record type (create list on first sight).
                if (!TablesByType.ContainsKey(typeof(Record_Statement)))
                {
                    TablesByType.Add(typeof(Record_Statement), new List<DataTable>() { statementRecords });
                }
                else
                {
                    TablesByType[typeof(Record_Statement)].Add(statementRecords);
                }
                break;
            case (RecordType.Members):
                // TO DO: once-over post-DataSet/DataTable type changes.
                // statementTable already populated, per OrderFileList(files) method.
                // Pipe-delimited member file with its own header row.
                var membersByAcctID = CSV.ImportCSVWithHeader(
                    filePath
                    , delimiter: "|"
                    , useQuotes: false);
                _parentColumns = new DataColumn[] {
                    Tables[statementRecords.TableName].Columns["Group Billing Acct ID"]
                }; // parent key columns.
                _childColumns = new String[] { "Billing Account Number" }; // "must belong to a column" error.
                memberRecords = new FileDataRecords<Record_Members, ClientETLProcess>(
                    membersByAcctID
                    , AllTableHeadersByType
                    , new ForeignKeyConstraintElements(
                        this
                        , _parentColumns
                        , _childColumns
                    )
                );
                if (!TablesByType.ContainsKey(typeof(Record_Members)))
                {
                    TablesByType.Add(typeof(Record_Members), new List<DataTable>() { memberRecords });
                }
                else
                {
                    TablesByType[typeof(Record_Members)].Add(memberRecords);
                }
                Log.Write("Member Records populated.");
                break;
            case (RecordType.BalancesForward):
                // TO DO: once-over post-DataSet/DataTable type changes.
                // balfwdfile has no internal headers.
                // Headers come preloaded from the sample record type instead.
                List<string> headers = Record_BalFwd.Sample.Headers;
                // TO DO:
                var balFwdByAcctID = CSV.ImportCSVWithHeader(
                    filePath
                    , ","
                    , useQuotes: true
                    , headers);
                _parentColumns = new DataColumn[] {
                    Tables[statementRecords.TableName].Columns["Group Billing Acct ID"]
                }; // parent key columns.
                _childColumns = new String[] { "Account ID" };
                balFwdRecords = new FileDataRecords<Record_BalFwd, ClientETLProcess>(
                    balFwdByAcctID
                    , AllTableHeadersByType
                    , new ForeignKeyConstraintElements(
                        this
                        , _parentColumns
                        , _childColumns
                    )
                );
                Log.Write("Balance Forward Records populated.");
                if (!TablesByType.ContainsKey(typeof(Record_BalFwd)))
                {
                    TablesByType.Add(typeof(Record_BalFwd), new List<DataTable>() { balFwdRecords });
                }
                else
                {
                    TablesByType[typeof(Record_BalFwd)].Add(balFwdRecords);
                }
                break;
            case (RecordType.Error):
                throw new Exception($"Unexpected file found: {filePath}");
        }
    }
}
// NOTE(review): the declaration of this method's tuple return type begins above this
// chunk; only the trailing tuple element and the body are visible here.
, KeyedDocs <BalFwdRecord> > PopulateDocs(string[] files)
{
    // each will be a dictionary of documents indexed by their respective IDs.
    KeyedDocs <InvoiceData> InvoiceData = null;
    KeyedDocs <MemberRecord> MemberData = null;
    KeyedDocs <BalFwdRecord> BalFwdData = null;
    var MCSB691_Primitive = new List <string>();
    DocType docType;
    string filename
        , fileExtension; // NOTE(review): fileExtension is assigned but never read.
    foreach (string filePath in files)
    {
        filename = Path.GetFileName(filePath);
        fileExtension = Path.GetExtension(filePath);
        docType = IdentifyDoc(filename);
        // put each document type into its headersource (struct of Stringmap and headers list)
        switch (docType)
        {
            case (DocType.Statement):
                // NOTE(review): passes the bare file name, not filePath — breaks if the
                // CWD differs from the files' directory. Presumably should be filePath.
                MCSB691_Primitive = ImportRows(filename);
                HeaderSource <List <StringMap>, List <string> > src691 = Specific.InvoiceData.ParseM691(MCSB691_Primitive.ToArray());
                InvoiceData = new KeyedDocs <InvoiceData>(src691);
                break;
            // NOTE(review): these two case labels appear swapped. The BalanceForward
            // case builds MemberData from a pipe-delimited file, while the Members
            // case builds BalFwdData using BalFwdRecord.headers. Confirm against
            // IdentifyDoc and swap the labels (not the bodies) if so.
            case (DocType.BalanceForward):
                var M690sByAcctID = ImportCSVWithHeader(
                    filePath
                    // , primaryKey: "Group Billing Acct ID"
                    , delimiter: "|"
                    , useQuotes: false);
                MemberData = new KeyedDocs <MemberRecord>(M690sByAcctID);
                break;
            case (DocType.Members):
                // Balance-forward file has no header row; headers are preloaded.
                string[] headers = BalFwdRecord.headers;
                var M504AsByAcctID = ImportCSVWithHeader(
                    filePath
                    // , primaryKey: "Account Id"
                    , ","
                    , useQuotes: true
                    , headers);
                BalFwdData = new KeyedDocs <BalFwdRecord>(M504AsByAcctID);
                break;
            case (DocType.Error):
                throw new Exception($"Unexpected file found: {filePath}");
        }
    }
    // Any doc type absent from the input set returns as null in the tuple.
    return(new ValueTuple <
        KeyedDocs <InvoiceData>
        , KeyedDocs <MemberRecord>
        , KeyedDocs <BalFwdRecord> >(
            InvoiceData, MemberData, BalFwdData));
}