/// <summary>
/// process the data from the Progress dump file, so that Postgresql can read the result
/// </summary>
/// <param name="dumpFile">path of the Progress dump file, without the ".d.gz" extension</param>
/// <param name="newTable">definition of the table in the new (OpenPetra) database schema</param>
public void ProcessAndWritePostgresqlFile(string dumpFile, TTable newTable)
{
    string oldTableName = DataDefinitionDiff.GetOldTableName(newTable.strName);
    TTable oldTable = storeOld.GetTable(oldTableName);

    // if this is a new table in OpenPetra, do not dump anything. the table will be empty in OpenPetra
    // (except p_postcode_region_range, a_budget_revision and p_partner_gift_destination which are populated here)
    if ((oldTable == null)
        && (newTable.strName != "p_postcode_region_range")
        && (newTable.strName != "a_budget_revision")
        && (newTable.strName != "p_partner_gift_destination")
        && (newTable.strName != "p_partner_attribute_category")
        && (newTable.strName != "p_partner_attribute_type"))
    {
        return;
    }

    string NewFileName = TAppSettingsManager.GetValue("fulldumpPath", "fulldump") +
                         Path.DirectorySeparatorChar + newTable.strName + ".sql.gz";

    StreamWriter MyWriter = null;
    StreamWriter MyWriterCount = null;
    StreamWriter MyWriterTest = null;
    TParseProgressCSV Parser = null;

    try
    {
        if (oldTable != null)
        {
            Parser = new TParseProgressCSV(
                dumpFile + ".d.gz",
                oldTable.grpTableField.Count);
        }

        // closing MyWriter later also closes the gzip stream and the underlying FileStream
        FileStream outStream = File.Create(NewFileName);
        Stream gzoStream = new GZipOutputStream(outStream);
        MyWriter = new StreamWriter(gzoStream, Encoding.UTF8);

        if (TAppSettingsManager.GetValue("create_test_files", "true", false) == "true")
        {
            FileStream outStreamTest = File.Create(
                TAppSettingsManager.GetValue("fulldumpPath", "fulldump") +
                Path.DirectorySeparatorChar + newTable.strName + "_test.sql.gz");
            Stream gzoStreamTest = new GZipOutputStream(outStreamTest);
            MyWriterTest = new StreamWriter(gzoStreamTest, Encoding.UTF8);
        }

        // the row counts of all tables are collected in one shared text file
        string rowCountFile = TAppSettingsManager.GetValue("fulldumpPath", "fulldump") +
                              Path.DirectorySeparatorChar + "_row_count.txt";

        if (File.Exists(rowCountFile))
        {
            MyWriterCount = File.AppendText(rowCountFile);
        }
        else
        {
            FileStream outStreamCount = File.Create(rowCountFile);
            MyWriterCount = new StreamWriter(outStreamCount);
        }

        // The p_partner_contact table needs to write p_contact_log records first, so handle the COPY manually
        if (newTable.strName != "p_partner_contact")
        {
            MyWriter.WriteLine("COPY " + newTable.strName + " FROM stdin;");
        }

        int ProcessedRows = TFixData.MigrateData(Parser, MyWriter, MyWriterTest, oldTable, newTable);

        // some new tables are merged from several old Progress tables (eg. this/previous year batches)
        ProcessedRows += MoveTables(newTable.strName, dumpFile, MyWriter, MyWriterTest, newTable);

        MyWriter.WriteLine("\\.");
        MyWriter.WriteLine();

        if (MyWriterTest != null)
        {
            MyWriterTest.WriteLine();
        }

        MyWriterCount.WriteLine(newTable.strName);
        MyWriterCount.WriteLine(ProcessedRows);

        TLogging.Log(" after processing file, rows: " + ProcessedRows.ToString());
    }
    catch (Exception e)
    {
        TLogging.Log("Memory usage: " + (GC.GetTotalMemory(false) / 1024 / 1024).ToString() + " MB");
        TLogging.Log("WARNING Problems processing file " + dumpFile + ": " + e.ToString());

        // release the output file first, otherwise the delete below fails on Windows
        // (closing a StreamWriter twice is safe: Dispose is idempotent)
        if (MyWriter != null)
        {
            MyWriter.Close();
        }

        // delete the partially written output file. was: dumpFile + ".sql.gz", which is only the
        // created file when dumpFile points into the fulldump directory; NewFileName always is
        if (File.Exists(NewFileName))
        {
            File.Delete(NewFileName);
        }
    }
    finally
    {
        // previously MyWriter was only closed on the success path and leaked on exceptions
        if (MyWriter != null)
        {
            MyWriter.Close();
        }

        if (MyWriterCount != null)
        {
            MyWriterCount.Close();
        }

        if (MyWriterTest != null)
        {
            MyWriterTest.Close();
        }
    }
}
/// <summary>
/// Several old Progress tables have been merged into a single OpenPetra table (eg. this year's
/// and previous years' batches all end up in a_batch). Append the rows of those additional
/// old tables to the COPY statement that has already been started for the new table.
/// </summary>
/// <param name="ANewTableName">name of the table in the new database schema</param>
/// <param name="dumpFile">dump file path of the new table, without the ".d.gz" extension;
/// the file names of the additional old tables are derived from it by name substitution</param>
/// <param name="MyWriter">writer for the main dump file</param>
/// <param name="MyWriterTest">writer for the test file; may be null</param>
/// <param name="newTable">definition of the table in the new database schema</param>
/// <returns>the number of rows migrated from the additional old tables</returns>
private int MoveTables(string ANewTableName, string dumpFile, StreamWriter MyWriter, StreamWriter MyWriterTest, TTable newTable)
{
    // old Progress tables whose rows need to be merged into the given new table
    string[] OldTableNames;

    switch (ANewTableName)
    {
        case "a_batch":
            OldTableNames = new string[] { "a_this_year_old_batch", "a_previous_year_batch" };
            break;

        case "a_journal":
            OldTableNames = new string[] { "a_this_year_old_journal", "a_previous_year_journal" };
            break;

        case "a_transaction":
            OldTableNames = new string[] { "a_this_year_old_transaction", "a_previous_year_transaction" };
            break;

        case "a_trans_anal_attrib":
            OldTableNames = new string[] { "a_thisyearold_trans_anal_attrib", "a_prev_year_trans_anal_attrib" };
            break;

        case "a_corporate_exchange_rate":
            OldTableNames = new string[] { "a_prev_year_corp_ex_rate" };
            break;

        default:
            // no additional old tables for this new table
            return 0;
    }

    int ProcessedRows = 0;

    foreach (string OldTableName in OldTableNames)
    {
        TLogging.Log(OldTableName);

        TTable oldTable = storeOld.GetTable(OldTableName);
        TParseProgressCSV Parser = new TParseProgressCSV(
            dumpFile.Replace(ANewTableName, OldTableName) + ".d.gz",
            oldTable.grpTableField.Count);

        ProcessedRows += TFixData.MigrateData(Parser, MyWriter, MyWriterTest, oldTable, newTable);
    }

    return ProcessedRows;
}
/// <summary>
/// Populate the empty table PPartnerGiftDestination using PmStaffData
/// </summary>
/// <param name="AColumnNames">column names of table p_partner_gift_destination</param>
/// <param name="ANewRow">reusable row buffer; its cells are overwritten for each record written</param>
/// <param name="AWriter">writer for the main dump file; one line is written per gift destination</param>
/// <param name="AWriterTest">writer for the test sql file; may be null</param>
/// <returns>the number of gift destination records that have been written</returns>
public static int PopulatePPartnerGiftDestination(StringCollection AColumnNames, ref string[] ANewRow, StreamWriter AWriter, StreamWriter AWriterTest)
{
    List <string[]> ActiveCommitments = new List <string[]>();
    int RowCounter = 0;

    // default for all new records
    SetValue(AColumnNames, ref ANewRow, "p_active_l", "\\N");
    SetValue(AColumnNames, ref ANewRow, "p_default_gift_destination_l", "\\N");
    SetValue(AColumnNames, ref ANewRow, "p_partner_class_c", "\\N");
    SetValue(AColumnNames, ref ANewRow, "p_commitment_site_key_n", "\\N");
    SetValue(AColumnNames, ref ANewRow, "p_commitment_key_n", "\\N");
    SetValue(AColumnNames, ref ANewRow, "p_comment_c", "\\N");
    SetValue(AColumnNames, ref ANewRow, "s_date_created_d", "\\N");
    SetValue(AColumnNames, ref ANewRow, "s_created_by_c", "\\N");
    SetValue(AColumnNames, ref ANewRow, "s_date_modified_d", "\\N");
    SetValue(AColumnNames, ref ANewRow, "s_modified_by_c", "\\N");
    SetValue(AColumnNames, ref ANewRow, "s_modification_id_t", "\\N");

    // load the file pm_staff_data.d.gz
    TTable StaffDataTable = TDumpProgressToPostgresql.GetStoreOld().GetTable("pm_staff_data");
    TParseProgressCSV StaffDataParser = new TParseProgressCSV(
        TAppSettingsManager.GetValue("fulldumpPath", "fulldump") + Path.DirectorySeparatorChar + "pm_staff_data.d.gz",
        StaffDataTable.grpTableField.Count);
    StringCollection StaffDataColumnNames = GetColumnNames(StaffDataTable);

    // find which commitment records are currently active
    while (true)
    {
        string[] StaffDataRow = StaffDataParser.ReadNextRow();

        if (StaffDataRow == null)
        {
            break;
        }

        string strStartOfCommitment = GetValue(StaffDataColumnNames, StaffDataRow, "pm_start_of_commitment_d");
        string strEndOfCommitment = GetValue(StaffDataColumnNames, StaffDataRow, "pm_end_of_commitment_d");

        try
        {
            // if commitment is currently active.
            // note: the month must be parsed with "MM"; the previous format "dd/mm/yyyy"
            // made .NET interpret the middle field as minutes, so the month was ignored
            // and every date was treated as being in January
            if ((DateTime.ParseExact(strStartOfCommitment, "dd/MM/yyyy", CultureInfo.InvariantCulture) <= DateTime.Today)
                && ((strEndOfCommitment == "\\N")
                    || (DateTime.ParseExact(strEndOfCommitment, "dd/MM/yyyy", CultureInfo.InvariantCulture) >= DateTime.Today))
                && (strStartOfCommitment != strEndOfCommitment))
            {
                ActiveCommitments.Add(StaffDataRow);
            }
        }
        catch
        {
            TLogging.Log("WARNING: Invalid date in commitment: " + strStartOfCommitment + " or " + strEndOfCommitment);
        }
    }

    // load the file p_person.d.gz
    TTable PersonTable = TDumpProgressToPostgresql.GetStoreOld().GetTable("p_person");
    TParseProgressCSV PersonParser = new TParseProgressCSV(
        TAppSettingsManager.GetValue("fulldumpPath", "fulldump") + Path.DirectorySeparatorChar + "p_person.d.gz",
        PersonTable.grpTableField.Count);
    StringCollection PersonColumnNames = GetColumnNames(PersonTable);

    SortedList <string, List <PersonKeyAndRow> > FamilyKeysWithPersons = new SortedList <string, List <PersonKeyAndRow> >();

    // group all Persons by their family key
    // (the previous code also accumulated every row in an unused "Persons" list; dropped)
    while (true)
    {
        string[] PersonRow = PersonParser.ReadNextRow();

        if (PersonRow == null)
        {
            break;
        }

        string familyKey = GetValue(PersonColumnNames, PersonRow, "p_family_key_n");

        if (!FamilyKeysWithPersons.ContainsKey(familyKey))
        {
            FamilyKeysWithPersons.Add(familyKey, new List <PersonKeyAndRow>());
        }

        FamilyKeysWithPersons[familyKey].Add(
            new PersonKeyAndRow(
                GetValue(PersonColumnNames, PersonRow, "p_partner_key_n"), PersonRow));
    }

    // load the file p_family.d.gz
    TTable FamilyTable = TDumpProgressToPostgresql.GetStoreOld().GetTable("p_family");
    TParseProgressCSV FamilyParser = new TParseProgressCSV(
        TAppSettingsManager.GetValue("fulldumpPath", "fulldump") + Path.DirectorySeparatorChar + "p_family.d.gz",
        FamilyTable.grpTableField.Count);
    StringCollection FamilyColumnNames = GetColumnNames(FamilyTable);

    // read through each family
    while (true)
    {
        string[] FamilyRow = FamilyParser.ReadNextRow();

        if (FamilyRow == null)
        {
            break;
        }

        string familykey = GetValue(FamilyColumnNames, FamilyRow, "p_partner_key_n");

        bool CommitmentFound = false;
        int MinimumFamilyId = int.MaxValue;

        // if family contains Persons
        if (FamilyKeysWithPersons.ContainsKey(familykey))
        {
            // read through each of the Family's Persons
            foreach (PersonKeyAndRow PersonRecord in FamilyKeysWithPersons[familykey])
            {
                // find if the Person has a currently active commitment
                string[] Commitment = ActiveCommitments.Find(e => GetValue(StaffDataColumnNames, e, "p_partner_key_n") == PersonRecord.PersonKey);

                // if currently active commitment exists create a new Gift Destination record
                if (Commitment != null)
                {
                    int CurrentFamilyId = Convert.ToInt32(GetValue(PersonColumnNames, PersonRecord.PersonRow, "p_old_omss_family_id_i"));

                    // NOTE(review): because of the break below, only the first matching Person is
                    // ever used, so MinimumFamilyId never actually filters anything — confirm intent
                    if (CurrentFamilyId < MinimumFamilyId)
                    {
                        SetValue(AColumnNames, ref ANewRow, "p_key_i", RowCounter.ToString());
                        SetValue(AColumnNames, ref ANewRow, "p_partner_key_n", GetValue(FamilyColumnNames, FamilyRow, "p_partner_key_n"));
                        SetValue(AColumnNames, ref ANewRow, "p_field_key_n", GetValue(StaffDataColumnNames, Commitment, "pm_target_field_n"));
                        SetValue(AColumnNames, ref ANewRow, "p_comment_c", "\\N");

                        TTableField tf = new TTableField();
                        tf.strName = "pm_start_of_commitment_d";
                        tf.strType = "DATE";
                        SetValue(AColumnNames, ref ANewRow, "p_date_effective_d",
                            TFixData.FixValue(GetValue(StaffDataColumnNames, Commitment, "pm_start_of_commitment_d"), tf));
                        tf.strName = "pm_end_of_commitment_d";
                        SetValue(AColumnNames, ref ANewRow, "p_date_expires_d",
                            TFixData.FixValue(GetValue(StaffDataColumnNames, Commitment, "pm_end_of_commitment_d"), tf));

                        CommitmentFound = true;
                        MinimumFamilyId = CurrentFamilyId;

                        // there can only be one active gift destination per family
                        break;
                    }
                }
            }
        }

        // if no active commitment is found then search for a "p_om_field_key_n" and use that to create a gift destination
        if (!CommitmentFound)
        {
            string OMFieldKey = GetValue(FamilyColumnNames, FamilyRow, "p_om_field_key_n");

            if ((OMFieldKey != "\\N") && (OMFieldKey != "0"))
            {
                SetValue(AColumnNames, ref ANewRow, "p_key_i", RowCounter.ToString());
                SetValue(AColumnNames, ref ANewRow, "p_partner_key_n", GetValue(FamilyColumnNames, FamilyRow, "p_partner_key_n"));
                SetValue(AColumnNames, ref ANewRow, "p_field_key_n", GetValue(FamilyColumnNames, FamilyRow, "p_om_field_key_n"));

                // arbitrary effective date one year in the past
                DateTime LastYear = DateTime.Today.AddYears(-1);
                SetValue(AColumnNames, ref ANewRow, "p_date_effective_d",
                    string.Format("{0}-{1}-{2}", LastYear.Year, LastYear.Month, LastYear.Day));
                SetValue(AColumnNames, ref ANewRow, "p_date_expires_d", "\\N");
                SetValue(AColumnNames, ref ANewRow, "p_comment_c", Catalog.GetString("Copied from Petra's OM Field Key."));

                CommitmentFound = true;
            }
        }

        // write gift destination to file
        if (CommitmentFound)
        {
            AWriter.WriteLine(StringHelper.StrMerge(ANewRow, '\t').Replace("\\\\N", "\\N"));

            if (AWriterTest != null)
            {
                AWriterTest.WriteLine("BEGIN; " + "COPY p_partner_gift_destination FROM stdin;");
                AWriterTest.WriteLine(StringHelper.StrMerge(ANewRow, '\t').Replace("\\\\N", "\\N"));
                AWriterTest.WriteLine("\\.");
                AWriterTest.WriteLine("ROLLBACK;");
            }

            RowCounter++;
        }
    }

    return RowCounter;
}