/// <summary>
/// Migrate the rows of the old Progress per-year tables (this-year-old / previous-year variants)
/// into the single consolidated OpenPetra table that replaces them.
/// Only a fixed set of GL tables have such variants; for any other table name nothing is done.
/// </summary>
/// <param name="ANewTableName">name of the consolidated OpenPetra table currently being written</param>
/// <param name="dumpFile">path of the Progress dump file for the new table, without the ".d.gz" extension;
/// the old variant's dump file path is derived by substituting the table name</param>
/// <param name="MyWriter">writer for the PostgreSQL COPY data</param>
/// <param name="MyWriterTest">writer for the optional test output file; may be null</param>
/// <param name="newTable">definition of the consolidated table the rows are converted to</param>
/// <returns>number of rows migrated from the old variant tables</returns>
private int MoveTables(string ANewTableName, string dumpFile, StreamWriter MyWriter, StreamWriter MyWriterTest, TTable newTable)
{
    // map each consolidated table to the old Progress tables whose rows must be merged into it.
    // the order matters: this-year-old rows are written before previous-year rows,
    // matching the original hand-unrolled sequence.
    string[] oldTableNames;

    switch (ANewTableName)
    {
        case "a_batch":
            oldTableNames = new string[] { "a_this_year_old_batch", "a_previous_year_batch" };
            break;

        case "a_journal":
            oldTableNames = new string[] { "a_this_year_old_journal", "a_previous_year_journal" };
            break;

        case "a_transaction":
            oldTableNames = new string[] { "a_this_year_old_transaction", "a_previous_year_transaction" };
            break;

        case "a_trans_anal_attrib":
            // NB: these old names use a different abbreviation scheme than the tables above
            oldTableNames = new string[] { "a_thisyearold_trans_anal_attrib", "a_prev_year_trans_anal_attrib" };
            break;

        case "a_corporate_exchange_rate":
            oldTableNames = new string[] { "a_prev_year_corp_ex_rate" };
            break;

        default:
            // no old per-year variants exist for this table
            return 0;
    }

    int ProcessedRows = 0;

    foreach (string oldTableName in oldTableNames)
    {
        TLogging.Log(oldTableName);

        TTable oldTable = storeOld.GetTable(oldTableName);

        // the old variant's dump file lives next to the new table's dump file,
        // named after the old table
        TParseProgressCSV Parser = new TParseProgressCSV(
            dumpFile.Replace(ANewTableName, oldTableName) + ".d.gz",
            oldTable.grpTableField.Count);

        ProcessedRows += TFixData.MigrateData(Parser, MyWriter, MyWriterTest, oldTable, newTable);
    }

    return ProcessedRows;
}
/// <summary>
/// process the data from the Progress dump file, so that Postgresql can read the result
/// </summary>
/// <param name="dumpFile">path of the Progress dump file, without the ".d.gz" extension</param>
/// <param name="newTable">definition of the OpenPetra table the rows are converted to</param>
public void ProcessAndWritePostgresqlFile(string dumpFile, TTable newTable)
{
    string oldTableName = DataDefinitionDiff.GetOldTableName(newTable.strName);
    TTable oldTable = storeOld.GetTable(oldTableName);

    // if this is a new table in OpenPetra, do not dump anything. the table will be empty in OpenPetra
    // (except p_postcode_region_range, a_budget_revision and p_partner_gift_destination which are populated here)
    if ((oldTable == null) && (newTable.strName != "p_postcode_region_range")
        && (newTable.strName != "a_budget_revision")
        && (newTable.strName != "p_partner_gift_destination")
        && (newTable.strName != "p_partner_attribute_category")
        && (newTable.strName != "p_partner_attribute_type"))
    {
        return;
    }

    string NewFileName =
        TAppSettingsManager.GetValue("fulldumpPath", "fulldump") +
        Path.DirectorySeparatorChar +
        newTable.strName + ".sql.gz";

    StreamWriter MyWriter = null;
    StreamWriter MyWriterCount = null;
    StreamWriter MyWriterTest = null;
    TParseProgressCSV Parser = null;

    try
    {
        if (oldTable != null)
        {
            Parser = new TParseProgressCSV(
                dumpFile + ".d.gz",
                oldTable.grpTableField.Count);
        }

        FileStream outStream = File.Create(NewFileName);
        Stream gzoStream = new GZipOutputStream(outStream);
        MyWriter = new StreamWriter(gzoStream, Encoding.UTF8);

        if (TAppSettingsManager.GetValue("create_test_files", "true", false) == "true")
        {
            FileStream outStreamTest = File.Create(
                TAppSettingsManager.GetValue("fulldumpPath", "fulldump") +
                Path.DirectorySeparatorChar +
                newTable.strName + "_test.sql.gz");
            Stream gzoStreamTest = new GZipOutputStream(outStreamTest);
            MyWriterTest = new StreamWriter(gzoStreamTest, Encoding.UTF8);
        }

        string rowCountDir = TAppSettingsManager.GetValue("fulldumpPath", "fulldump") +
                             Path.DirectorySeparatorChar +
                             "_row_count.txt";

        // append to the shared row-count file if it exists already, otherwise start a new one
        if (File.Exists(rowCountDir))
        {
            MyWriterCount = File.AppendText(rowCountDir);
        }
        else
        {
            FileStream outStreamCount = File.Create(rowCountDir);
            MyWriterCount = new StreamWriter(outStreamCount);
        }

        // The p_partner_contact table needs to write p_contact_log records first, so handle the COPY manually
        if (newTable.strName != "p_partner_contact")
        {
            MyWriter.WriteLine("COPY " + newTable.strName + " FROM stdin;");
        }

        int ProcessedRows = TFixData.MigrateData(Parser, MyWriter, MyWriterTest, oldTable, newTable);

        // merge in the rows from the old per-year variant tables, if any exist for this table
        ProcessedRows += MoveTables(newTable.strName, dumpFile, MyWriter, MyWriterTest, newTable);

        MyWriter.WriteLine("\\.");
        MyWriter.WriteLine();

        if (MyWriterTest != null)
        {
            MyWriterTest.WriteLine();
        }

        MyWriterCount.WriteLine(newTable.strName);
        MyWriterCount.WriteLine(ProcessedRows);

        MyWriter.Close();
        MyWriter = null;

        TLogging.Log(" after processing file, rows: " + ProcessedRows.ToString());
    }
    catch (Exception e)
    {
        TLogging.Log("Memory usage: " + (GC.GetTotalMemory(false) / 1024 / 1024).ToString() + " MB");
        TLogging.Log("WARNING Problems processing file " + dumpFile + ": " + e.ToString());

        // bug fix: close the partially written output before deleting it; previously the
        // writer (and the gzip/file streams underneath) leaked on the exception path,
        // which also made the File.Delete below fail on Windows because the file was still open
        if (MyWriter != null)
        {
            MyWriter.Close();
            MyWriter = null;
        }

        // remove the incomplete output file that was actually created by this method.
        // NOTE(review): the original only deleted dumpFile + ".sql.gz", which matches
        // NewFileName only when the old and new table names (and directories) coincide — verify
        if (File.Exists(NewFileName))
        {
            File.Delete(NewFileName);
        }

        // kept for backward compatibility with the original cleanup behavior
        if (File.Exists(dumpFile + ".sql.gz"))
        {
            File.Delete(dumpFile + ".sql.gz");
        }
    }
    finally
    {
        if (MyWriter != null)
        {
            // safety net: only reached if Close was not performed above
            MyWriter.Close();
        }

        if (MyWriterCount != null)
        {
            MyWriterCount.Close();
        }

        if (MyWriterTest != null)
        {
            MyWriterTest.Close();
        }
    }
}