/// <summary>
/// Scans every .zip in <c>Options.FileDirectory</c>, registers each archive via
/// <c>InsertFileSource</c>, extracts the contained .dbf to a local scratch file,
/// and ships the resume key of every dBase row to the server through the
/// <c>[ext].{TableName}_GetLoaded</c> stored procedure as a table-valued parameter.
/// </summary>
/// <param name="Truncate">When true, the [ext] staging table is truncated first via <c>TruncExt</c>.</param>
public void LoadKeys(bool Truncate = false)
{
    string[] zips = Directory.GetFiles(Options.FileDirectory, "*.zip");

    using (SqlConnection con = Options.MakeConnection())
    {
        con.Open();
        InsertFileSource.Connection = con;

        if (Truncate)
        {
            TruncExt.Connection = con;
            TruncExt.ExecuteNonQuery();
        }

        foreach (string zf in zips)
        {
            Console.WriteLine("Processing: " + Path.GetFileName(zf));

            // Record the archive itself (name + size) as a file source.
            FileInfo f = new FileInfo(zf);
            InsertFileSource.Parameters["@filename"].Value = zf;
            InsertFileSource.Parameters["@filesize"].Value = f.Length;
            InsertFileSource.ExecuteNonQuery();

            // Extract the .dbf entry to a scratch file next to the executable.
            string dbfname = "";
            using (ZipArchive za = ZipFile.OpenRead(zf))
            {
                foreach (ZipArchiveEntry ze in za.Entries)
                {
                    if (ze.FullName.EndsWith(".dbf"))
                    {
                        dbfname = ze.FullName;
                        ze.ExtractToFile("local.dbf", true);
                    }
                }
            }

            // BUG FIX: an archive without a .dbf previously re-processed the stale
            // local.dbf left behind by the prior archive.
            if (dbfname.Length == 0)
            {
                Console.WriteLine("No .dbf entry found in " + Path.GetFileName(zf) + "; skipping.");
                continue;
            }

            parent.currentDBFName = dbfname;
            parent.ResetKeyFields?.Invoke();

            DbfDataReader.DbfDataReader db = new DbfDataReader.DbfDataReader(
                "local.dbf", new DbfDataReaderOptions() { SkipDeletedRecords = true });
            try
            {
                while (db.Read())
                {
                    object key = Options.DerivedResumeKey
                        ? parent.DerivedKeyGenerator(db, parent)
                        : db[Options.DbaseResumeId];

                    // On the first row, build the key column now that its CLR type
                    // is known, and put it FIRST: SQL Server binds TVP columns to
                    // the user-defined table type by ordinal, not by name, even
                    // when the names match.
                    if (KeyValues.Columns.Count == 1)
                    {
                        DataColumn keyfield = new DataColumn("keyid", key.GetType());
                        DataColumn filefield = KeyValues.Columns[0];
                        KeyValues.Columns.Clear();
                        KeyValues.Columns.Add(keyfield);
                        KeyValues.Columns.Add(filefield);
                    }

                    DataRow row = KeyValues.NewRow();
                    row["keyid"] = key;
                    // NOTE(review): the dbf entry name is stored in FileSourceId —
                    // confirm the column is textual and this is intentional.
                    row["FileSourceId"] = dbfname;
                    KeyValues.Rows.Add(row);
                }
            }
            finally
            {
                db.Close();
            }

            Console.WriteLine("Loaded " + KeyValues.Rows.Count.ToString() + " keys");

            // Ship the batch as a table-valued parameter; the stored procedure
            // determines server-side which records are already loaded. (This
            // replaced an earlier SqlBulkCopy approach and is just as fast.)
            using (SqlCommand getloaded = new SqlCommand("[ext]." + Options.TableName + "_GetLoaded", con))
            {
                getloaded.CommandType = CommandType.StoredProcedure;
                SqlParameter parm = getloaded.Parameters.AddWithValue("@keysToCheck", KeyValues);
                parm.SqlDbType = SqlDbType.Structured;
                getloaded.ExecuteNonQuery();
            }

            Console.WriteLine("Wrote to server");
            KeyValues.Rows.Clear();
        }
    }
}
/// <summary>
/// Processes every .zip in <c>Options.FileDirectory</c>: extracts the dbf/shp
/// pair to a temp directory, reads each dBase record (optionally paired with its
/// shapefile record), skips records already present when resuming, batches the
/// rest through <c>DoTableWrite</c>, and optionally writes a tab-separated
/// per-file and batch summary. Finishes by comparing the server-side row count
/// against the number of records processed.
/// </summary>
public void LoadZips()
{
    string summaryfilename = Options.TableName + " Load Summary.txt";

    if (Options.WriteSummaryFile)
    {
        // One tab-separated row per input zip, plus a batch-totals row at the end.
        File.WriteAllText(summaryfilename,
            "Input File\tRecord Count\tWrote Records\tSkipped Existing\tTime Processing\tTime Writing To Server\tRetries\tRecords Proc/s\tRecords Wrote/s\n");
    }

    if (Options.ConsoleLogging)
    {
        Console.WriteLine("Processing Table " + Options.TableName);
    }

    #region FilesAndDirectories
    var zipfiles = Directory.GetFiles(Options.FileDirectory, "*.zip");
    var outputdir = Options.FileDirectory + "\\" + Options.TempDirectoryName;
    if (Directory.Exists(outputdir))
    {
        Directory.Delete(outputdir, true);
    }
    Directory.CreateDirectory(outputdir);
    #endregion FilesAndDirectories

    #region InitialSQL
    List<object> resumeids = new List<object>();
    using (SqlConnection scon = new SqlConnection(Options.ConnectionString))
    {
        scon.Open();

        if (Options.EmptyTable)
        {
            if (Options.ConsoleLogging)
            {
                Console.WriteLine("Emptying table of records.");
            }
            // TableName comes from configuration, not user input; identifiers
            // cannot be parameterized in T-SQL, so concatenation is accepted here.
            using (SqlCommand scom = new SqlCommand("truncate table dbo." + Options.TableName, scon))
            {
                scom.ExecuteNonQuery();
            }
        }

        if (Options.Resume)
        {
            if (Options.ConsoleLogging)
            {
                Console.WriteLine("Retrieving resume ids. Field " + Options.SqlResumeId + " selected.");
            }
            // NOTE(review): when DerivedSqlKey is set, DerivedSqlClause must be
            // aliased as SqlResumeId or the indexer below will throw — confirm.
            using (SqlCommand getresumeids = new SqlCommand(
                "select " + (Options.DerivedSqlKey ? Options.DerivedSqlClause : Options.SqlResumeId) +
                " from dbo." + Options.TableName + " order by " + Options.SqlResumeId, scon))
            using (var ir = getresumeids.ExecuteReader())
            {
                while (ir.Read())
                {
                    resumeids.Add(ir[Options.SqlResumeId]);
                }
            }
        }
    }
    #endregion InitialSQL

    #region ProcessZipFiles
    bool checkresume = resumeids.Count > 0;
    foreach (string z in zipfiles)
    {
        currentZipFileName = z;
        if (Options.ConsoleLogging)
        {
            Console.WriteLine("Extracting contents of archive " + Path.GetFileName(z));
        }

        // Extract the archive, then locate the dbf/shp pair inside it.
        ZipFile.ExtractToDirectory(z, outputdir);
        currentDBFName = Directory.GetFiles(outputdir, "*.dbf").First();
        currentSHPName = Directory.GetFiles(outputdir, "*.shp").First();

        DbfDataReader.DbfDataReaderOptions ops = new DbfDataReaderOptions() { SkipDeletedRecords = true };
        DbfDataReader.DbfDataReader dr = new DbfDataReader.DbfDataReader(currentDBFName, ops);

        ShapeUtilities.ShapeFile sfile = null;
        if (Options.LoadShapeFile)
        {
            if (Options.ConsoleLogging)
            {
                Console.WriteLine("Loading shapefile " + Path.GetFileName(currentSHPName));
            }
            sfile = new ShapeUtilities.ShapeFile(currentSHPName);
            sfile.Load();
        }

        sindex = 0;
        // Report the record count back to the implementer.
        OnLength(this, currentDBFName, currentSHPName, dr.DbfTable.Header.RecordCount);

        while (dr.Read())
        {
            DateTime startproc = DateTime.Now;
            DateTime endproc;

            IRecordLoader i = GetNewRecord();
            i.Read(dr);

            // The shapefile record at the same ordinal as the dbf row, if loaded.
            ShapeUtilities.BaseShapeRecord currshape = sfile?.Records[sindex].Record;

            if (checkresume)
            {
                object key = Options.DerivedResumeKey
                    ? DerivedKeyGenerator(dr, this)
                    : dr[Options.DbaseResumeId];
                if (resumeids.Contains(key))
                {
                    // BUG FIX: remove the matched key itself; the old code removed
                    // dr[DbaseResumeId], which is wrong when the key is derived.
                    resumeids.Remove(key);
                    checkresume = resumeids.Count > 0;
                    SkipRecord(this, sindex, i, currshape);
                    skippedrecords++;
                    sindex++;
                    endproc = DateTime.Now;
                    // BUG FIX: elapsed = end - start (the operands were reversed,
                    // accumulating negative seconds).
                    totalsecondsprocessing += endproc.Subtract(startproc).TotalSeconds;
                    continue;
                }
            }

            // Allow user-specified code to act on the loaded record.
            ProcessRecord(this, sindex, i, currshape);
            endproc = DateTime.Now;
            totalsecondsprocessing += endproc.Subtract(startproc).TotalSeconds;

            towrite.Add(i);
            Status(sindex, wrote, 0, totalsecondswriting == 0 ? 0 : wrote / totalsecondswriting);

            // Flush a full batch to the server.
            if (Options.RecordLimit == towrite.Count)
            {
                DoTableWrite();
            }
            sindex++;
        }
        dr.Close();

        // Flush the final partial batch for this file.
        if (towrite.Count > 0)
        {
            DoTableWrite();
        }
        Directory.Delete(outputdir, true);

        recordsprocpersecond = totalsecondsprocessing > 0
            ? (wrote + skippedrecords) / totalsecondsprocessing
            : 0;

        if (Options.WriteSummaryFile)
        {
            // Columns aligned with the header written above. (The old row omitted
            // Record Count and Retries, shifting every later column left.)
            File.AppendAllText(summaryfilename,
                Path.GetFileName(z) + "\t" +
                (wrote + skippedrecords).ToString() + "\t" +
                wrote.ToString() + "\t" +
                skippedrecords.ToString() + "\t" +
                TimeSpan.FromSeconds(totalsecondsprocessing).ToString() + "\t" +
                TimeSpan.FromSeconds(totalsecondswriting).ToString() + "\t" +
                retries.ToString() + "\t" +
                recordsprocpersecond.ToString() + "\t" +
                recordwrotepersecond.ToString() + "\n");
        }

        ReportFinalStats(wrote, skippedrecords, totalsecondswriting, recordwrotepersecond);

        // Roll per-file counters into the batch totals, then reset them.
        BatchRecordsSkipped += skippedrecords;
        BatchRecordsWrote += wrote;
        BatchRetries += retries;
        BatchSecondsProcessing += totalsecondsprocessing;
        BatchSecondsWriting += totalsecondswriting;
        retries = 0;
        totalsecondsprocessing = 0;
        wrote = 0;
        totalsecondswriting = 0;
        skippedrecords = 0;
        recordwrotepersecond = 0;
    }
    #endregion ProcessZipFiles

    // Guard against division by zero when nothing was written or processed.
    BatchRecordsWrotePerSecond = BatchSecondsWriting > 0
        ? BatchRecordsWrote / BatchSecondsWriting
        : 0;
    BatchRecordsProcsPerSecond = BatchSecondsProcessing > 0
        ? (BatchRecordsSkipped + BatchRecordsWrote) / BatchSecondsProcessing
        : 0;

    if (Options.WriteSummaryFile)
    {
        // Batch-totals row, aligned with the header. (The old row used the
        // per-file rate fields, which had just been reset to zero.)
        File.AppendAllText(summaryfilename,
            "Batch Totals\t" +
            (BatchRecordsWrote + BatchRecordsSkipped).ToString() + "\t" +
            BatchRecordsWrote.ToString() + "\t" +
            BatchRecordsSkipped.ToString() + "\t" +
            TimeSpan.FromSeconds(BatchSecondsProcessing).ToString() + "\t" +
            TimeSpan.FromSeconds(BatchSecondsWriting).ToString() + "\t" +
            BatchRetries.ToString() + "\t" +
            BatchRecordsProcsPerSecond.ToString() + "\t" +
            BatchRecordsWrotePerSecond.ToString() + "\n");
    }

    // Sanity check: compare the server-side row count with what we processed.
    using (SqlConnection countcon = Options.MakeConnection())
    {
        countcon.Open();
        using (SqlCommand getcount = new SqlCommand("select count(*) from dbo." + Options.TableName, countcon))
        {
            // BUG FIX: ExecuteNonQuery always returns -1 for a SELECT, so the
            // mismatch branch fired on every run; ExecuteScalar returns the count.
            int sqlrecords = (int)getcount.ExecuteScalar();
            if (sqlrecords == BatchRecordsSkipped + BatchRecordsWrote)
            {
                Console.WriteLine("Record number in table matches progress thus far. A total of " + sqlrecords.ToString() + " discovered.");
            }
            else
            {
                Console.WriteLine("Differing counts");
                Console.WriteLine("Server returned count: " + sqlrecords.ToString());
                Console.WriteLine("Skipped + Processed: " + (BatchRecordsWrote + BatchRecordsSkipped).ToString());
            }
        }
    }
}