static ScheduleData ExtractScheduleData(string path)
{
    // Code page 852 (Central European DOS) is not available by default on .NET Core,
    // so the code pages provider must be registered before requesting it.
    Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);

    var options = new DbfDataReaderOptions
    {
        SkipDeletedRecords = true,
        Encoding = Encoding.GetEncoding(852)
    };

    var list = new List<ScheduleEntry>();

    using (var dbfDataReader = new DbfDataReader.DbfDataReader(path, options))
    {
        while (dbfDataReader.Read())
        {
            var entry = new ScheduleEntry
            {
                Day = dbfDataReader.GetString(0),
                Hour = dbfDataReader.GetString(1),
                Class = dbfDataReader.GetString(3),
                Group = dbfDataReader.GetString(4),
                SubjectShortcut = dbfDataReader.GetString(5),
                Subject = dbfDataReader.GetString(6),
                TeacherFirstName = dbfDataReader.GetString(9),
                TeacherLastName = dbfDataReader.GetString(8),
                SpaceName = dbfDataReader.GetString(10)
            };
            list.Add(entry);
        }
    }

    return new ScheduleData { ScheduleEntries = list };
}
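The ScheduleEntry and ScheduleData types are not shown in this example. A minimal sketch of what they might look like, assuming plain string properties matching the assignments above (the real project's definitions may differ):

// Hypothetical DTO shapes inferred from the usage above.
public class ScheduleEntry
{
    public string Day { get; set; }
    public string Hour { get; set; }
    public string Class { get; set; }
    public string Group { get; set; }
    public string SubjectShortcut { get; set; }
    public string Subject { get; set; }
    public string TeacherFirstName { get; set; }
    public string TeacherLastName { get; set; }
    public string SpaceName { get; set; }
}

public class ScheduleData
{
    public List<ScheduleEntry> ScheduleEntries { get; set; }
}

// Example call: var data = ExtractScheduleData(@"schedule.dbf");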
private static void DoBulkCopy(SqlConnection connection, Options options)
{
    Console.WriteLine("Begin bulk copy");

    var stopwatch = new Stopwatch();
    stopwatch.Start();

    var rowsCopied = 0L;
    var dbfRecordCount = 0L;
    var dbfDataReaderOptions = new DbfDataReaderOptions
    {
        SkipDeletedRecords = options.SkipDeletedRecords
    };

    using (var dbfDataReader = new DbfDataReader.DbfDataReader(options.Dbf, dbfDataReaderOptions))
    {
        dbfRecordCount = dbfDataReader.DbfTable.Header.RecordCount;

        using (var bulkCopy = new SqlBulkCopy(connection))
        {
            bulkCopy.BulkCopyTimeout = options.BulkCopyTimeout;
            bulkCopy.DestinationTableName = options.Table;

            try
            {
                bulkCopy.WriteToServer(dbfDataReader);
                rowsCopied = bulkCopy.RowsCopied();
            }
            catch (Exception ex)
            {
                Console.WriteLine($"Error importing: dbf file: '{options.Dbf}', exception: {ex.Message}");
            }
        }
    }

    stopwatch.Stop();
    Console.WriteLine($"Bulk copy completed in {GetElapsedTime(stopwatch)}s");
    Console.WriteLine($"Copied {rowsCopied} of {dbfRecordCount} rows");
}
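RowsCopied() is not part of System.Data.SqlClient's public SqlBulkCopy surface, so this example presumably relies on a reflection-based extension method. A sketch of that workaround, with the caveat that "_rowsCopied" is an internal field name and not a guaranteed contract (Microsoft.Data.SqlClient exposes a public RowsCopied property instead):

using System;
using System.Data.SqlClient;
using System.Reflection;

public static class SqlBulkCopyExtensions
{
    // Reads SqlBulkCopy's private row counter via reflection. The field name
    // is an implementation detail and may change between framework versions.
    public static long RowsCopied(this SqlBulkCopy bulkCopy)
    {
        var field = typeof(SqlBulkCopy).GetField("_rowsCopied",
            BindingFlags.NonPublic | BindingFlags.Instance);
        return field == null ? 0L : Convert.ToInt64(field.GetValue(bulkCopy));
    }
}

GetElapsedTime is also defined elsewhere; it presumably formats the elapsed seconds, e.g. stopwatch.Elapsed.TotalSeconds.ToString("0.##").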
public void LoadKeys(bool Truncate = false)
{
    string[] zips = Directory.GetFiles(Options.FileDirectory, "*.zip");

    SqlConnection con = Options.MakeConnection();
    con.Open();

    InsertFileSource.Connection = con;
    if (Truncate)
    {
        TruncExt.Connection = con;
        TruncExt.ExecuteNonQuery();
    }

    foreach (string zf in zips)
    {
        Console.WriteLine("Processing: " + Path.GetFileName(zf));

        FileInfo f = new FileInfo(zf);
        InsertFileSource.Parameters["@filename"].Value = zf;
        InsertFileSource.Parameters["@filesize"].Value = f.Length;
        InsertFileSource.ExecuteNonQuery();

        // Extract the .dbf entry from the archive to a local working file.
        string dbfname = "";
        using (ZipArchive za = ZipFile.OpenRead(zf))
        {
            foreach (ZipArchiveEntry ze in za.Entries)
            {
                if (ze.FullName.EndsWith(".dbf"))
                {
                    dbfname = ze.FullName;
                    ze.ExtractToFile("local.dbf", true);
                }
            }
        }

        parent.currentDBFName = dbfname;
        parent.ResetKeyFields?.Invoke();

        var db = new DbfDataReader.DbfDataReader("local.dbf",
            new DbfDataReaderOptions { SkipDeletedRecords = true });
        while (db.Read())
        {
            object key = Options.DerivedResumeKey
                ? parent.DerivedKeyGenerator(db, parent)
                : db[Options.DbaseResumeId];

            if (KeyValues.Columns.Count == 1)
            {
                // Rebuild the column order: user-defined table types map by column
                // ordinal rather than by name, even when the columns are named the
                // same as in the table type, so "keyid" must come first.
                DataColumn keyfield = new DataColumn("keyid", key.GetType());
                DataColumn filefield = KeyValues.Columns[0];
                KeyValues.Columns.Clear();
                KeyValues.Columns.Add(keyfield);
                KeyValues.Columns.Add(filefield);
            }

            DataRow dr = KeyValues.NewRow();
            dr["keyid"] = key;
            dr["FileSourceId"] = dbfname;
            KeyValues.Rows.Add(dr);
        }
        db.Close();

        Console.WriteLine("Loaded " + KeyValues.Rows.Count + " keys");

        // SqlBulkCopy was replaced with a stored procedure that accepts a
        // table-valued parameter; it is just as fast, and the subquery that
        // determines which records were already loaded runs quickly.
        SqlCommand getloaded = new SqlCommand("[ext]." + Options.TableName + "_GetLoaded", con);
        getloaded.CommandType = CommandType.StoredProcedure;
        SqlParameter parm = getloaded.Parameters.AddWithValue("@keysToCheck", KeyValues);
        parm.SqlDbType = SqlDbType.Structured;
        // parm.TypeName = "[ext]." + Options.TableName + "KeyTableType";
        getloaded.ExecuteNonQuery();

        Console.WriteLine("Wrote to server");
        KeyValues.Rows.Clear();
    }

    con.Close();
}
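The KeyValues DataTable and the server-side table type are defined elsewhere. A minimal sketch of the client-side setup the stored-procedure call assumes, with hypothetical names ("MyTable", connectionString) standing in for the real ones:

// Hypothetical setup for the table-valued parameter used above.
// SQL-side (assumed): CREATE TYPE [ext].[MyTableKeyTableType] AS TABLE
//                     (keyid int, FileSourceId varchar(260));
var keyValues = new DataTable();
keyValues.Columns.Add("keyid", typeof(int));           // UDTs map by column ordinal, so order matters
keyValues.Columns.Add("FileSourceId", typeof(string));

using (var con = new SqlConnection(connectionString))  // connectionString is assumed
{
    con.Open();
    var cmd = new SqlCommand("[ext].MyTable_GetLoaded", con) { CommandType = CommandType.StoredProcedure };
    var p = cmd.Parameters.AddWithValue("@keysToCheck", keyValues);
    p.SqlDbType = SqlDbType.Structured;
    // TypeName is only required for ad-hoc SQL; stored procedure calls infer
    // the table type from the parameter declaration.
    // p.TypeName = "[ext].MyTableKeyTableType";
    cmd.ExecuteNonQuery();
}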
public void LoadZips()
{
    string summaryfilename = Options.TableName + " Load Summary.txt";
    if (Options.WriteSummaryFile)
    {
        // Columns: input file, record count, written records, skipped records,
        // time processing, time writing to server, retries, records processed/s,
        // records written/s.
        File.WriteAllText(summaryfilename,
            "Input File\tRecord Count\tWrote Records\tSkipped Existing\tTime Processing\tTime Writing To Server\tRetries\tRecords Proc/s\tRecords Wrote/s\n");
    }

    if (Options.ConsoleLogging)
    {
        Console.WriteLine("Processing Table " + Options.TableName);
    }

    #region FilesAndDirectories
    var zipfiles = Directory.GetFiles(Options.FileDirectory, "*.zip");
    var outputdir = Path.Combine(Options.FileDirectory, Options.TempDirectoryName);
    if (Directory.Exists(outputdir))
    {
        Directory.Delete(outputdir, true);
    }
    Directory.CreateDirectory(outputdir);
    #endregion FilesAndDirectories

    #region InitialSQL
    SqlConnection scon = new SqlConnection(Options.ConnectionString);
    scon.Open();

    if (Options.EmptyTable)
    {
        if (Options.ConsoleLogging)
        {
            Console.WriteLine("Emptying table of records.");
        }
        SqlCommand scom = new SqlCommand("truncate table dbo." + Options.TableName, scon);
        scom.ExecuteNonQuery();
    }

    List<object> resumeids = new List<object>();
    if (Options.Resume)
    {
        if (Options.ConsoleLogging)
        {
            Console.WriteLine("Retrieving resume ids. Field " + Options.SqlResumeId + " selected.");
        }
        // Assumes DerivedSqlClause aliases its result as SqlResumeId so the
        // reader below can address it by name.
        SqlCommand getresumeids = new SqlCommand(
            "select " + (Options.DerivedSqlKey ? Options.DerivedSqlClause : Options.SqlResumeId) +
            " from dbo." + Options.TableName + " order by " + Options.SqlResumeId, scon);
        var ir = getresumeids.ExecuteReader();
        while (ir.Read())
        {
            resumeids.Add(ir[Options.SqlResumeId]);
        }
        ir.Close();
    }

    scon.Close();
    #endregion InitialSQL

    #region ProcessZipFiles
    bool checkresume = resumeids.Count > 0;

    foreach (string z in zipfiles)
    {
        currentZipFileName = z;
        if (Options.ConsoleLogging)
        {
            Console.WriteLine("Extracting contents of archive " + Path.GetFileName(z));
        }

        // Extract the archive, then locate the .dbf and .shp files it contains.
        ZipFile.ExtractToDirectory(z, outputdir);
        currentDBFName = Directory.GetFiles(outputdir, "*.dbf").First();
        currentSHPName = Directory.GetFiles(outputdir, "*.shp").First();

        var ops = new DbfDataReaderOptions { SkipDeletedRecords = true };
        var dr = new DbfDataReader.DbfDataReader(currentDBFName, ops);

        ShapeUtilities.ShapeFile sfile = null;
        if (Options.LoadShapeFile)
        {
            if (Options.ConsoleLogging)
            {
                Console.WriteLine("Loading shapefile " + Path.GetFileName(currentSHPName));
            }
            sfile = new ShapeUtilities.ShapeFile(currentSHPName);
            sfile.Load();
        }

        sindex = 0;

        // Report length back to the implementer.
        OnLength(this, currentDBFName, currentSHPName, dr.DbfTable.Header.RecordCount);

        while (dr.Read())
        {
            DateTime startproc = DateTime.Now;
            DateTime endproc;

            IRecordLoader i = GetNewRecord();
            i.Read(dr);
            ShapeUtilities.BaseShapeRecord currshape = sfile?.Records[sindex].Record;

            if (checkresume)
            {
                object key = Options.DerivedResumeKey
                    ? DerivedKeyGenerator(dr, this)
                    : dr[Options.DbaseResumeId];
                if (resumeids.Contains(key))
                {
                    // Remove the matched key (the original removed the raw dbase
                    // id even when a derived key was in use).
                    resumeids.Remove(key);
                    checkresume = resumeids.Count > 0;
                    SkipRecord(this, sindex, i, currshape);
                    skippedrecords++;
                    sindex++;
                    endproc = DateTime.Now;
                    // end - start; the original subtracted in the wrong order,
                    // accumulating negative durations.
                    totalsecondsprocessing += endproc.Subtract(startproc).TotalSeconds;
                    continue;
                }
            }

            // Allow user-specified code to run, performing actions on the loaded record.
            ProcessRecord(this, sindex, i, currshape);
            endproc = DateTime.Now;
            totalsecondsprocessing += endproc.Subtract(startproc).TotalSeconds;

            towrite.Add(i);
            Status(sindex, wrote, 0, totalsecondswriting == 0 ? 0 : wrote / totalsecondswriting);

            if (Options.RecordLimit == towrite.Count)
            {
                DoTableWrite();
            }
            sindex++;
        }
        dr.Close();

        if (towrite.Count > 0)
        {
            DoTableWrite();
        }

        Directory.Delete(outputdir, true);

        recordsprocpersecond = (wrote + skippedrecords) / totalsecondsprocessing;

        if (Options.WriteSummaryFile)
        {
            // Record count = wrote + skipped; the original line omitted the
            // Record Count and Retries columns declared in the header.
            File.AppendAllText(summaryfilename,
                Path.GetFileName(z) + "\t" + (wrote + skippedrecords) + "\t" + wrote + "\t" + skippedrecords + "\t" +
                TimeSpan.FromSeconds(totalsecondsprocessing) + "\t" +
                TimeSpan.FromSeconds(totalsecondswriting) + "\t" + retries + "\t" +
                recordsprocpersecond + "\t" + recordwrotepersecond + "\n");
        }

        ReportFinalStats(wrote, skippedrecords, totalsecondswriting, recordwrotepersecond);

        // Roll the per-file counters into the batch totals, then reset them.
        BatchRecordsSkipped += skippedrecords;
        BatchRecordsWrote += wrote;
        BatchRetries += retries;
        BatchSecondsProcessing += totalsecondsprocessing;
        BatchSecondsWriting += totalsecondswriting;
        retries = 0;
        totalsecondsprocessing = 0;
        wrote = 0;
        totalsecondswriting = 0;
        skippedrecords = 0;
        recordwrotepersecond = 0;
    }
    #endregion ProcessZipFiles

    BatchRecordsWrotePerSecond = BatchRecordsWrote / BatchSecondsWriting;
    BatchRecordsProcsPerSecond = (BatchRecordsSkipped + BatchRecordsWrote) / BatchSecondsProcessing;

    if (Options.WriteSummaryFile)
    {
        // Use the batch-level rates; the per-file rates were just reset to zero.
        File.AppendAllText(summaryfilename,
            "Batch Totals\t" + (BatchRecordsWrote + BatchRecordsSkipped) + "\t" + BatchRecordsWrote + "\t" + BatchRecordsSkipped + "\t" +
            TimeSpan.FromSeconds(BatchSecondsProcessing) + "\t" +
            TimeSpan.FromSeconds(BatchSecondsWriting) + "\t" + BatchRetries + "\t" +
            BatchRecordsProcsPerSecond + "\t" + BatchRecordsWrotePerSecond + "\n");
    }

    // Sanity check: compare the server-side row count with the totals tracked above.
    scon = Options.MakeConnection();
    scon.Open();
    SqlCommand getcount = new SqlCommand("select count(*) from dbo." + Options.TableName, scon);
    // ExecuteScalar returns the count; ExecuteNonQuery does not for a SELECT.
    int sqlrecords = (int)getcount.ExecuteScalar();
    if (sqlrecords == BatchRecordsSkipped + BatchRecordsWrote)
    {
        Console.WriteLine("Record count in table matches progress thus far. A total of " + sqlrecords + " discovered.");
    }
    else
    {
        Console.WriteLine("Differing counts");
        Console.WriteLine("Server returned count: " + sqlrecords);
        Console.WriteLine("Skipped + Processed: " + (BatchRecordsWrote + BatchRecordsSkipped));
    }
    scon.Close();
}
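DoTableWrite, towrite, and the retry counters live elsewhere in this class. A minimal sketch of what the batched write might look like, assuming a SqlBulkCopy over a DataTable built from the buffered IRecordLoader records; the field names mirror the ones used above, but the body and the BuildDataTable helper are hypothetical:

// Hypothetical sketch of the batched write used by LoadZips.
private void DoTableWrite()
{
    DateTime start = DateTime.Now;
    DataTable batch = BuildDataTable(towrite); // BuildDataTable is assumed, not shown in the source

    for (int attempt = 0; ; attempt++)
    {
        try
        {
            using (SqlConnection con = Options.MakeConnection())
            {
                con.Open();
                using (var bulk = new SqlBulkCopy(con) { DestinationTableName = "dbo." + Options.TableName })
                {
                    bulk.WriteToServer(batch);
                }
            }
            break;
        }
        catch (SqlException) when (attempt < 3)
        {
            retries++; // surfaced in the summary file's Retries column
        }
    }

    wrote += towrite.Count;
    towrite.Clear();
    totalsecondswriting += DateTime.Now.Subtract(start).TotalSeconds;
    recordwrotepersecond = totalsecondswriting == 0 ? 0 : wrote / totalsecondswriting;
}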
public async Task<List<LabFileUploadErrorMessageDTO>> ValidateLabFileAsync(IFileListEntry fileEntry, MappingDataDTO MappingTemplate)
{
    var ErrorMessage = new List<LabFileUploadErrorMessageDTO>();
    int row = 1;
    try
    {
        // Resolve the configured upload path from the parameter service.
        string path = "";
        var searchModel = new ParameterDTO { prm_code_major = "UPLOAD_PATH" };
        List<ParameterDTO> objParamList = await _apiHelper.GetDataListByModelAsync<ParameterDTO, ParameterDTO>("dropdownlist_api/GetParameterList", searchModel);
        var pathParam = objParamList.FirstOrDefault(x => x.prm_code_minor == "PATH");
        if (pathParam != null)
        {
            path = pathParam.prm_value;
        }
        else
        {
            // "Config PATH not found; please contact the system administrator"
            ErrorMessage.Add(new LabFileUploadErrorMessageDTO { lfu_status = 'E', lfu_Err_type = 'E', lfu_Err_no = 1, lfu_Err_Column = "", lfu_Err_Message = "ไม่พบ Config PATH กรุณาติดต่อผู้ดูแลระบบ" });
            return ErrorMessage;
        }

        // Save the upload under <path>\yyyyMMdd\<hospital code>\<file name>.
        string str_CurrentDate = DateTime.Now.ToString("yyyyMMdd");
        path = Path.Combine(path, str_CurrentDate, MappingTemplate.mp_hos_code);
        if (!Directory.Exists(path))
        {
            Directory.CreateDirectory(path);
        }
        path = Path.Combine(path, fileEntry.Name);

        using (FileStream file = new FileStream(path, FileMode.Create))
        {
            await fileEntry.Data.CopyToAsync(file);
        }

        // Load the WHONET column mapping for this template; only mandatory columns are validated.
        var searchWHONet = new WHONetMappingSearch
        {
            wnm_mappingid = MappingTemplate.mp_id,
            wnm_mst_code = MappingTemplate.mp_mst_code
        };
        List<WHONetMappingListsDTO> WHONetColumn = await _apiHelper.GetDataListByModelAsync<WHONetMappingListsDTO, WHONetMappingSearch>("mapping_api/Get_WHONetMappingListByModel", searchWHONet);
        var WHONetColumnMandatory = WHONetColumn.Where(x => x.wnm_mandatory == true);

        string extension = Path.GetExtension(fileEntry.Name);

        #region ReadExcel
        if (extension == ".xls" || extension == ".xlsx")
        {
            using (var stream = File.Open(path, FileMode.Open, FileAccess.Read))
            using (var reader = ExcelReaderFactory.CreateReader(stream))
            {
                DataSet result;
                if (MappingTemplate.mp_firstlineisheader == true)
                {
                    // First row is the header.
                    result = reader.AsDataSet(new ExcelDataSetConfiguration
                    {
                        ConfigureDataTable = (tableReader) => new ExcelDataTableConfiguration { UseHeaderRow = true }
                    });
                }
                else
                {
                    result = reader.AsDataSet();
                }

                // Report the total row count as an informational entry.
                ErrorMessage.Add(new LabFileUploadErrorMessageDTO { lfu_status = 'I', lfu_Err_type = 'I', lfu_Err_no = 1, lfu_Err_Column = "Total", lfu_Err_Message = result.Tables[0].Rows.Count.ToString() });

                foreach (WHONetMappingListsDTO item in WHONetColumnMandatory)
                {
                    var wnm_originalfield = item.wnm_originalfield;
                    if (MappingTemplate.mp_firstlineisheader == false)
                    {
                        // Without a header row, ExcelDataReader names columns Column0..n
                        // while the mapping is 1-based: shift the index down by one.
                        int index = 0;
                        Int32.TryParse(item.wnm_originalfield.Replace("Column", ""), out index);
                        item.wnm_originalfield = "Column" + (index - 1);
                    }

                    bool columnExists = result.Tables[0].Columns.Contains(item.wnm_originalfield);
                    if (!columnExists)
                    {
                        // "Column <name> not found"
                        ErrorMessage.Add(new LabFileUploadErrorMessageDTO { lfu_status = 'E', lfu_Err_type = 'C', lfu_Err_no = 1, lfu_Err_Column = wnm_originalfield, lfu_Err_Message = "ไม่พบ Column " + wnm_originalfield });
                    }
                    else
                    {
                        var chkResult = result.Tables[0].Select("[" + item.wnm_originalfield + "] is null");
                        if (chkResult.Length > 0)
                        {
                            // "Please check Column <name>; it must not be empty"
                            ErrorMessage.Add(new LabFileUploadErrorMessageDTO { lfu_status = 'E', lfu_Err_type = 'N', lfu_Err_no = 1, lfu_Err_Column = wnm_originalfield, lfu_Err_Message = "กรุณาตรวจสอบข้อมูล Column " + wnm_originalfield + " จะต้องไม่เท่ากับค่าว่าง" });
                        }
                        // TODO: validate Date-typed columns (wnm_type == "Date") once the expected formats are known.
                    }
                }
            }
        }
        #endregion
        #region ReadCSV
        else if (extension == ".csv")
        {
            using (var stream = File.Open(path, FileMode.Open, FileAccess.Read))
            using (var reader = ExcelReaderFactory.CreateCsvReader(stream, new ExcelReaderConfiguration
            {
                FallbackEncoding = Encoding.GetEncoding(1252),
                AutodetectSeparators = new char[] { ',', ';', '\t', '|', '#' },
                LeaveOpen = false,
                AnalyzeInitialCsvRows = 0,
            }))
            {
                DataSet result;
                if (MappingTemplate.mp_firstlineisheader == true)
                {
                    // First row is the header.
                    result = reader.AsDataSet(new ExcelDataSetConfiguration
                    {
                        ConfigureDataTable = (tableReader) => new ExcelDataTableConfiguration { UseHeaderRow = true }
                    });
                }
                else
                {
                    result = reader.AsDataSet();
                }

                // The row-count entry must be added after AsDataSet populates the
                // DataSet; the original read Tables[0] from an empty DataSet first.
                ErrorMessage.Add(new LabFileUploadErrorMessageDTO { lfu_status = 'I', lfu_Err_type = 'I', lfu_Err_no = 1, lfu_Err_Column = "Total", lfu_Err_Message = result.Tables[0].Rows.Count.ToString() });

                foreach (WHONetMappingListsDTO item in WHONetColumnMandatory)
                {
                    var wnm_originalfield = item.wnm_originalfield;
                    if (MappingTemplate.mp_firstlineisheader == false)
                    {
                        int index = 0;
                        Int32.TryParse(item.wnm_originalfield.Replace("Column", ""), out index);
                        item.wnm_originalfield = "Column" + (index - 1);
                    }

                    bool columnExists = result.Tables[0].Columns.Contains(item.wnm_originalfield);
                    if (!columnExists)
                    {
                        // "Column <name> not found"
                        ErrorMessage.Add(new LabFileUploadErrorMessageDTO { lfu_status = 'E', lfu_Err_type = 'C', lfu_Err_no = 1, lfu_Err_Column = wnm_originalfield, lfu_Err_Message = "ไม่พบ Column " + wnm_originalfield });
                    }
                    else
                    {
                        var chkResult = result.Tables[0].Select(item.wnm_originalfield + " = ''");
                        if (chkResult.Length > 0)
                        {
                            // "Please check Column <name>; it must not be empty"
                            ErrorMessage.Add(new LabFileUploadErrorMessageDTO { lfu_status = 'E', lfu_Err_type = 'N', lfu_Err_no = 1, lfu_Err_Column = wnm_originalfield, lfu_Err_Message = "กรุณาตรวจสอบข้อมูล Column " + wnm_originalfield + " จะต้องไม่เท่ากับค่าว่าง" });
                        }
                    }
                }
            }
        }
        #endregion
        #region ReadText
        else if (extension == ".txt")
        {
            string line;
            DataTable dt = new DataTable();

            using (TextReader tr = File.OpenText(path))
            {
                while ((line = tr.ReadLine()) != null)
                {
                    string[] items = line.Split('\t');
                    if (dt.Columns.Count == 0)
                    {
                        for (int i = 0; i < items.Length; i++)
                        {
                            dt.Columns.Add(new DataColumn(
                                MappingTemplate.mp_firstlineisheader == true ? items[i] : "Column" + i,
                                typeof(string)));
                        }
                        if (MappingTemplate.mp_firstlineisheader == true)
                        {
                            continue; // do not add the header line as a data row
                        }
                    }
                    dt.Rows.Add(items);
                }
            }
            // Note: the file is not deleted here; the shared epilogue below
            // deletes it only when validation fails.

            ErrorMessage.Add(new LabFileUploadErrorMessageDTO { lfu_status = 'I', lfu_Err_type = 'I', lfu_Err_no = 1, lfu_Err_Column = "Total", lfu_Err_Message = dt.Rows.Count.ToString() });

            foreach (WHONetMappingListsDTO item in WHONetColumnMandatory)
            {
                var wnm_originalfield = item.wnm_originalfield;
                if (MappingTemplate.mp_firstlineisheader == false)
                {
                    int index = 0;
                    Int32.TryParse(item.wnm_originalfield.Replace("Column", ""), out index);
                    item.wnm_originalfield = "Column" + (index - 1);
                }

                bool columnExists = dt.Columns.Contains(item.wnm_originalfield);
                if (!columnExists)
                {
                    // "Column <name> not found"
                    ErrorMessage.Add(new LabFileUploadErrorMessageDTO { lfu_status = 'E', lfu_Err_type = 'C', lfu_Err_no = 1, lfu_Err_Column = wnm_originalfield, lfu_Err_Message = "ไม่พบ Column " + wnm_originalfield });
                }
                else
                {
                    var chkResult = dt.Select(item.wnm_originalfield + " = ''");
                    if (chkResult.Length > 0)
                    {
                        // "Please check Column <name>; it must not be empty"
                        ErrorMessage.Add(new LabFileUploadErrorMessageDTO { lfu_status = 'E', lfu_Err_type = 'N', lfu_Err_no = 1, lfu_Err_Column = wnm_originalfield, lfu_Err_Message = "กรุณาตรวจสอบข้อมูล Column " + wnm_originalfield + " จะต้องไม่เท่ากับค่าว่าง" });
                    }
                }
            }
        }
        #endregion
        else
        {
            // Any other extension is treated as a dBASE file. Code page 874 is Thai.
            var options = new DbfDataReaderOptions { Encoding = Encoding.GetEncoding(874) };
            using (var dbfDataReader = new DbfDataReader.DbfDataReader(path, options))
            {
                ErrorMessage.Add(new LabFileUploadErrorMessageDTO { lfu_status = 'I', lfu_Err_type = 'I', lfu_Err_no = 1, lfu_Err_Column = "Total", lfu_Err_Message = dbfDataReader.DbfTable.Header.RecordCount.ToString() });

                while (dbfDataReader.Read())
                {
                    // Validate each mandatory field; duplicate messages per column are suppressed.
                    foreach (WHONetMappingListsDTO item in WHONetColumnMandatory)
                    {
                        var column = dbfDataReader.DbfTable.Columns.FirstOrDefault(x => x.Name == item.wnm_originalfield);
                        if (column != null)
                        {
                            // Compare string content; the original used == between
                            // object and "", which is a reference comparison.
                            var value = dbfDataReader[item.wnm_originalfield];
                            if (value == null || string.IsNullOrEmpty(value.ToString()))
                            {
                                if (ErrorMessage.FirstOrDefault(x => x.lfu_Err_type == 'N' && x.lfu_Err_Column == item.wnm_originalfield) == null)
                                {
                                    // "Please check Column <name>; it must not be empty"
                                    ErrorMessage.Add(new LabFileUploadErrorMessageDTO { lfu_status = 'E', lfu_Err_type = 'N', lfu_Err_no = 1, lfu_Err_Column = item.wnm_originalfield, lfu_Err_Message = "กรุณาตรวจสอบข้อมูล Column " + item.wnm_originalfield + " จะต้องไม่เท่ากับค่าว่าง" });
                                }
                            }
                        }
                        else if (ErrorMessage.FirstOrDefault(x => x.lfu_Err_type == 'C' && x.lfu_Err_Column == item.wnm_originalfield) == null)
                        {
                            // "Column <name> not found"
                            ErrorMessage.Add(new LabFileUploadErrorMessageDTO { lfu_status = 'E', lfu_Err_type = 'C', lfu_Err_no = 1, lfu_Err_Column = item.wnm_originalfield, lfu_Err_Message = "ไม่พบ Column " + item.wnm_originalfield });
                        }
                    }
                    row++;
                }
            }
        }

        // On any error, remove the uploaded file; otherwise report where it was stored.
        var chkError = ErrorMessage.FirstOrDefault(x => x.lfu_status == 'E');
        if (chkError != null)
        {
            File.Delete(path);
        }
        else
        {
            ErrorMessage.Add(new LabFileUploadErrorMessageDTO { lfu_status = 'I', lfu_Err_type = 'P', lfu_Err_no = 1, lfu_Err_Column = "path", lfu_Err_Message = path });
        }
    }
    catch (Exception)
    {
        // Swallowed in the original; callers only receive whatever was collected so far.
    }
    return ErrorMessage;
}
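A hedged example of how a Blazor component might call this validator. IFileListEntry comes from the BlazorInputFile package; labService and LoadMappingTemplateAsync are hypothetical names for the service instance and a helper that loads the mapping template:

// Hypothetical call site for ValidateLabFileAsync (names are illustrative).
async Task HandleFileSelected(IFileListEntry file)
{
    MappingDataDTO template = await LoadMappingTemplateAsync(); // assumed helper
    List<LabFileUploadErrorMessageDTO> messages = await labService.ValidateLabFileAsync(file, template);

    // 'E' entries are validation failures; the 'I'/'P' entries carry the row
    // total and the stored path for a successful upload.
    foreach (var m in messages.Where(m => m.lfu_status == 'E'))
    {
        Console.WriteLine($"{m.lfu_Err_Column}: {m.lfu_Err_Message}");
    }
}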
static void Main(string[] args)
{
    DateTime inicio = DateTime.Now;
    string posPath = ConfigurationManager.AppSettings["posFile"].ToString();
    string prodPath = ConfigurationManager.AppSettings["prodFile"].ToString();
    List<Venta> ventas = new List<Venta>();
    List<Producto> productos = new List<Producto>();
    Console.WriteLine(inicio.ToString("HH:mm:ss"));

    int i = 0;

    // Read all the sales.
    using (var dbfData = new DbfDataReader.DbfDataReader(posPath))
    {
        while (dbfData.Read())
        {
            i++;
            string codigoProd = dbfData.GetValue(4).ToString();
            double precio = (double)dbfData.GetValue(10);
            double importe = (double)dbfData.GetValue(25);
            DateTime fecha = DateTime.Parse(dbfData.GetValue(30).ToString());
            ventas.Add(new Venta { codigoProd = codigoProd, precio = precio, importe = importe, fecha = fecha });

            // Show progress every 10,000 records.
            if (i % 10000 == 0)
            {
                Console.WriteLine(i);
            }
        }
    }

    // Read all the products. The original hard-coded this path
    // ("C:\\workspace\\conectorAdminPAQ\\LATIENDITA\\MGW10005.dbf") even though
    // prodPath had already been read from configuration; use the configured path.
    using (var dbfData = new DbfDataReader.DbfDataReader(prodPath))
    {
        while (dbfData.Read())
        {
            string id = dbfData.GetValue(0).ToString();
            string codProd = dbfData.GetValue(1).ToString();
            string nombre = dbfData.GetValue(2).ToString();
            string precio = dbfData.GetValue(43).ToString();
            productos.Add(new Producto { id = id, codigo = codProd, nombre = nombre, precio = precio });
        }
    }

    Console.WriteLine(inicio.ToString("HH:mm:ss"));

    // Keep sales with a positive amount (the -50000 day window effectively keeps everything).
    List<Venta> result = (from X in ventas
                          where X.fecha > DateTime.Now.AddDays(-50000) && X.importe > 0
                          select X).ToList();

    // Top 200 products by total sales amount.
    List<Venta> ordenada = (from X in result
                            group X by X.codigoProd into newGroup
                            orderby newGroup.Sum(s => s.importe) descending
                            select new Venta
                            {
                                codigoProd = newGroup.Key,
                                importe = newGroup.Sum(s => s.importe)
                            }).Take(200).ToList();

    List<Producto> joinada = (from X in ordenada
                              join p in productos on X.codigoProd equals p.id
                              select new Producto { codigo = p.codigo, nombre = p.nombre, precio = p.precio }).ToList();

    int pos = 1;
    foreach (Producto producto in joinada)
    {
        Console.WriteLine(producto.codigo + " - " + producto.nombre);
        pos++;
    }

    DateTime finalizacion = DateTime.Now;
    Console.WriteLine($"Process finished. Total duration {(finalizacion - inicio).TotalSeconds} seconds...");
    Thread.Sleep(160000); // keep the console window open
}
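Venta and Producto are not shown, and the program expects two appSettings keys (posFile and prodFile) in App.config. A minimal sketch of what the POCOs might look like given the usage above:

// Hypothetical POCOs matching the fields used in Main.
public class Venta
{
    public string codigoProd { get; set; }
    public double precio { get; set; }
    public double importe { get; set; }
    public DateTime fecha { get; set; }
}

public class Producto
{
    public string id { get; set; }
    public string codigo { get; set; }
    public string nombre { get; set; }
    public string precio { get; set; }
}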