public override void Dispose(IDataLoadEventListener listener, Exception pipelineFailureExceptionIfAny)
{
    if (_destination != null)
    {
        _destination.Dispose(listener, pipelineFailureExceptionIfAny);

        //if the extraction failed, the table didn't exist in the destination (i.e. the table was created during the extraction) and we are to DropTableIfLoadFails
        if (pipelineFailureExceptionIfAny != null && _tableDidNotExistAtStartOfLoad && DropTableIfLoadFails)
        {
            if (_destinationDatabase != null)
            {
                var tbl = _destinationDatabase.ExpectTable(_toProcess.TableName);

                if (tbl.Exists())
                {
                    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning,
                        "DropTableIfLoadFails is true so about to drop table " + tbl));
                    tbl.Drop();
                    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning,
                        "Dropped table " + tbl));
                }
            }
        }
    }

    TableLoadInfo?.CloseAndArchive();

    // also close off the cumulative extraction result
    if (_request is ExtractDatasetCommand)
    {
        var result = ((IExtractDatasetCommand)_request).CumulativeExtractionResults;

        if (result != null && _toProcess != null)
            result.CompleteAudit(this.GetType(), GetDestinationDescription(), TableLoadInfo.Inserts);
    }
}
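The drop above is guarded three ways: the pipeline must have failed, the table must have been created by this run (not pre-existing), and the user must have opted in via DropTableIfLoadFails. A minimal sketch of the same guard in isolation, assuming a FAnsi DiscoveredDatabase; the helper name and the tableExistedBeforeLoad flag (standing in for _tableDidNotExistAtStartOfLoad, captured before the load starts) are hypothetical:

using FAnsi.Discovery;

public static class FailedLoadCleanup
{
    // Hypothetical helper: drop a table created during a failed load,
    // but never a table that existed before the load began.
    public static void DropIfCreatedByFailedLoad(
        DiscoveredDatabase db,
        string tableName,
        bool loadFailed,
        bool tableExistedBeforeLoad, // must be captured before the load starts
        bool dropTableIfLoadFails)
    {
        if (!loadFailed || tableExistedBeforeLoad || !dropTableIfLoadFails)
            return; // nothing to clean up, or the user has not opted in

        var tbl = db.ExpectTable(tableName);
        if (tbl.Exists())
            tbl.Drop();
    }
}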
private void CloseFile(IDataLoadEventListener listener)
{
    //we never even started or have already closed
    if (!haveOpened || _fileAlreadyClosed)
        return;

    _fileAlreadyClosed = true;

    try
    {
        //whatever happens in the writing block, make sure to at least attempt to close off the file
        _output.Close();
        GC.Collect(); //prevents file locks from sticking around

        //close audit object - unless it was prematurely closed e.g. by a failure somewhere
        if (!TableLoadInfo.IsClosed)
            TableLoadInfo.CloseAndArchive();

        // also close off the cumulative extraction result
        var result = ((IExtractDatasetCommand)_request).CumulativeExtractionResults;

        if (result != null)
            result.CompleteAudit(this.GetType(), GetDestinationDescription(), LinesWritten);
    }
    catch (Exception e)
    {
        listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Error,
            "Error when trying to close csv file", e));
    }
}
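The GC.Collect() above is a blunt way to release lingering file handles. Where the writer's lifetime is local, disposing it deterministically achieves the same release without a collection; a minimal sketch under that assumption (a plain StreamWriter rather than the class's _output field):

using System.IO;

public static class CsvWriting
{
    // Disposing the writer flushes its buffer and releases the OS file
    // handle immediately, so no GC.Collect() is needed to break file locks.
    public static void WriteAndClose(string path, string[] lines)
    {
        using (var output = new StreamWriter(path))
        {
            foreach (var line in lines)
                output.WriteLine(line);
        } // Dispose() here closes the underlying FileStream at once
    }
}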
public override void Dispose(IDataLoadEventListener listener, Exception pipelineFailureExceptionIfAny)
{
    if (_destination != null)
    {
        _destination.Dispose(listener, pipelineFailureExceptionIfAny);

        //if the extraction failed, the table didn't exist in the destination (i.e. the table was created during the extraction) and we are to DropTableIfLoadFails
        if (pipelineFailureExceptionIfAny != null && _tableDidNotExistAtStartOfLoad && DropTableIfLoadFails)
        {
            if (_destinationDatabase != null)
            {
                var tbl = _destinationDatabase.ExpectTable(_toProcess.TableName);

                if (tbl.Exists())
                {
                    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning,
                        "DropTableIfLoadFails is true so about to drop table " + tbl));
                    tbl.Drop();
                    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning,
                        "Dropped table " + tbl));
                }
            }
        }

        if (pipelineFailureExceptionIfAny == null
            && _request.IsBatchResume
            && MakeFinalTableDistinctWhenBatchResuming
            && _destinationDatabase != null
            && _toProcess != null)
        {
            var tbl = _destinationDatabase.ExpectTable(_toProcess.TableName);

            if (tbl.Exists())
            {
                // if there is no primary key then failed batches may have introduced duplication
                if (!tbl.DiscoverColumns().Any(p => p.IsPrimaryKey))
                {
                    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information,
                        $"Making {tbl} distinct in case there are duplicate rows from bad batch resumes"));
                    tbl.MakeDistinct(50000000);
                    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information,
                        $"Finished making {tbl} distinct"));
                }
            }
        }
    }

    TableLoadInfo?.CloseAndArchive();

    // also close off the cumulative extraction result
    if (_request is ExtractDatasetCommand)
    {
        var result = ((IExtractDatasetCommand)_request).CumulativeExtractionResults;

        if (result != null && _toProcess != null)
            result.CompleteAudit(this.GetType(), GetDestinationDescription(), TableLoadInfo.Inserts,
                _request.IsBatchResume, pipelineFailureExceptionIfAny != null);
    }
}
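The primary-key check is what decides whether deduplication is needed after a batch resume: a primary key already guarantees row uniqueness, so only key-less tables can accumulate duplicates across resumed batches. A minimal sketch of that decision on its own, assuming a FAnsi DiscoveredTable; the helper name is hypothetical and the 50000000 argument mirrors the timeout passed above:

using System.Linq;
using FAnsi.Discovery;

public static class BatchResumeHelper
{
    // Hypothetical helper: returns true if the table was deduplicated.
    // Tables with a primary key cannot hold duplicate rows, so they are
    // left untouched.
    public static bool DeduplicateIfNoPrimaryKey(DiscoveredTable tbl)
    {
        if (tbl.DiscoverColumns().Any(c => c.IsPrimaryKey))
            return false;

        tbl.MakeDistinct(50000000); // timeout, mirroring the call above
        return true;
    }
}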
public void MD5Test()
{
    string fileContents = "TestStringThatCouldBeSomethingInAFile";
    byte[] hashAsBytes;

    MemoryStream memory = new MemoryStream();
    StreamWriter writeToMemory = new StreamWriter(memory);
    writeToMemory.Write(fileContents);
    writeToMemory.Flush(); //flush the writer, not the stream, so the buffered characters actually reach the MemoryStream
    memory.Position = 0;

    using (var md5 = MD5.Create())
    {
        hashAsBytes = md5.ComputeHash(memory);
    }

    DataSource[] ds = new DataSource[] { new DataSource("nothing", DateTime.Now) };
    ds[0].MD5 = hashAsBytes;

    //MD5 is a property so confirm write and read are the same - and don't bomb
    Assert.AreEqual(ds[0].MD5, hashAsBytes);

    DataLoadInfo d = new DataLoadInfo("Internal", "HICSSISLibraryTests.FataErrorLoggingTest",
        "Test case for fatal error generation",
        "No rollback is possible/required as no database rows are actually inserted",
        true,
        new DiscoveredServer(UnitTestLoggingConnectionString));

    TableLoadInfo t = new TableLoadInfo(d, "Unit test only", "Unit test only", ds, 5);
    t.Inserts += 5; //simulate that 5 rows were inserted before closing
    t.CloseAndArchive();
    d.CloseAndMarkComplete();
}
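The MemoryStream/StreamWriter pairing above exists only to turn a string into a hashable stream. Since StreamWriter defaults to UTF-8 without a byte order mark, hashing the encoded bytes directly produces the same digest with less ceremony; a minimal sketch assuming .NET 5+ (where the static MD5.HashData is available) and a hypothetical helper name:

using System.Security.Cryptography;
using System.Text;

public static class HashHelper
{
    // Equivalent to writing the string through a default StreamWriter
    // (UTF-8, no BOM) and hashing the resulting stream contents.
    public static byte[] Md5OfString(string contents) =>
        MD5.HashData(Encoding.UTF8.GetBytes(contents));
}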