Example #1
        private void CloseFile(IDataLoadEventListener listener)
        {
            //we never even started or have already closed
            if (!haveOpened || _fileAlreadyClosed)
            {
                return;
            }

            _fileAlreadyClosed = true;

            try
            {
                //whatever happens in the writing block, make sure to at least attempt to close off the file
                _output.Close();
                GC.Collect(); //prevents file locks from sticking around

                //close audit object - unless it was prematurely closed e.g. by a failure somewhere
                if (!TableLoadInfo.IsClosed)
                {
                    TableLoadInfo.CloseAndArchive();
                }

                // also close off the cumulative extraction result
                var result = ((IExtractDatasetCommand)_request).CumulativeExtractionResults;
                if (result != null)
                {
                    result.CompleteAudit(this.GetType(), GetDestinationDescription(), LinesWritten);
                }
            }
            catch (Exception e)
            {
                listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Error, "Error when trying to close csv file", e));
            }
        }
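
Note the GC.Collect call above: it is a workaround that forces finalizers to run so lingering file handles are released. A minimal sketch of the more conventional alternative, closing the stream deterministically (the writer and path here are illustrative, not from the snippet):

            //sketch: deterministic disposal flushes and releases the file handle
            //as soon as the block exits, so no GC.Collect is needed
            using (var writer = new StreamWriter(outputPath))
            {
                writer.WriteLine("example row");
            } //writer.Dispose() runs here, even if an exception was thrown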
        public DataTable ProcessPipelineData(DataTable toProcess, IDataLoadEventListener job, GracefulCancellationToken cancellationToken)
        {
            _request.ElevateState(ExtractCommandState.WritingToFile);

            if (!haveWrittenBundleContents && _request is ExtractDatasetCommand)
            {
                WriteBundleContents(((ExtractDatasetCommand)_request).DatasetBundle, job, cancellationToken);
                haveWrittenBundleContents = true; //set the guard so bundle contents are only written once (as in Example #6)
            }

            if (_request is ExtractGlobalsCommand)
            {
                ExtractGlobals((ExtractGlobalsCommand)_request, job, _dataLoadInfo);
                return null;
            }

            stopwatch.Start();
            if (!haveOpened)
            {
                haveOpened = true;
                _output.Open();
                _output.WriteHeaders(toProcess);
                LinesWritten = 0;

                //create an audit object
                TableLoadInfo = new TableLoadInfo(_dataLoadInfo, "", OutputFile, new DataSource[] { new DataSource(_request.DescribeExtractionImplementation(), DateTime.Now) }, -1);
            }

            foreach (DataRow row in toProcess.Rows)
            {
                _output.Append(row);

                LinesWritten++;

                if (TableLoadInfo.IsClosed)
                {
                    throw new Exception("TableLoadInfo was closed so could not write number of rows (" + LinesWritten + ") to audit object - most likely the extraction crashed?");
                }
                else
                {
                    TableLoadInfo.Inserts = LinesWritten;
                }

                if (LinesWritten % 1000 == 0)
                {
                    job.OnProgress(this, new ProgressEventArgs("Write to file " + OutputFile, new ProgressMeasurement(LinesWritten, ProgressType.Records), stopwatch.Elapsed));
                }
            }
            job.OnProgress(this, new ProgressEventArgs("Write to file " + OutputFile, new ProgressMeasurement(LinesWritten, ProgressType.Records), stopwatch.Elapsed));

            stopwatch.Stop();
            _output.Flush();

            return null;
        }
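
The write loop above throttles its progress events: OnProgress fires once per 1000 rows inside the loop and once more after it, so listeners see steady progress without per-row event overhead while still receiving the exact final count. A minimal sketch of that pattern in isolation (only IDataLoadEventListener, ProgressEventArgs and ProgressMeasurement are from the snippet; the helper itself is hypothetical):

        private void ReportProgress(IDataLoadEventListener job, Stopwatch stopwatch, bool force = false)
        {
            //inside the loop: report every 1000 rows; after the loop: pass
            //force = true so the final (non-multiple-of-1000) count is always surfaced
            if (force || LinesWritten % 1000 == 0)
            {
                job.OnProgress(this, new ProgressEventArgs("Write to file " + OutputFile,
                    new ProgressMeasurement(LinesWritten, ProgressType.Records), stopwatch.Elapsed));
            }
        }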
        public override void Dispose(IDataLoadEventListener listener, Exception pipelineFailureExceptionIfAny)
        {
            if (_destination != null)
            {
                _destination.Dispose(listener, pipelineFailureExceptionIfAny);

                //if the extraction failed, the table didn't exist in the destination (i.e. the table was created during the extraction) and we are to DropTableIfLoadFails
                if (pipelineFailureExceptionIfAny != null && _tableDidNotExistAtStartOfLoad && DropTableIfLoadFails)
                {
                    if (_destinationDatabase != null)
                    {
                        var tbl = _destinationDatabase.ExpectTable(_toProcess.TableName);

                        if (tbl.Exists())
                        {
                            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "DropTableIfLoadFails is true so about to drop table " + tbl));
                            tbl.Drop();
                            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "Dropped table " + tbl));
                        }
                    }
                }

                if (pipelineFailureExceptionIfAny == null &&
                    _request.IsBatchResume &&
                    MakeFinalTableDistinctWhenBatchResuming &&
                    _destinationDatabase != null &&
                    _toProcess != null)
                {
                    var tbl = _destinationDatabase.ExpectTable(_toProcess.TableName);
                    if (tbl.Exists())
                    {
                        // if there is no primary key then failed batches may have introduced duplication
                        if (!tbl.DiscoverColumns().Any(p => p.IsPrimaryKey))
                        {
                            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, $"Making {tbl} distinct in case there are duplicate rows from bad batch resumes"));
                            tbl.MakeDistinct(50000000);
                            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, $"Finished making {tbl} distinct"));
                        }
                    }
                }
            }

            TableLoadInfo?.CloseAndArchive();

            // also close off the cumulative extraction result
            if (_request is ExtractDatasetCommand)
            {
                var result = ((IExtractDatasetCommand)_request).CumulativeExtractionResults;
                if (result != null && _toProcess != null)
                {
                    result.CompleteAudit(this.GetType(), GetDestinationDescription(), TableLoadInfo.Inserts, _request.IsBatchResume, pipelineFailureExceptionIfAny != null);
                }
            }
        }
        public DataTable ProcessPipelineData(DataTable toProcess, IDataLoadEventListener listener, GracefulCancellationToken cancellationToken)
        {
            _request.ElevateState(ExtractCommandState.WritingToFile);
            _toProcess = toProcess;

            _destinationDatabase = GetDestinationDatabase(listener);

            //give the data table the correct name
            if (toProcess.ExtendedProperties.ContainsKey("ProperlyNamed") && toProcess.ExtendedProperties["ProperlyNamed"].Equals(true))
            {
                _isTableAlreadyNamed = true;
            }

            _toProcess.TableName = GetTableName();

            if (_destination == null)
            {
                _destination = PrepareDestination(listener, toProcess);
            }

            if (TableLoadInfo == null)
            {
                TableLoadInfo = new TableLoadInfo(_dataLoadInfo, "", _toProcess.TableName, new[] { new DataSource(_request.DescribeExtractionImplementation(), DateTime.Now) }, -1);
            }

            if (TableLoadInfo.IsClosed) // Maybe it was open and it crashed?
            {
                throw new Exception("TableLoadInfo was closed so could not write number of rows (" + toProcess.Rows.Count + ") to audit object - most likely the extraction crashed?");
            }

            if (_request is ExtractDatasetCommand && !haveExtractedBundledContent)
            {
                WriteBundleContents(((ExtractDatasetCommand)_request).DatasetBundle, listener, cancellationToken);
            }

            if (_request is ExtractGlobalsCommand)
            {
                ExtractGlobals((ExtractGlobalsCommand)_request, listener, _dataLoadInfo);
                return null;
            }

            _destination.ProcessPipelineData(toProcess, listener, cancellationToken);
            TableLoadInfo.Inserts += toProcess.Rows.Count;

            return null;
        }
Example #5
        public void FataErrorLoggingTest()
        {
            DataLoadInfo d = new DataLoadInfo("Internal", "HICSSISLibraryTests.FataErrorLoggingTest",
                                              "Test case for fatal error generation",
                                              "No rollback is possible/required as no database rows are actually inserted",
                                              true, new DiscoveredServer(UnitTestLoggingConnectionString));

            DataSource[] ds = new DataSource[] { new DataSource("nothing", DateTime.Now) };

            TableLoadInfo t = new TableLoadInfo(d, "Unit test only", "Unit test only", ds, 5);

            t.Inserts += 3; //simulate that it crashed after 3

            d.LogFatalError("HICSSISLibraryTests.FataErrorLoggingTest", "Some terrible event happened");

            Assert.IsTrue(d.IsClosed);
        }
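
Example #5 exercises the failure path (LogFatalError closes the whole run); the happy path of the same audit API, as used across the examples above, is: create a DataLoadInfo run record, attach a TableLoadInfo, bump Inserts while writing, then close both. A sketch of that lifecycle, reusing only the constructor shapes visible above (names and values are placeholders, and CloseAndMarkComplete is assumed from the wider API rather than shown in these snippets):

            DataLoadInfo run = new DataLoadInfo("Internal", "Demo.HappyPath",
                                                "Sketch of the audit lifecycle",
                                                "No rollback required",
                                                true, new DiscoveredServer(UnitTestLoggingConnectionString));

            TableLoadInfo table = new TableLoadInfo(run, "", "DemoTable",
                                                    new[] { new DataSource("demo", DateTime.Now) }, -1);

            table.Inserts += 100;     //updated as rows are written (see Examples #1 and #6)
            table.CloseAndArchive();  //finalise the per-table audit, as in CloseFile/Dispose above

            run.CloseAndMarkComplete(); //assumed close call for the run record - not shown in these snippets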
Example #6
        /// <summary>
        /// Extracts the rows in <paramref name="toProcess"/> to the extraction destination
        /// </summary>
        /// <param name="toProcess">The current batch of records to write out</param>
        /// <param name="job">Listener to receive progress and error notifications</param>
        /// <param name="cancellationToken">Token for responding to graceful abort requests</param>
        /// <returns>null, since this component is a destination and passes nothing further down the pipeline</returns>
        public virtual DataTable ProcessPipelineData(DataTable toProcess, IDataLoadEventListener job, GracefulCancellationToken cancellationToken)
        {
            _request.ElevateState(ExtractCommandState.WritingToFile);

            if (!haveWrittenBundleContents && _request is ExtractDatasetCommand)
            {
                WriteBundleContents(((ExtractDatasetCommand)_request).DatasetBundle, job, cancellationToken);
                haveWrittenBundleContents = true;
            }

            if (_request is ExtractGlobalsCommand)
            {
                ExtractGlobals((ExtractGlobalsCommand)_request, job, _dataLoadInfo);
                return null;
            }

            stopwatch.Start();
            if (!haveOpened)
            {
                haveOpened   = true;
                LinesWritten = 0;
                Open(toProcess, job, cancellationToken);

                //create an audit object
                TableLoadInfo = new TableLoadInfo(_dataLoadInfo, "", OutputFile, new DataSource[] { new DataSource(_request.DescribeExtractionImplementation(), DateTime.Now) }, -1);
            }

            WriteRows(toProcess, job, cancellationToken, stopwatch);

            if (TableLoadInfo.IsClosed)
            {
                throw new Exception("TableLoadInfo was closed so could not write number of rows (" + LinesWritten + ") to audit object - most likely the extraction crashed?");
            }
            else
            {
                TableLoadInfo.Inserts = LinesWritten;
            }

            Flush(job, cancellationToken, stopwatch);
            stopwatch.Stop();

            return null;
        }
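
Example #6 is the template-method variant of Example #1: the base class owns the guard flags, the audit object and the progress loop, and delegates the format-specific steps to Open, WriteRows and Flush. A hedged sketch of what a concrete destination might look like; the base-class name and the exact member signatures are inferred from the calls above, not confirmed by the source:

        //hypothetical subclass - the base type name and the overridden signatures
        //are inferred from the Open/WriteRows/Flush calls in Example #6
        public class TabDelimitedDestination : FlatFileDestinationBase
        {
            private StreamWriter _writer;

            protected override void Open(DataTable toProcess, IDataLoadEventListener job, GracefulCancellationToken cancellationToken)
            {
                _writer = new StreamWriter(OutputFile);
                //header row: one column name per DataTable column
                _writer.WriteLine(string.Join("\t", toProcess.Columns.Cast<DataColumn>().Select(c => c.ColumnName)));
            }

            protected override void WriteRows(DataTable toProcess, IDataLoadEventListener job, GracefulCancellationToken cancellationToken, Stopwatch stopwatch)
            {
                foreach (DataRow row in toProcess.Rows)
                {
                    _writer.WriteLine(string.Join("\t", row.ItemArray));
                    LinesWritten++;
                }
            }

            protected override void Flush(IDataLoadEventListener job, GracefulCancellationToken cancellationToken, Stopwatch stopwatch)
            {
                _writer.Flush();
            }
        }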