Example #1
        private DiscoveredTable UploadTestDataAsTableToServer(DatabaseType type, out Catalogue catalogue, out ExtractionInformation[] extractionInformations, out TableInfo tableinfo)
        {
            var listener = new ThrowImmediatelyDataLoadEventListener();

            var db = GetCleanedServer(type);

            var data = GetTestDataTable();

            var uploader = new DataTableUploadDestination();

            uploader.PreInitialize(db, listener);
            uploader.ProcessPipelineData(data, listener, new GracefulCancellationToken());
            uploader.Dispose(listener, null);
            var tbl = db.ExpectTable(uploader.TargetTableName);

            Assert.IsTrue(tbl.Exists());

            ColumnInfo[] cis;
            new TableInfoImporter(CatalogueRepository, tbl).DoImport(out tableinfo, out cis);


            CatalogueItem[] cataitems;
            new ForwardEngineerCatalogue(tableinfo, cis, true).ExecuteForwardEngineering(out catalogue, out cataitems, out extractionInformations);

            return tbl;
        }
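Every example on this page drives DataTableUploadDestination through the same three calls: PreInitialize binds the component to a target database, ProcessPipelineData creates the destination table and inserts the rows, and Dispose finalises the load (note that the Exists assertion above only runs after Dispose). A minimal sketch of the pattern, where db and dt are assumed to be an existing DiscoveredDatabase and a populated System.Data.DataTable:

        var listener = new ThrowImmediatelyDataLoadEventListener();
        var uploader = new DataTableUploadDestination();

        uploader.PreInitialize(db, listener);                                         // bind to the target database
        uploader.ProcessPipelineData(dt, listener, new GracefulCancellationToken());  // create the table and bulk insert the rows
        uploader.Dispose(listener, null);                                             // null = no pipeline failure, so the load completes normally

        var tbl = db.ExpectTable(uploader.TargetTableName);                           // discover the table that was just created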
        public override void Dispose(IDataLoadEventListener listener, Exception pipelineFailureExceptionIfAny)
        {
            if (_destination != null)
            {
                _destination.Dispose(listener, pipelineFailureExceptionIfAny);

                //drop the table only if: the extraction failed, the table did not exist at the start of the load (i.e. it was created during this extraction) and DropTableIfLoadFails is set
                if (pipelineFailureExceptionIfAny != null && _tableDidNotExistAtStartOfLoad && DropTableIfLoadFails)
                {
                    if (_destinationDatabase != null)
                    {
                        var tbl = _destinationDatabase.ExpectTable(_toProcess.TableName);

                        if (tbl.Exists())
                        {
                            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "DropTableIfLoadFails is true so about to drop table " + tbl));
                            tbl.Drop();
                            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "Dropped table " + tbl));
                        }
                    }
                }
            }

            TableLoadInfo?.CloseAndArchive();

            // also close off the cumulative extraction result
            if (_request is ExtractDatasetCommand)
            {
                var result = ((IExtractDatasetCommand)_request).CumulativeExtractionResults;
                if (result != null && _toProcess != null)
                {
                    result.CompleteAudit(this.GetType(), GetDestinationDescription(), TableLoadInfo.Inserts);
                }
            }
        }
        public override void Dispose(IDataLoadEventListener listener, Exception pipelineFailureExceptionIfAny)
        {
            if (_destination != null)
            {
                _destination.Dispose(listener, pipelineFailureExceptionIfAny);

                //drop the table only if: the extraction failed, the table did not exist at the start of the load (i.e. it was created during this extraction) and DropTableIfLoadFails is set
                if (pipelineFailureExceptionIfAny != null && _tableDidNotExistAtStartOfLoad && DropTableIfLoadFails)
                {
                    if (_destinationDatabase != null)
                    {
                        var tbl = _destinationDatabase.ExpectTable(_toProcess.TableName);

                        if (tbl.Exists())
                        {
                            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "DropTableIfLoadFails is true so about to drop table " + tbl));
                            tbl.Drop();
                            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "Dropped table " + tbl));
                        }
                    }
                }

                if (pipelineFailureExceptionIfAny == null &&
                    _request.IsBatchResume &&
                    MakeFinalTableDistinctWhenBatchResuming &&
                    _destinationDatabase != null &&
                    _toProcess != null)
                {
                    var tbl = _destinationDatabase.ExpectTable(_toProcess.TableName);
                    if (tbl.Exists())
                    {
                        // if there is no primary key then failed batches may have introduced duplication
                        if (!tbl.DiscoverColumns().Any(p => p.IsPrimaryKey))
                        {
                            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, $"Making {tbl} distinct in case there are duplicate rows from bad batch resumes"));
                            tbl.MakeDistinct(50000000);
                            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, $"Finished distincting {tbl}"));
                        }
                    }
                }
            }

            TableLoadInfo?.CloseAndArchive();

            // also close off the cumulative extraction result
            if (_request is ExtractDatasetCommand)
            {
                var result = ((IExtractDatasetCommand)_request).CumulativeExtractionResults;
                if (result != null && _toProcess != null)
                {
                    result.CompleteAudit(this.GetType(), GetDestinationDescription(), TableLoadInfo.Inserts, _request.IsBatchResume, pipelineFailureExceptionIfAny != null);
                }
            }
        }
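This second Dispose is a later revision of the one above. It adds one step: when a batch-resume extraction completes without error and the destination table has no primary key (so previously failed batches may have inserted the same rows twice), the table is deduplicated via MakeDistinct. It also passes two extra audit flags to CompleteAudit: whether this was a batch resume and whether the pipeline failed.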
        private void ExtractLookupTableSql(BundledLookupTable lookup, IDataLoadEventListener listener, DataLoadInfo dataLoadInfo)
        {
            try
            {
                var tempDestination = new DataTableUploadDestination();

                var server = DataAccessPortal.GetInstance().ExpectServer(lookup.TableInfo, DataAccessContext.DataExport);

                listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "About to download SQL for lookup " + lookup.TableInfo.Name));
                using (var con = server.GetConnection())
                {
                    con.Open();
                    var sqlString = "SELECT * FROM " + lookup.TableInfo.Name;
                    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Connection opened successfully, about to send SQL command: " + sqlString));
                    var cmd = DatabaseCommandHelper.GetCommand(sqlString, con);
                    var da  = DatabaseCommandHelper.GetDataAdapter(cmd);

                    var sw = new Stopwatch();

                    sw.Start();
                    DataTable dt = new DataTable();
                    da.Fill(dt);

                    dt.TableName = GetTableName(_destinationDatabase.Server.GetQuerySyntaxHelper().GetSensibleTableNameFromString(lookup.TableInfo.Name));

                    var tableLoadInfo = dataLoadInfo.CreateTableLoadInfo("", dt.TableName, new[] { new DataSource(sqlString, DateTime.Now) }, -1);
                    tableLoadInfo.Inserts = dt.Rows.Count;

                    listener.OnProgress(this, new ProgressEventArgs("Reading from Lookup " + lookup.TableInfo.Name, new ProgressMeasurement(dt.Rows.Count, ProgressType.Records), sw.Elapsed));
                    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Decided on the following destination table name for Lookup: " + dt.TableName));

                    tempDestination.AllowResizingColumnsAtUploadTime = true;
                    tempDestination.PreInitialize(GetDestinationDatabase(listener), listener);
                    tempDestination.ProcessPipelineData(dt, listener, new GracefulCancellationToken());
                    tempDestination.Dispose(listener, null);

                    //end auditing it
                    tableLoadInfo.CloseAndArchive();

                    if (_request is ExtractDatasetCommand)
                    {
                        var result             = (_request as ExtractDatasetCommand).CumulativeExtractionResults;
                        var supplementalResult = result.AddSupplementalExtractionResult("SELECT * FROM " + lookup.TableInfo.Name, lookup.TableInfo);
                        supplementalResult.CompleteAudit(this.GetType(), TargetDatabaseServer.ID + "|" + GetDatabaseName() + "|" + dt.TableName, dt.Rows.Count);
                    }
                }
            }
            catch (Exception e)
            {
                listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Error, "Extraction of Lookup " + lookup.TableInfo.Name + " failed ", e));
                throw;
            }
        }
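This lookup extractor and ExtractSupportingSql (at the end of this page) share the same fill-then-upload core: read the whole result set into a DataTable via a data adapter, then push it through a DataTableUploadDestination. A condensed sketch, assuming con is an open connection and sqlString any SELECT statement:

        var cmd = DatabaseCommandHelper.GetCommand(sqlString, con);
        var da  = DatabaseCommandHelper.GetDataAdapter(cmd);

        var dt = new DataTable();
        da.Fill(dt); // pulls the entire result set into memory before the upload begins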
Example #5
        public void Test_SingleFile(bool expressRelative)
        {
            var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer);

            var source = new DicomFileCollectionSource {
                FilenameField = "RelativeFileArchiveURI"
            };

            if (expressRelative)
            {
                source.ArchiveRoot = TestContext.CurrentContext.TestDirectory;
            }

            var f = new FlatFileToLoad(new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, @"TestData/IM-0001-0013.dcm")));

            source.PreInitialize(new FlatFileToLoadDicomFileWorklist(f), new ThrowImmediatelyDataLoadEventListener());

            var tbl         = source.GetChunk(new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken());
            var destination = new DataTableUploadDestination();

            destination.PreInitialize(db, new ThrowImmediatelyDataLoadEventListener());
            destination.AllowResizingColumnsAtUploadTime = true;
            destination.ProcessPipelineData(tbl, new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken());
            destination.Dispose(new ThrowImmediatelyDataLoadEventListener(), null);

            var finalTable = db.ExpectTable(destination.TargetTableName);

            using (var dt = finalTable.GetDataTable())
            {
                //should be 1 row in the final table
                Assert.AreEqual(1, dt.Rows.Count);

                //the path referenced should be the file read in relative/absolute format
                Assert.AreEqual(expressRelative ? "./TestData/IM-0001-0013.dcm":
                                f.File.FullName.Replace('\\', '/')
                                , dt.Rows[0]["RelativeFileArchiveURI"]);
            }

            Assert.IsTrue(finalTable.Exists());
            finalTable.Drop();
        }
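The expressRelative flag controls how the source records file paths: when ArchiveRoot is set, RelativeFileArchiveURI holds a path relative to that root (./TestData/IM-0001-0013.dcm); when it is not, the column holds the absolute path with backslashes normalised to forward slashes.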
Example #6
        private DiscoveredTable UploadTestDataAsTableToServer(DatabaseType type, out ICatalogue catalogue, out ExtractionInformation[] extractionInformations, out ITableInfo tableinfo)
        {
            var listener = new ThrowImmediatelyDataLoadEventListener();

            var db = GetCleanedServer(type);

            var data = GetTestDataTable();

            var uploader = new DataTableUploadDestination();

            uploader.PreInitialize(db, listener);
            uploader.ProcessPipelineData(data, listener, new GracefulCancellationToken());
            uploader.Dispose(listener, null);
            var tbl = db.ExpectTable(uploader.TargetTableName);

            Assert.IsTrue(tbl.Exists());

            catalogue = Import(tbl, out tableinfo, out _, out _, out extractionInformations);

            return tbl;
        }
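This is a later revision of the helper in Example #1: the manual TableInfoImporter / ForwardEngineerCatalogue pair has been collapsed into a single Import call, and the out parameters are now the interface types ICatalogue and ITableInfo.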
        public void PipelineTest()
        {
            var source = new DicomFileCollectionSource();

            source.FilenameField = "RelativeFileArchiveURI";

            var f = new FlatFileToLoad(new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, @"TestData\IM-0001-0013.dcm")));

            source.PreInitialize(new FlatFileToLoadDicomFileWorklist(f), new ThrowImmediatelyDataLoadEventListener());

            var tbl         = source.GetChunk(new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken());
            var destination = new DataTableUploadDestination();

            destination.PreInitialize(DiscoveredDatabaseICanCreateRandomTablesIn, new ThrowImmediatelyDataLoadEventListener());
            destination.AllowResizingColumnsAtUploadTime = true;
            destination.ProcessPipelineData(tbl, new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken());
            destination.Dispose(new ThrowImmediatelyDataLoadEventListener(), null);

            var finalTable = DiscoveredDatabaseICanCreateRandomTablesIn.ExpectTable(destination.TargetTableName);

            Assert.IsTrue(finalTable.Exists());
            finalTable.Drop();
        }
Example #8
        public void Test_DatabaseTypeQueryWithParameter_IntParameter(DatabaseType dbType)
        {
            //Pick the destination server
            var tableName = TestDatabaseNames.GetConsistentName("tbl");

            //make sure there's a database ready to receive the data
            var db = GetCleanedServer(dbType);

            db.Create(true);


            //this is the table we are uploading
            var dt = new DataTable();

            dt.Columns.Add("numbercol");
            dt.Rows.Add(10);
            dt.Rows.Add(15);
            dt.Rows.Add(20);
            dt.Rows.Add(25);
            dt.TableName = tableName;
            try
            {
                ///////////////////////UPLOAD THE DataTable TO THE DESTINATION////////////////////////////////////////////
                var uploader = new DataTableUploadDestination();
                uploader.PreInitialize(db, new ThrowImmediatelyDataLoadJob());
                uploader.ProcessPipelineData(dt, new ThrowImmediatelyDataLoadJob(), new GracefulCancellationToken());
                uploader.Dispose(new ThrowImmediatelyDataLoadJob(), null);

                var tbl = db.ExpectTable(tableName);

                var importer = new TableInfoImporter(CatalogueRepository, tbl);
                importer.DoImport(out var ti, out var ci);

                var engineer = new ForwardEngineerCatalogue(ti, ci, true);
                engineer.ExecuteForwardEngineering(out var cata, out var cis, out var ei);
                /////////////////////////////////////////////////////////////////////////////////////////////////////////

                /////////////////////////////////THE ACTUAL PROPER TEST////////////////////////////////////
                //create an extraction filter
                var extractionInformation = ei.Single();
                var filter = new ExtractionFilter(CatalogueRepository, "Filter by numbers", extractionInformation);
                filter.WhereSQL = extractionInformation.SelectSQL + " = @n";
                filter.SaveToDatabase();

                //create the parameters for filter (no globals, masters or scope adjacent parameters)
                new ParameterCreator(filter.GetFilterFactory(), null, null).CreateAll(filter, null);

                var p = filter.GetAllParameters().Single();
                Assert.AreEqual("@n", p.ParameterName);
                p.ParameterSQL = p.ParameterSQL.Replace("varchar(50)", "int"); //make it int
                p.Value        = "20";
                p.SaveToDatabase();

                var qb = new QueryBuilder(null, null);
                qb.AddColumn(extractionInformation);
                qb.RootFilterContainer = new SpontaneouslyInventedFilterContainer(new MemoryCatalogueRepository(), null, new[] { filter }, FilterContainerOperation.AND);

                using (var con = db.Server.GetConnection())
                {
                    con.Open();

                    string sql = qb.SQL;

                    var cmd = db.Server.GetCommand(sql, con);
                    var r   = cmd.ExecuteReader();
                    Assert.IsTrue(r.Read());
                    Assert.AreEqual(
                        20,
                        r[extractionInformation.GetRuntimeName()]);
                }
                ///////////////////////////////////////////////////////////////////////////////////////
            }
            finally
            {
                db.Drop();
            }
        }
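The test depends on ParameterCreator generating a declaration for each @parameter referenced in the filter's WhereSQL. Judging by the Replace call above, the generated ParameterSQL declares @n as varchar(50) by default (presumably something like DECLARE @n AS varchar(50)), which the test rewrites to int before assigning the value 20 and executing the built query.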
Example #9
        private void CopyCohortToDataServer(IDataLoadEventListener listener, GracefulCancellationToken cancellationToken)
        {
            DataTable cohortDataTable = null;

            SetServer();

            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "About to wait for Semaphore OneCrossServerExtractionAtATime to become available"));
            OneCrossServerExtractionAtATime.WaitOne(-1);
            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Captured Semaphore OneCrossServerExtractionAtATime"));

            try
            {
                IExtractableCohort cohort = Request.ExtractableCohort;
                cohortDataTable = cohort.FetchEntireCohort();
            }
            catch (Exception e)
            {
                throw new Exception("An error occurred while trying to download the cohort from the Cohort server (in preparation for transfering it to the data server for linkage and extraction)", e);
            }

            //make sure tempdb exists (covers servers where it doesn't exist, e.g. MySQL, or where the user has specified a different database name)
            if (!_tempDb.Exists())
            {
                if (CreateAndDestroyTemporaryDatabaseIfNotExists)
                {
                    _tempDb.Create();
                    _hadToCreate = true;
                }
                else
                {
                    throw new Exception("Database '" + _tempDb + "' did not exist on server '" + _server + "' and CreateAndDestroyTemporaryDatabaseIfNotExists was false");
                }
            }
            else
            {
                _hadToCreate = false;
            }

            var tbl = _tempDb.ExpectTable(cohortDataTable.TableName);

            if (tbl.Exists())
            {
                listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "Found existing table called '" + tbl + "' in '" + _tempDb + "'"));

                if (DropExistingCohortTableIfExists)
                {
                    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "About to drop existing table '" + tbl + "'"));

                    try
                    {
                        tbl.Drop();
                        listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "Dropped existing table '" + tbl + "'"));
                    }
                    catch (Exception ex)
                    {
                        listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "Warning: dropping '" + tbl + "' failed", ex));
                    }
                }
                else
                {
                    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "'" + _tempDb + "' contains a table called '" + tbl + "' and DropExistingCohortTableIfExists is false"));
                }
            }

            var destination = new DataTableUploadDestination();

            destination.PreInitialize(_tempDb, listener);
            destination.ProcessPipelineData(cohortDataTable, listener, cancellationToken);
            destination.Dispose(listener, null);



            if (!tbl.Exists())
            {
                throw new Exception("Table '" + tbl + "' did not exist despite DataTableUploadDestination completing Successfully!");
            }

            tablesToCleanup.Add(tbl);

            //table will now be in tempdb
            _haveCopiedCohortAndAdjustedSql = true;
        }
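The static semaphore OneCrossServerExtractionAtATime serialises cohort copying so that only one cross-server extraction populates the temporary database at a time. Note the lifecycle bookkeeping: _hadToCreate records whether this run created the temporary database (so it can be destroyed again afterwards, per CreateAndDestroyTemporaryDatabaseIfNotExists), and every uploaded table is registered in tablesToCleanup.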
Example #10
        public void Test_ZipFile(bool expressRelative)
        {
            //get a clean database to upload to
            var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer);

            //create a folder in which to generate some dicoms
            var dirToLoad = new DirectoryInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, nameof(Test_ZipFile)));

            if (dirToLoad.Exists)
            {
                dirToLoad.Delete(true);
            }

            dirToLoad.Create();

            //generate some random dicoms
            var r = new Random(999);
            DicomDataGenerator generator = new DicomDataGenerator(r, dirToLoad, "CT")
            {
                MaximumImages = 5
            };
            var people = new PersonCollection();

            people.GeneratePeople(1, r);
            generator.GenerateTestDataFile(people, new FileInfo("./inventory.csv"), 1);

            //This generates
            // Test_ZipFile
            //      2015
            //          3
            //              18
            //                  751140 2.25.166922918107154891877498685128076062226.dcm
            //                  751140 2.25.179610809676265137473873365625829826423.dcm
            //                  751140 2.25.201969634959506849065133495434871450465.dcm
            //                  751140 2.25.237492679533001779093365416814254319890.dcm
            //                  751140 2.25.316241631782653383510844072713132248731.dcm

            var yearDir = dirToLoad.GetDirectories().Single();

            StringAssert.IsMatch("\\d{4}", yearDir.Name);

            //zip them up
            FileInfo zip = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, nameof(Test_ZipFile) + ".zip"));

            if (zip.Exists)
            {
                zip.Delete();
            }

            ZipFile.CreateFromDirectory(dirToLoad.FullName, zip.FullName);

            //tell the source to load the zip
            var f = new FlatFileToLoad(zip);

            var source = new DicomFileCollectionSource {
                FilenameField = "RelativeFileArchiveURI"
            };

            if (expressRelative)
            {
                source.ArchiveRoot = TestContext.CurrentContext.TestDirectory;
            }

            source.PreInitialize(new FlatFileToLoadDicomFileWorklist(f), new ThrowImmediatelyDataLoadEventListener());

            var tbl         = source.GetChunk(new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken());
            var destination = new DataTableUploadDestination();

            destination.PreInitialize(db, new ThrowImmediatelyDataLoadEventListener());
            destination.AllowResizingColumnsAtUploadTime = true;
            destination.ProcessPipelineData(tbl, new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken());
            destination.Dispose(new ThrowImmediatelyDataLoadEventListener(), null);

            var finalTable = db.ExpectTable(destination.TargetTableName);

            using (var dt = finalTable.GetDataTable())
            {
                //should be 5 rows in the final table (5 images)
                Assert.AreEqual(5, dt.Rows.Count);

                string pathInDbToDicomFile = (string)dt.Rows[0]["RelativeFileArchiveURI"];

                //We expect either something like:
                // E:/RdmpDicom/Rdmp.Dicom.Tests/bin/Debug/netcoreapp2.2/Test_ZipFile.zip!2015/3/18/2.25.160787663560951826149226183314694084702.dcm
                // ./Test_ZipFile.zip!2015/3/18/2.25.105592977437473375573190160334447272386.dcm

                //the path referenced should be the file read in relative/absolute format
                StringAssert.IsMatch(
                    expressRelative ? $@"./{zip.Name}![\d./]*.dcm":
                    $@"{Regex.Escape(zip.FullName.Replace('\\','/'))}![\d./]*.dcm",
                    pathInDbToDicomFile);

                StringAssert.Contains(yearDir.Name, pathInDbToDicomFile, "Expected zip file to have subdirectories and for them to be loaded correctly");

                //confirm we can read that out again
                using (var pool = new ZipPool())
                {
                    var path = new AmbiguousFilePath(TestContext.CurrentContext.TestDirectory, pathInDbToDicomFile);
                    Assert.IsNotNull(path.GetDataset(pool));
                }
            }

            Assert.IsTrue(finalTable.Exists());
            finalTable.Drop();
        }
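Paths inside an archive are recorded as the zip file and the entry name separated by an exclamation mark (for example Test_ZipFile.zip!2015/3/18/&lt;uid&gt;.dcm), again relative or absolute depending on expressRelative. The final using block demonstrates the round trip: an AmbiguousFilePath resolved against the archive root, plus a shared ZipPool, reads the dataset back out of the zip.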
        private ExtractCommandState ExtractSupportingSql(SupportingSQLTable sql, IDataLoadEventListener listener, DataLoadInfo dataLoadInfo)
        {
            try
            {
                var tempDestination = new DataTableUploadDestination();

                listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "About to download SQL for global SupportingSQL " + sql.Name));
                using (var con = sql.GetServer().GetConnection())
                {
                    con.Open();
                    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Connection opened successfully, about to send SQL command " + sql.SQL));
                    var cmd = DatabaseCommandHelper.GetCommand(sql.SQL, con);
                    var da  = DatabaseCommandHelper.GetDataAdapter(cmd);

                    var sw = new Stopwatch();

                    sw.Start();
                    DataTable dt = new DataTable();
                    da.Fill(dt);

                    dt.TableName = GetTableName(_destinationDatabase.Server.GetQuerySyntaxHelper().GetSensibleTableNameFromString(sql.Name));

                    var tableLoadInfo = dataLoadInfo.CreateTableLoadInfo("", dt.TableName, new[] { new DataSource(sql.SQL, DateTime.Now) }, -1);
                    tableLoadInfo.Inserts = dt.Rows.Count;

                    listener.OnProgress(this, new ProgressEventArgs("Reading from SupportingSQL " + sql.Name, new ProgressMeasurement(dt.Rows.Count, ProgressType.Records), sw.Elapsed));
                    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Decided on the following destination table name for SupportingSQL: " + dt.TableName));

                    tempDestination.AllowResizingColumnsAtUploadTime = true;
                    tempDestination.PreInitialize(GetDestinationDatabase(listener), listener);
                    tempDestination.ProcessPipelineData(dt, listener, new GracefulCancellationToken());
                    tempDestination.Dispose(listener, null);

                    //end auditing it
                    tableLoadInfo.CloseAndArchive();

                    if (_request is ExtractDatasetCommand)
                    {
                        var result             = (_request as ExtractDatasetCommand).CumulativeExtractionResults;
                        var supplementalResult = result.AddSupplementalExtractionResult(sql.SQL, sql);
                        supplementalResult.CompleteAudit(this.GetType(), TargetDatabaseServer.ID + "|" + GetDatabaseName() + "|" + dt.TableName, dt.Rows.Count);
                    }
                    else
                    {
                        var extractGlobalsCommand = (_request as ExtractGlobalsCommand);
                        Debug.Assert(extractGlobalsCommand != null, "extractGlobalsCommand != null");
                        var result =
                            new SupplementalExtractionResults(extractGlobalsCommand.RepositoryLocator.DataExportRepository,
                                                              extractGlobalsCommand.Configuration,
                                                              sql.SQL,
                                                              sql);
                        result.CompleteAudit(this.GetType(), TargetDatabaseServer.ID + "|" + GetDatabaseName() + "|" + dt.TableName, dt.Rows.Count);
                        extractGlobalsCommand.ExtractionResults.Add(result);
                    }
                }
            }
            catch (Exception e)
            {
                listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Error, "Extraction of SupportingSQL " + sql + " failed ", e));
                return ExtractCommandState.Crashed;
            }

            return ExtractCommandState.Completed;
        }
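Unlike ExtractLookupTableSql, which rethrows on failure, this method catches the exception, notifies the listener and returns ExtractCommandState.Crashed; on success it returns ExtractCommandState.Completed after auditing the upload as a supplemental extraction result (attached to the cumulative result for dataset extractions, or recorded as a new SupplementalExtractionResults for globals).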