private DiscoveredTable UploadTestDataAsTableToServer(DatabaseType type, out Catalogue catalogue, out ExtractionInformation[] extractionInformations, out TableInfo tableinfo)
        {
            var listener = new ThrowImmediatelyDataLoadEventListener();

            var db = GetCleanedServer(type);

            var data = GetTestDataTable();

            var uploader = new DataTableUploadDestination();

            uploader.PreInitialize(db, listener);
            uploader.ProcessPipelineData(data, listener, new GracefulCancellationToken());
            uploader.Dispose(listener, null);
            var tbl = db.ExpectTable(uploader.TargetTableName);

            Assert.IsTrue(tbl.Exists());

            ColumnInfo[] cis;
            new TableInfoImporter(CatalogueRepository, tbl).DoImport(out tableinfo, out cis);

            CatalogueItem[] cataitems;
            new ForwardEngineerCatalogue(tableinfo, cis, true).ExecuteForwardEngineering(out catalogue, out cataitems, out extractionInformations);

            return tbl;
        }
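        // GetTestDataTable() is called above but not shown in this snippet. Below is a
        // minimal hypothetical sketch of such a helper; the table name, columns and rows
        // are assumptions for illustration only, not part of the original.
        private DataTable GetTestDataTable()
        {
            var dt = new DataTable();
            dt.TableName = "MyTestTable"; //the uploader takes the target table name from this
            dt.Columns.Add("Name", typeof(string));
            dt.Columns.Add("Age", typeof(int));
            dt.Rows.Add("Frank", 51);
            dt.Rows.Add("Dave", 23);
            return dt;
        }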
        private void ExtractLookupTableSql(BundledLookupTable lookup, IDataLoadEventListener listener, DataLoadInfo dataLoadInfo)
        {
            try
            {
                var tempDestination = new DataTableUploadDestination();

                var server = DataAccessPortal.GetInstance().ExpectServer(lookup.TableInfo, DataAccessContext.DataExport);

                listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "About to download SQL for lookup " + lookup.TableInfo.Name));
                using (var con = server.GetConnection())
                {
                    con.Open();
                    var sqlString = "SELECT * FROM " + lookup.TableInfo.Name;
                    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Connection opened successfully, about to send SQL command: " + sqlString));
                    var cmd = DatabaseCommandHelper.GetCommand(sqlString, con);
                    var da  = DatabaseCommandHelper.GetDataAdapter(cmd);

                    var sw = new Stopwatch();

                    sw.Start();
                    DataTable dt = new DataTable();
                    da.Fill(dt);

                    dt.TableName = GetTableName(_destinationDatabase.Server.GetQuerySyntaxHelper().GetSensibleTableNameFromString(lookup.TableInfo.Name));

                    var tableLoadInfo = dataLoadInfo.CreateTableLoadInfo("", dt.TableName, new[] { new DataSource(sqlString, DateTime.Now) }, -1);
                    tableLoadInfo.Inserts = dt.Rows.Count;

                    listener.OnProgress(this, new ProgressEventArgs("Reading from Lookup " + lookup.TableInfo.Name, new ProgressMeasurement(dt.Rows.Count, ProgressType.Records), sw.Elapsed));
                    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Decided on the following destination table name for Lookup: " + dt.TableName));

                    tempDestination.AllowResizingColumnsAtUploadTime = true;
                    tempDestination.PreInitialize(GetDestinationDatabase(listener), listener);
                    tempDestination.ProcessPipelineData(dt, listener, new GracefulCancellationToken());
                    tempDestination.Dispose(listener, null);

                    //end auditing it
                    tableLoadInfo.CloseAndArchive();

                    if (_request is ExtractDatasetCommand)
                    {
                        var result             = (_request as ExtractDatasetCommand).CumulativeExtractionResults;
                        var supplementalResult = result.AddSupplementalExtractionResult("SELECT * FROM " + lookup.TableInfo.Name, lookup.TableInfo);
                        supplementalResult.CompleteAudit(this.GetType(), TargetDatabaseServer.ID + "|" + GetDatabaseName() + "|" + dt.TableName, dt.Rows.Count);
                    }
                }
            }
            catch (Exception e)
            {
                listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Error, "Extraction of Lookup " + lookup.TableInfo.Name + " failed ", e));
                throw;
            }
        }
        private DataTableUploadDestination PrepareDestination(IDataLoadEventListener listener, DataTable toProcess)
        {
            //see if the user has entered an extraction server/database
            if (TargetDatabaseServer == null)
            {
                throw new Exception("TargetDatabaseServer (the place you want to extract the project data to) property has not been set!");
            }

            try
            {
                if (!_destinationDatabase.Exists())
                {
                    _destinationDatabase.Create();
                }

                if (_request is ExtractGlobalsCommand)
                {
                    return null;
                }

                var tblName = toProcess.TableName;

                //See if the table already exists on the server (likely to cause problems, e.g. duplicated data or a schema that no longer matches the configuration)
                if (_destinationDatabase.ExpectTable(tblName).Exists())
                {
                    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning,
                                                                "A table called " + tblName + " already exists on server " + TargetDatabaseServer +
                                                                ", data load might crash if it is populated and/or has an incompatible schema"));
                }
                else
                {
                    _tableDidNotExistAtStartOfLoad = true;
                }
            }
            catch (Exception e)
            {
                //Probably the database didn't exist or the credentials were wrong or something
                listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Error, "Failed to inspect destination for already existing datatables", e));
            }

            _destination = new DataTableUploadDestination();

            PrimeDestinationTypesBasedOnCatalogueTypes(toProcess);

            _destination.AllowResizingColumnsAtUploadTime = true;
            _destination.AlterTimeout = AlterTimeout;

            _destination.PreInitialize(_destinationDatabase, listener);

            return _destination;
        }
        protected override void Open(DataTable toProcess, IDataLoadEventListener job, GracefulCancellationToken cancellationToken)
        {
            _toProcess = toProcess;

            //give the data table the correct name
            if (_toProcess.ExtendedProperties.ContainsKey("ProperlyNamed") && _toProcess.ExtendedProperties["ProperlyNamed"].Equals(true))
            {
                _isTableAlreadyNamed = true;
            }

            _toProcess.TableName = GetTableName();

            _destination = PrepareDestination(job, _toProcess);
            OutputFile   = _toProcess.TableName;
        }
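        // The "ProperlyNamed" check in Open relies on an upstream pipeline component
        // tagging the DataTable before it reaches this destination. A minimal sketch of
        // that convention (this helper is hypothetical, for illustration only):
        private static void MarkAsProperlyNamed(DataTable dt, string properName)
        {
            dt.TableName = properName;
            dt.ExtendedProperties["ProperlyNamed"] = true; //tells the destination not to rename the table
        }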
        public DataTable ProcessPipelineData(DataTable toProcess, IDataLoadEventListener listener, GracefulCancellationToken cancellationToken)
        {
            _request.ElevateState(ExtractCommandState.WritingToFile);
            _toProcess = toProcess;

            _destinationDatabase = GetDestinationDatabase(listener);

            //give the data table the correct name
            if (toProcess.ExtendedProperties.ContainsKey("ProperlyNamed") && toProcess.ExtendedProperties["ProperlyNamed"].Equals(true))
            {
                _isTableAlreadyNamed = true;
            }

            _toProcess.TableName = GetTableName();

            if (_destination == null)
            {
                _destination = PrepareDestination(listener, toProcess);
            }

            if (TableLoadInfo == null)
            {
                TableLoadInfo = new TableLoadInfo(_dataLoadInfo, "", _toProcess.TableName, new[] { new DataSource(_request.DescribeExtractionImplementation(), DateTime.Now) }, -1);
            }

            if (TableLoadInfo.IsClosed) // Maybe it was open and it crashed?
            {
                throw new Exception("TableLoadInfo was closed so could not write number of rows (" + toProcess.Rows.Count + ") to audit object - most likely the extraction crashed?");
            }

            if (_request is ExtractDatasetCommand && !haveExtractedBundledContent)
            {
                WriteBundleContents(((ExtractDatasetCommand)_request).DatasetBundle, listener, cancellationToken);
            }

            if (_request is ExtractGlobalsCommand)
            {
                ExtractGlobals((ExtractGlobalsCommand)_request, listener, _dataLoadInfo);
                return null;
            }

            _destination.ProcessPipelineData(toProcess, listener, cancellationToken);
            TableLoadInfo.Inserts += toProcess.Rows.Count;

            return null;
        }
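        // Returning null is the usual convention for a pipeline destination: there is no
        // downstream component to receive the chunk. Note that TableLoadInfo.Inserts is
        // incremented per call because ProcessPipelineData runs once per batch.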
Example #6
        public void Test_SingleFile(bool expressRelative)
        {
            var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer);

            var source = new DicomFileCollectionSource {
                FilenameField = "RelativeFileArchiveURI"
            };

            if (expressRelative)
            {
                source.ArchiveRoot = TestContext.CurrentContext.TestDirectory;
            }

            var f = new FlatFileToLoad(new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, @"TestData/IM-0001-0013.dcm")));

            source.PreInitialize(new FlatFileToLoadDicomFileWorklist(f), new ThrowImmediatelyDataLoadEventListener());

            var tbl         = source.GetChunk(new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken());
            var destination = new DataTableUploadDestination();

            destination.PreInitialize(db, new ThrowImmediatelyDataLoadEventListener());
            destination.AllowResizingColumnsAtUploadTime = true;
            destination.ProcessPipelineData(tbl, new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken());
            destination.Dispose(new ThrowImmediatelyDataLoadEventListener(), null);

            var finalTable = db.ExpectTable(destination.TargetTableName);

            using (var dt = finalTable.GetDataTable())
            {
                //should be 1 row in the final table
                Assert.AreEqual(1, dt.Rows.Count);

                //the path referenced should be the file read in relative/absolute format
                Assert.AreEqual(expressRelative ? "./TestData/IM-0001-0013.dcm":
                                f.File.FullName.Replace('\\', '/')
                                , dt.Rows[0]["RelativeFileArchiveURI"]);
            }

            Assert.IsTrue(finalTable.Exists());
            finalTable.Drop();
        }
Example #7
        private DiscoveredTable UploadTestDataAsTableToServer(DatabaseType type, out ICatalogue catalogue, out ExtractionInformation[] extractionInformations, out ITableInfo tableinfo)
        {
            var listener = new ThrowImmediatelyDataLoadEventListener();

            var db = GetCleanedServer(type);

            var data = GetTestDataTable();

            var uploader = new DataTableUploadDestination();

            uploader.PreInitialize(db, listener);
            uploader.ProcessPipelineData(data, listener, new GracefulCancellationToken());
            uploader.Dispose(listener, null);
            var tbl = db.ExpectTable(uploader.TargetTableName);

            Assert.IsTrue(tbl.Exists());

            catalogue = Import(tbl, out tableinfo, out _, out _, out extractionInformations);

            return tbl;
        }
        public void PipelineTest()
        {
            var source = new DicomFileCollectionSource();

            source.FilenameField = "RelativeFileArchiveURI";

            var f = new FlatFileToLoad(new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, @"TestData\IM-0001-0013.dcm")));

            source.PreInitialize(new FlatFileToLoadDicomFileWorklist(f), new ThrowImmediatelyDataLoadEventListener());

            var tbl         = source.GetChunk(new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken());
            var destination = new DataTableUploadDestination();

            destination.PreInitialize(DiscoveredDatabaseICanCreateRandomTablesIn, new ThrowImmediatelyDataLoadEventListener());
            destination.AllowResizingColumnsAtUploadTime = true;
            destination.ProcessPipelineData(tbl, new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken());
            destination.Dispose(new ThrowImmediatelyDataLoadEventListener(), null);

            var finalTable = DiscoveredDatabaseICanCreateRandomTablesIn.ExpectTable(destination.TargetTableName);

            Assert.IsTrue(finalTable.Exists());
            finalTable.Drop();
        }
        public void Test_DatabaseTypeQueryWithParameter_IntParameter(DatabaseType dbType)
        {
            //Pick the destination server
            var tableName = TestDatabaseNames.GetConsistentName("tbl");

            //make sure there's a database ready to receive the data
            var db = GetCleanedServer(dbType);

            db.Create(true);


            //this is the table we are uploading
            var dt = new DataTable();

            dt.Columns.Add("numbercol");
            dt.Rows.Add(10);
            dt.Rows.Add(15);
            dt.Rows.Add(20);
            dt.Rows.Add(25);
            dt.TableName = tableName;
            try
            {
                ///////////////////////UPLOAD THE DataTable TO THE DESTINATION////////////////////////////////////////////
                var uploader = new DataTableUploadDestination();
                uploader.PreInitialize(db, new ThrowImmediatelyDataLoadJob());
                uploader.ProcessPipelineData(dt, new ThrowImmediatelyDataLoadJob(), new GracefulCancellationToken());
                uploader.Dispose(new ThrowImmediatelyDataLoadJob(), null);

                var tbl = db.ExpectTable(tableName);

                var importer = new TableInfoImporter(CatalogueRepository, tbl);
                importer.DoImport(out var ti, out var ci);

                var engineer = new ForwardEngineerCatalogue(ti, ci, true);
                engineer.ExecuteForwardEngineering(out var cata, out var cis, out var ei);
                /////////////////////////////////////////////////////////////////////////////////////////////////////////

                /////////////////////////////////THE ACTUAL PROPER TEST////////////////////////////////////
                //create an extraction filter
                var extractionInformation = ei.Single();
                var filter = new ExtractionFilter(CatalogueRepository, "Filter by numbers", extractionInformation);
                filter.WhereSQL = extractionInformation.SelectSQL + " = @n";
                filter.SaveToDatabase();

                //create the parameters for filter (no globals, masters or scope adjacent parameters)
                new ParameterCreator(filter.GetFilterFactory(), null, null).CreateAll(filter, null);

                var p = filter.GetAllParameters().Single();
                Assert.AreEqual("@n", p.ParameterName);
                p.ParameterSQL = p.ParameterSQL.Replace("varchar(50)", "int"); //make it int
                p.Value        = "20";
                p.SaveToDatabase();

                var qb = new QueryBuilder(null, null);
                qb.AddColumn(extractionInformation);
                qb.RootFilterContainer = new SpontaneouslyInventedFilterContainer(new MemoryCatalogueRepository(), null, new[] { filter }, FilterContainerOperation.AND);

                using (var con = db.Server.GetConnection())
                {
                    con.Open();

                    string sql = qb.SQL;

                    var cmd = db.Server.GetCommand(sql, con);
                    var r   = cmd.ExecuteReader();
                    Assert.IsTrue(r.Read());
                    Assert.AreEqual(
                        20,
                        r[extractionInformation.GetRuntimeName()]);
                }
                ///////////////////////////////////////////////////////////////////////////////////////
            }
            finally
            {
                db.Drop();
            }
        }
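        // A note on p.ParameterSQL.Replace("varchar(50)", "int") above: ParameterCreator
        // generates a default declaration for each @parameter it finds in the WhereSQL,
        // which for SQL Server (hedging on the exact text, which may vary by version)
        // looks something like:
        //
        //     DECLARE @n AS varchar(50);
        //
        // Rewriting the declared type to int before setting Value = "20" makes the
        // WHERE comparison numeric rather than textual.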
Example #10
        private void CopyCohortToDataServer(IDataLoadEventListener listener, GracefulCancellationToken cancellationToken)
        {
            DataTable cohortDataTable = null;

            SetServer();

            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "About to wait for Semaphore OneCrossServerExtractionAtATime to become available"));
            OneCrossServerExtractionAtATime.WaitOne(-1);
            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Captured Semaphore OneCrossServerExtractionAtATime"));

            try
            {
                IExtractableCohort cohort = Request.ExtractableCohort;
                cohortDataTable = cohort.FetchEntireCohort();
            }
            catch (Exception e)
            {
                throw new Exception("An error occurred while trying to download the cohort from the Cohort server (in preparation for transfering it to the data server for linkage and extraction)", e);
            }

            //make sure tempdb exists (this covers you for servers where it doesn't exist e.g. mysql or when user has specified a different database name)
            if (!_tempDb.Exists())
            {
                if (CreateAndDestroyTemporaryDatabaseIfNotExists)
                {
                    _tempDb.Create();
                    _hadToCreate = true;
                }
                else
                {
                    throw new Exception("Database '" + _tempDb + "' did not exist on server '" + _server + "' and CreateAndDestroyTemporaryDatabaseIfNotExists was false");
                }
            }
            else
            {
                _hadToCreate = false;
            }

            var tbl = _tempDb.ExpectTable(cohortDataTable.TableName);

            if (tbl.Exists())
            {
                listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "Found existing table called '" + tbl + "' in '" + _tempDb + "'"));

                if (DropExistingCohortTableIfExists)
                {
                    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "About to drop existing table '" + tbl + "'"));

                    try
                    {
                        tbl.Drop();
                        listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "Dropped existing table '" + tbl + "'"));
                    }
                    catch (Exception ex)
                    {
                        listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "Warning dropping '" + tbl + "' failed", ex));
                    }
                }
                else
                {
                    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "'" + _tempDb + "' contains a table called '" + tbl + "' and DropExistingCohortTableIfExists is false"));
                }
            }

            var destination = new DataTableUploadDestination();

            destination.PreInitialize(_tempDb, listener);
            destination.ProcessPipelineData(cohortDataTable, listener, cancellationToken);
            destination.Dispose(listener, null);

            if (!tbl.Exists())
            {
                throw new Exception("Table '" + tbl + "' did not exist despite DataTableUploadDestination completing Successfully!");
            }

            tablesToCleanup.Add(tbl);

            //table will now be in tempdb
            _haveCopiedCohortAndAdjustedSql = true;
        }
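        // OneCrossServerExtractionAtATime is waited on above but not declared in this
        // snippet. It is presumably a semaphore limiting cross-server extractions to one
        // at a time; a minimal sketch of such a field (an assumption, not the original
        // declaration):
        private static readonly Semaphore OneCrossServerExtractionAtATime = new Semaphore(1, 1);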
        protected override void Open(DataTable toProcess, IDataLoadEventListener job, GracefulCancellationToken cancellationToken)
        {
            _toProcess   = toProcess;
            _destination = PrepareDestination(job, toProcess);
            OutputFile   = toProcess.TableName;
        }
Example #12
        public void Test_ZipFile(bool expressRelative)
        {
            //get a clean database to upload to
            var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer);

            //create a folder in which to generate some dicoms
            var dirToLoad = new DirectoryInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, nameof(Test_ZipFile)));

            if (dirToLoad.Exists)
            {
                dirToLoad.Delete(true);
            }

            dirToLoad.Create();

            //generate some random dicoms
            var r = new Random(999);
            DicomDataGenerator generator = new DicomDataGenerator(r, dirToLoad, "CT")
            {
                MaximumImages = 5
            };
            var people = new PersonCollection();

            people.GeneratePeople(1, r);
            generator.GenerateTestDataFile(people, new FileInfo("./inventory.csv"), 1);

            //This generates
            // Test_ZipFile
            //      2015
            //          3
            //              18
            //                  751140 2.25.166922918107154891877498685128076062226.dcm
            //                  751140 2.25.179610809676265137473873365625829826423.dcm
            //                  751140 2.25.201969634959506849065133495434871450465.dcm
            //                  751140 2.25.237492679533001779093365416814254319890.dcm
            //                  751140 2.25.316241631782653383510844072713132248731.dcm

            var yearDir = dirToLoad.GetDirectories().Single();

            StringAssert.IsMatch("\\d{4}", yearDir.Name);

            //zip them up
            FileInfo zip = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, nameof(Test_ZipFile) + ".zip"));

            if (zip.Exists)
            {
                zip.Delete();
            }

            ZipFile.CreateFromDirectory(dirToLoad.FullName, zip.FullName);

            //tell the source to load the zip
            var f = new FlatFileToLoad(zip);

            var source = new DicomFileCollectionSource {
                FilenameField = "RelativeFileArchiveURI"
            };

            if (expressRelative)
            {
                source.ArchiveRoot = TestContext.CurrentContext.TestDirectory;
            }

            source.PreInitialize(new FlatFileToLoadDicomFileWorklist(f), new ThrowImmediatelyDataLoadEventListener());

            var tbl         = source.GetChunk(new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken());
            var destination = new DataTableUploadDestination();

            destination.PreInitialize(db, new ThrowImmediatelyDataLoadEventListener());
            destination.AllowResizingColumnsAtUploadTime = true;
            destination.ProcessPipelineData(tbl, new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken());
            destination.Dispose(new ThrowImmediatelyDataLoadEventListener(), null);

            var finalTable = db.ExpectTable(destination.TargetTableName);

            using (var dt = finalTable.GetDataTable())
            {
                //should be 5 rows in the final table (5 images)
                Assert.AreEqual(5, dt.Rows.Count);

                string pathInDbToDicomFile = (string)dt.Rows[0]["RelativeFileArchiveURI"];

                //We expect either something like:
                // E:/RdmpDicom/Rdmp.Dicom.Tests/bin/Debug/netcoreapp2.2/Test_ZipFile.zip!2015/3/18/2.25.160787663560951826149226183314694084702.dcm
                // ./Test_ZipFile.zip!2015/3/18/2.25.105592977437473375573190160334447272386.dcm

                //the path referenced should be the file read in relative/absolute format
                StringAssert.IsMatch(
                    expressRelative ? $@"./{zip.Name}![\d./]*.dcm":
                    $@"{Regex.Escape(zip.FullName.Replace('\\','/'))}![\d./]*.dcm",
                    pathInDbToDicomFile);

                StringAssert.Contains(yearDir.Name, pathInDbToDicomFile, "Expected zip file to have subdirectories and for them to be loaded correctly");

                //confirm we can read that out again
                using (var pool = new ZipPool())
                {
                    var path = new AmbiguousFilePath(TestContext.CurrentContext.TestDirectory, pathInDbToDicomFile);
                    Assert.IsNotNull(path.GetDataset(pool));
                }
            }

            Assert.IsTrue(finalTable.Exists());
            finalTable.Drop();
        }
Example #13
        public void Test_ZipFileNotation(bool expressRelative)
        {
            //get a clean database to upload to
            var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer);

            //create a folder in which to generate some dicoms
            var dirToLoad = new DirectoryInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, nameof(Test_ZipFileNotation)));

            if (dirToLoad.Exists)
            {
                dirToLoad.Delete(true);
            }

            dirToLoad.Create();

            //generate some random dicoms
            var r = new Random(999);
            DicomDataGenerator generator = new DicomDataGenerator(r, dirToLoad, "CT")
            {
                MaximumImages = 5
            };
            var people = new PersonCollection();

            people.GeneratePeople(1, r);
            generator.GenerateTestDataFile(people, new FileInfo("./inventory.csv"), 1);

            //This generates
            // Test_ZipFile
            //      2015
            //          3
            //              18
            //                  751140 2.25.166922918107154891877498685128076062226.dcm
            //                  751140 2.25.179610809676265137473873365625829826423.dcm
            //                  751140 2.25.201969634959506849065133495434871450465.dcm
            //                  751140 2.25.237492679533001779093365416814254319890.dcm
            //                  751140 2.25.316241631782653383510844072713132248731.dcm

            var yearDir = dirToLoad.GetDirectories().Single();

            StringAssert.IsMatch("\\d{4}", yearDir.Name);

            //should be 5 images in the zip file
            var dicomFiles = yearDir.GetFiles("*.dcm", SearchOption.AllDirectories);

            Assert.AreEqual(5, dicomFiles.Length);

            //e.g. \2015\3\18\2.25.223398837779449245317520567111874824918.dcm
            //e.g. \2015\3\18\2.25.179610809676265137473873365625829826423.dcm
            var relativePathWithinZip1 = dicomFiles[0].FullName.Substring(dirToLoad.FullName.Length);
            var relativePathWithinZip2 = dicomFiles[1].FullName.Substring(dirToLoad.FullName.Length);

            //zip them up
            FileInfo zip = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, nameof(Test_ZipFile) + ".zip"));

            if (zip.Exists)
            {
                zip.Delete();
            }

            ZipFile.CreateFromDirectory(dirToLoad.FullName, zip.FullName);

            //e.g. E:\RdmpDicom\Rdmp.Dicom.Tests\bin\Debug\netcoreapp2.2\Test_ZipFile.zip!\2015\3\18\2.25.223398837779449245317520567111874824918.dcm
            string pathToLoad1 = zip.FullName + "!" + relativePathWithinZip1;
            string pathToLoad2 = zip.FullName + "!" + relativePathWithinZip2;

            var loadMeTextFile = new FileInfo(Path.Combine(dirToLoad.FullName, "LoadMe.txt"));

            //tell the source to load the zip
            File.WriteAllText(loadMeTextFile.FullName, string.Join(Environment.NewLine, pathToLoad1, pathToLoad2));

            var f = new FlatFileToLoad(loadMeTextFile);

            //Setup source
            var source = new DicomFileCollectionSource {
                FilenameField = "RelativeFileArchiveURI"
            };

            if (expressRelative)
            {
                source.ArchiveRoot = TestContext.CurrentContext.TestDirectory;
            }

            var worklist = new FlatFileToLoadDicomFileWorklist(f);

            //Setup destination
            var destination = new DataTableUploadDestination {
                AllowResizingColumnsAtUploadTime = true
            };

            //setup pipeline
            var contextFactory = new DataFlowPipelineContextFactory <DataTable>();
            var context        = contextFactory.Create(PipelineUsage.FixedSource | PipelineUsage.FixedDestination);

            //run pipeline
            var pipe = new DataFlowPipelineEngine <DataTable>(context, source, destination, new ThrowImmediatelyDataLoadEventListener());

            pipe.Initialize(db, worklist);
            pipe.ExecutePipeline(new GracefulCancellationToken());

            var finalTable = db.ExpectTable(destination.TargetTableName);

            using (var dt = finalTable.GetDataTable())
            {
                //should be 2 rows (since we told it to only load 2 files out of the zip)
                Assert.AreEqual(2, dt.Rows.Count);

                string pathInDbToDicomFile = (string)dt.Rows[0]["RelativeFileArchiveURI"];

                //We expect either something like:
                // E:/RdmpDicom/Rdmp.Dicom.Tests/bin/Debug/netcoreapp2.2/Test_ZipFile.zip!2015/3/18/2.25.160787663560951826149226183314694084702.dcm
                // ./Test_ZipFile.zip!2015/3/18/2.25.105592977437473375573190160334447272386.dcm

                //the path referenced should be the file read in relative/absolute format
                StringAssert.IsMatch(
                    expressRelative ? $@"./{zip.Name}![\d./]*.dcm":
                    $@"{Regex.Escape(zip.FullName.Replace('\\','/'))}![\d./]*.dcm",
                    pathInDbToDicomFile);

                StringAssert.Contains(yearDir.Name, pathInDbToDicomFile, "Expected zip file to have subdirectories and for them to be loaded correctly");

                //confirm we can read that out again
                using (var pool = new ZipPool())
                {
                    var path = new AmbiguousFilePath(TestContext.CurrentContext.TestDirectory, pathInDbToDicomFile);
                    Assert.IsNotNull(path.GetDataset(pool));
                }
            }

            Assert.IsTrue(finalTable.Exists());
            finalTable.Drop();
        }
        private ExtractCommandState ExtractSupportingSql(SupportingSQLTable sql, IDataLoadEventListener listener, DataLoadInfo dataLoadInfo)
        {
            try
            {
                var tempDestination = new DataTableUploadDestination();

                listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "About to download SQL for global SupportingSQL " + sql.Name));
                using (var con = sql.GetServer().GetConnection())
                {
                    con.Open();
                    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Connection opened successfully, about to send SQL command " + sql.SQL));
                    var cmd = DatabaseCommandHelper.GetCommand(sql.SQL, con);
                    var da  = DatabaseCommandHelper.GetDataAdapter(cmd);

                    var sw = new Stopwatch();

                    sw.Start();
                    DataTable dt = new DataTable();
                    da.Fill(dt);

                    dt.TableName = GetTableName(_destinationDatabase.Server.GetQuerySyntaxHelper().GetSensibleTableNameFromString(sql.Name));

                    var tableLoadInfo = dataLoadInfo.CreateTableLoadInfo("", dt.TableName, new[] { new DataSource(sql.SQL, DateTime.Now) }, -1);
                    tableLoadInfo.Inserts = dt.Rows.Count;

                    listener.OnProgress(this, new ProgressEventArgs("Reading from SupportingSQL " + sql.Name, new ProgressMeasurement(dt.Rows.Count, ProgressType.Records), sw.Elapsed));
                    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Decided on the following destination table name for SupportingSQL: " + dt.TableName));

                    tempDestination.AllowResizingColumnsAtUploadTime = true;
                    tempDestination.PreInitialize(GetDestinationDatabase(listener), listener);
                    tempDestination.ProcessPipelineData(dt, listener, new GracefulCancellationToken());
                    tempDestination.Dispose(listener, null);

                    //end auditing it
                    tableLoadInfo.CloseAndArchive();

                    if (_request is ExtractDatasetCommand)
                    {
                        var result             = (_request as ExtractDatasetCommand).CumulativeExtractionResults;
                        var supplementalResult = result.AddSupplementalExtractionResult(sql.SQL, sql);
                        supplementalResult.CompleteAudit(this.GetType(), TargetDatabaseServer.ID + "|" + GetDatabaseName() + "|" + dt.TableName, dt.Rows.Count);
                    }
                    else
                    {
                        var extractGlobalsCommand = (_request as ExtractGlobalsCommand);
                        Debug.Assert(extractGlobalsCommand != null, "extractGlobalsCommand != null");
                        var result =
                            new SupplementalExtractionResults(extractGlobalsCommand.RepositoryLocator.DataExportRepository,
                                                              extractGlobalsCommand.Configuration,
                                                              sql.SQL,
                                                              sql);
                        result.CompleteAudit(this.GetType(), TargetDatabaseServer.ID + "|" + GetDatabaseName() + "|" + dt.TableName, dt.Rows.Count);
                        extractGlobalsCommand.ExtractionResults.Add(result);
                    }
                }
            }
            catch (Exception e)
            {
                listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Error, "Extraction of SupportingSQL " + sql + " failed ", e));
                return ExtractCommandState.Crashed;
            }

            return ExtractCommandState.Completed;
        }