Example #1
        /// <summary>
        /// Extracts the <paramref name="doc"/> into the supplied <paramref name="directory"/> (unless overridden to put it somewhere else)
        /// </summary>
        /// <param name="doc"></param>
        /// <param name="directory"></param>
        /// <param name="listener"></param>
        /// <returns></returns>
        protected virtual bool TryExtractSupportingDocument(SupportingDocument doc, DirectoryInfo directory, IDataLoadEventListener listener)
        {
            SupportingDocumentsFetcher fetcher = new SupportingDocumentsFetcher(doc);

            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Preparing to copy " + doc + " to directory " + directory.FullName));
            try
            {
                var outputPath = fetcher.ExtractToDirectory(directory);
                if (_request is ExtractDatasetCommand)
                {
                    var result             = (_request as ExtractDatasetCommand).CumulativeExtractionResults;
                    var supplementalResult = result.AddSupplementalExtractionResult(null, doc);
                    supplementalResult.CompleteAudit(this.GetType(), outputPath, 0);
                }
                else
                {
                    var extractGlobalsCommand = (_request as ExtractGlobalsCommand);
                    Debug.Assert(extractGlobalsCommand != null, "extractGlobalsCommand != null");
                    var result = new SupplementalExtractionResults(extractGlobalsCommand.RepositoryLocator.DataExportRepository,
                                                                   extractGlobalsCommand.Configuration,
                                                                   null,
                                                                   doc);
                    result.CompleteAudit(this.GetType(), outputPath, 0);
                    extractGlobalsCommand.ExtractionResults.Add(result);
                }

                return(true);
            }
            catch (Exception e)
            {
                listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Error, "Failed to copy file " + doc + " to directory " + directory.FullName, e));
                return(false);
            }
        }
        private void WriteBundleContents(IExtractableDatasetBundle datasetBundle, IDataLoadEventListener listener, GracefulCancellationToken cancellationToken)
        {
            var bundle = ((ExtractDatasetCommand)_request).DatasetBundle;

            foreach (var sql in bundle.SupportingSQL)
            {
                bundle.States[sql] = ExtractSupportingSql(sql, listener, _dataLoadInfo);
            }

            foreach (var document in ((ExtractDatasetCommand)_request).DatasetBundle.Documents)
            {
                bundle.States[document] = ExtractSupportingDocument(_request.GetExtractionDirectory(), document, listener);
            }

            //extract lookups
            foreach (BundledLookupTable lookup in datasetBundle.LookupTables)
            {
                try
                {
                    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "About to extract lookup " + lookup));

                    ExtractLookupTableSql(lookup, listener, _dataLoadInfo);

                    datasetBundle.States[lookup] = ExtractCommandState.Completed;
                }
                catch (Exception e)
                {
                    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Error, "Error occurred trying to extract lookup " + lookup + " on server " + lookup.TableInfo.Server, e));

                    datasetBundle.States[lookup] = ExtractCommandState.Crashed;
                }
            }

            haveExtractedBundledContent = true;
        }
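All of the snippets on this page report through the IDataLoadEventListener passed into them. As a point of reference, below is a minimal console-backed listener; it is a sketch only, and the property names read from the event args (ProgressEventType, Message, Exception, TaskDescription, Progress, TimeSpentProcessingSoFar) are assumed to mirror the constructor arguments used in the examples, not verified API.

        // Minimal sketch of an IDataLoadEventListener that echoes events to the
        // console. Property names on the event args are assumptions inferred from
        // the constructor calls in the examples (severity + message + optional
        // exception; task description + measurement + elapsed time).
        public class ConsoleDataLoadEventListener : IDataLoadEventListener
        {
            public void OnNotify(object sender, NotifyEventArgs e)
            {
                Console.WriteLine($"[{e.ProgressEventType}] {e.Message}");

                if (e.Exception != null)
                {
                    Console.WriteLine(e.Exception);
                }
            }

            public void OnProgress(object sender, ProgressEventArgs e)
            {
                Console.WriteLine($"{e.TaskDescription}: {e.Progress.Value} ({e.TimeSpentProcessingSoFar})");
            }
        }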
Example #3
        private ExitCodeType DownloadFilesOnFTP(ILoadDirectory destination, IDataLoadEventListener listener)
        {
            string[] files = GetFileList();

            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, files.Aggregate("Identified the following files on the FTP server:", (s, f) => s + f + ",").TrimEnd(',')));

            bool forLoadingContainedCachedFiles = false;

            foreach (string file in files)
            {
                var action = GetSkipActionForFile(file, destination);

                listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "File " + file + " was evaluated as " + action));
                if (action == SkipReason.DoNotSkip)
                {
                    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "About to download " + file));
                    Download(file, destination, listener);
                }

                if (action == SkipReason.InForLoading)
                {
                    forLoadingContainedCachedFiles = true;
                }
            }

            //if no files were downloaded (and none were skipped because they were already in forLoading) and our flag says to return LoadNotRequired in that eventuality, then do so
            if (!forLoadingContainedCachedFiles && !_filesRetrieved.Any() && SendLoadNotRequiredIfFileNotFound)
            {
                listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Could not find any files on the remote server worth downloading, so returning LoadNotRequired"));
                return(ExitCodeType.OperationNotRequired);
            }

            //otherwise it was a success - even if no files were actually retrieved... that is what the user asked for, otherwise they would have set SendLoadNotRequiredIfFileNotFound
            return(ExitCodeType.Success);
        }
Example #4
        public virtual void LoadCompletedSoDispose(ExitCodeType exitCode, IDataLoadEventListener postLoadEventListener)
        {
            if (exitCode == ExitCodeType.Success && DeleteFilesOffFTPServerAfterSuccesfulDataLoad)
            {
                foreach (string file in _filesRetrieved)
                {
                    FtpWebRequest reqFTP = (FtpWebRequest)FtpWebRequest.Create(new Uri(file));
                    reqFTP.Credentials = new NetworkCredential(_username, _password);
                    reqFTP.KeepAlive   = false;
                    reqFTP.Method      = WebRequestMethods.Ftp.DeleteFile;
                    reqFTP.UseBinary   = true;
                    reqFTP.Proxy       = null;
                    reqFTP.UsePassive  = true;
                    reqFTP.EnableSsl   = _useSSL;

                    FtpWebResponse response = (FtpWebResponse)reqFTP.GetResponse();

                    if (response.StatusCode != FtpStatusCode.FileActionOK)
                    {
                        postLoadEventListener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "Attempt to delete file at URI " + file + " resulted in response with StatusCode = " + response.StatusCode));
                    }
                    else
                    {
                        postLoadEventListener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Deleted FTP file at URI " + file + " status code was " + response.StatusCode));
                    }

                    response.Close();
                }
            }
        }
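Note that GetResponse above is not wrapped in a using block, so an exception mid-loop would leak the response. A sketch of the same delete call with exception-safe cleanup (no other behaviour changed):

                    // Same status-code handling as above, but the response is
                    // disposed even if an exception is thrown mid-loop.
                    using (FtpWebResponse response = (FtpWebResponse)reqFTP.GetResponse())
                    {
                        if (response.StatusCode != FtpStatusCode.FileActionOK)
                        {
                            postLoadEventListener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "Attempt to delete file at URI " + file + " resulted in response with StatusCode = " + response.StatusCode));
                        }
                        else
                        {
                            postLoadEventListener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Deleted FTP file at URI " + file + " status code was " + response.StatusCode));
                        }
                    }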
Example #5
        public void LoadCompletedSoDispose(ExitCodeType exitCode, IDataLoadEventListener postLoadEventListener)
        {
            if (exitCode == ExitCodeType.Success || exitCode == ExitCodeType.OperationNotRequired)
            {
                int countOfEntriesThatDisapeared = _entriesUnzipped.Count(e => !e.Exists);

                if (countOfEntriesThatDisapeared != 0)
                {
                    postLoadEventListener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning,
                                                                             countOfEntriesThatDisapeared + " of " + _entriesUnzipped.Count + " entries were created by " +
                                                                             GetType().Name +
                                                                             " during unzip phase but had disapeared at cleanup time - following successful data load"));
                }

                //cleanup required
                foreach (FileInfo f in _entriesUnzipped.Where(e => e.Exists))
                {
                    try
                    {
                        f.Delete();
                    }
                    catch (Exception e)
                    {
                        postLoadEventListener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "Could not delete file " + f.FullName, e));
                    }
                }
            }
        }
        private void ResolvePrimaryKeyConflicts(IDataLoadEventListener job)
        {
            using (var con = (SqlConnection)_dbInfo.Server.GetConnection())
            {
                con.Open();

                PrimaryKeyCollisionResolver resolver       = new PrimaryKeyCollisionResolver(TargetTable);
                SqlCommand cmdAreTherePrimaryKeyCollisions = new SqlCommand(resolver.GenerateCollisionDetectionSQL(), con);
                cmdAreTherePrimaryKeyCollisions.CommandTimeout = 5000;

                //if there are no primary key collisions
                if (cmdAreTherePrimaryKeyCollisions.ExecuteScalar().ToString().Equals("0"))
                {
                    job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "No primary key collisions detected"));
                    return;
                }

                //there are primary key collisions so resolve them
                job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "Primary key collisions detected"));

                SqlCommand cmdResolve = new SqlCommand(resolver.GenerateSQL(), con);
                cmdResolve.CommandTimeout = 5000;
                int affectedRows = cmdResolve.ExecuteNonQuery();

                job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "Primary key collisions resolved by deleting " + affectedRows + " rows"));
                con.Close();
            }
        }
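The actual detection and resolution SQL comes from PrimaryKeyCollisionResolver and is not part of this excerpt. Conceptually the detection query counts duplicated primary keys, i.e. something of roughly this shape (hypothetical illustration; the table and column names are made up):

        // Hypothetical shape of GenerateCollisionDetectionSQL's output - the real
        // SQL is produced by PrimaryKeyCollisionResolver from the TargetTable.
        private const string ExampleCollisionDetectionSql = @"
        SELECT COUNT(*)
        FROM (
            SELECT MyPk
            FROM MyTargetTable
            GROUP BY MyPk
            HAVING COUNT(*) > 1
        ) duplicates";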
        public override DataTable GetChunk(IDataLoadEventListener listener, GracefulCancellationToken cancellationToken)
        {
            _sw.Start();

            if (_datasetListWorklist == null)
            {
                listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "Skipping component because _datasetListWorklist is null"));
                return(null);
            }

            var          currentBatch = BatchSize;
            DicomDataset ds;

            var dt = GetDataTable();

            while (currentBatch > 0 && (ds = _datasetListWorklist.GetNextDatasetToProcess(out var filename, out var otherValuesToStoreInRow)) != null)
            {
                // no closure is captured here, so the values can be passed straight through
                ProcessDataset(filename, ds, dt, listener, otherValuesToStoreInRow);
                currentBatch--;
            }

            _sw.Stop();
            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "GetChunk cumulative total time is " + _sw.ElapsedMilliseconds + "ms"));

            return(dt.Rows.Count > 0 ? dt : null);
        }
        private void ProcessDir(string dir, DataTable dt, IDataLoadEventListener listener)
        {
            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, $"Starting '{dir}'"));

            if (File.Exists(dir))
            {
                // the inventory entry is an XML file directly
                XmlToRows(dir, dt, listener);
                return;
            }

            if (!Directory.Exists(dir))
            {
                listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Error, $"'{dir}' was not a Directory or File"));
                return;
            }

            var matches = Directory.GetFiles(dir, SearchPattern, SearchOption.AllDirectories);

            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, $"Found {matches.Length} CFind files in {dir}"));

            foreach (var file in matches)
            {
                XmlToRows(file, dt, listener);

                if (filesRead++ % 10000 == 0)
                {
                    listener.OnProgress(this, new ProgressEventArgs("Reading files", new ProgressMeasurement(filesRead, ProgressType.Records, matches.Length), timer?.Elapsed ?? TimeSpan.Zero));
                }
            }
        }
Example #9
        protected override void PreInitializeImpl(IExtractCommand request, IDataLoadEventListener listener)
        {
            if (_request is ExtractGlobalsCommand)
            {
                listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Request is for the extraction of Globals."));
                OutputFile = _request.GetExtractionDirectory().FullName;
                return;
            }

            switch (FlatFileType)
            {
            case ExecuteExtractionToFlatFileType.CSV:
                OutputFile = Path.Combine(DirectoryPopulated.FullName, GetFilename() + ".csv");
                if (request.Configuration != null)
                {
                    _output = new CSVOutputFormat(OutputFile, request.Configuration.Separator, DateFormat);
                }
                else
                {
                    _output = new CSVOutputFormat(OutputFile, ",", DateFormat);
                }
                break;

            default:
                throw new ArgumentOutOfRangeException();
            }

            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Setup data extraction destination as " + OutputFile + " (will not exist yet)"));
        }
        protected override void TryExtractSupportingSQLTableImpl(SupportingSQLTable sqlTable, DirectoryInfo directory, IExtractionConfiguration configuration, IDataLoadEventListener listener, out int linesWritten,
                                                                 out string destinationDescription)
        {
            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "About to download SQL for global SupportingSQL " + sqlTable.SQL));
            using (var con = sqlTable.GetServer().GetConnection())
            {
                con.Open();
                listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Connection opened successfully, about to send SQL command " + sqlTable.SQL));
                var cmd = DatabaseCommandHelper.GetCommand(sqlTable.SQL, con);
                var da  = DatabaseCommandHelper.GetDataAdapter(cmd);

                var sw = new Stopwatch();

                sw.Start();
                DataTable dt = new DataTable();
                da.Fill(dt);

                dt.TableName = GetTableName(_destinationDatabase.Server.GetQuerySyntaxHelper().GetSensibleEntityNameFromString(sqlTable.Name));
                linesWritten = dt.Rows.Count;

                var destinationDb = GetDestinationDatabase(listener);
                var tbl           = destinationDb.ExpectTable(dt.TableName);

                if (tbl.Exists())
                {
                    tbl.Drop();
                }

                destinationDb.CreateTable(dt.TableName, dt);
                destinationDescription = TargetDatabaseServer.ID + "|" + GetDatabaseName() + "|" + dt.TableName;
            }
        }
        public override void Dispose(IDataLoadEventListener listener, Exception pipelineFailureExceptionIfAny)
        {
            if (_destination != null)
            {
                _destination.Dispose(listener, pipelineFailureExceptionIfAny);

                //if the extraction failed, the table didn't exist in the destination (i.e. the table was created during the extraction) and we are to DropTableIfLoadFails
                if (pipelineFailureExceptionIfAny != null && _tableDidNotExistAtStartOfLoad && DropTableIfLoadFails)
                {
                    if (_destinationDatabase != null)
                    {
                        var tbl = _destinationDatabase.ExpectTable(_toProcess.TableName);

                        if (tbl.Exists())
                        {
                            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "DropTableIfLoadFails is true so about to drop table " + tbl));
                            tbl.Drop();
                            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "Dropped table " + tbl));
                        }
                    }
                }
            }

            TableLoadInfo?.CloseAndArchive();

            // also close off the cumulative extraction result
            if (_request is ExtractDatasetCommand)
            {
                var result = ((IExtractDatasetCommand)_request).CumulativeExtractionResults;
                if (result != null && _toProcess != null)
                {
                    result.CompleteAudit(this.GetType(), GetDestinationDescription(), TableLoadInfo.Inserts);
                }
            }
        }
Example #12
        public void StartDaemon(IDataLoadEventListener listener, GracefulCancellationToken cancellationToken)
        {
            const int sleepInSeconds = 60;

            while (!cancellationToken.IsCancellationRequested)
            {
                RetrieveNewDataForCache(listener, cancellationToken);
                listener.OnNotify(this,
                                  new NotifyEventArgs(ProgressEventType.Information, "Sleeping for " + sleepInSeconds + " seconds"));

                // wake up every sleepInSeconds to re-check if we can download any new data, but check more regularly to see if cancellation has been requested
                var beenAsleepFor = new Stopwatch();
                beenAsleepFor.Start();
                while (beenAsleepFor.ElapsedMilliseconds < (sleepInSeconds * 1000))
                {
                    if (cancellationToken.IsCancellationRequested)
                    {
                        listener.OnNotify(this,
                                          new NotifyEventArgs(ProgressEventType.Information, "Cancellation has been requested"));
                        break;
                    }

                    Task.Delay(100).Wait();
                }
            }

            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Daemon has stopped"));
        }
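The inner loop above busy-waits in 100 ms slices so that cancellation is noticed quickly. Assuming GracefulCancellationToken.CreateLinkedSource() (used in Example #15 below) yields a standard CancellationTokenSource, the same effect can be had with one cancellable delay; a sketch:

            // Sketch: replace the 100ms polling loop with a single cancellable
            // delay. Assumes CreateLinkedSource() returns a CancellationTokenSource.
            var token = cancellationToken.CreateLinkedSource().Token;
            try
            {
                Task.Delay(TimeSpan.FromSeconds(sleepInSeconds), token).Wait();
            }
            catch (AggregateException ex) when (ex.InnerException is TaskCanceledException)
            {
                listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Cancellation has been requested"));
            }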
Example #13
        public void PreInitialize(TableInfo target, IDataLoadEventListener listener)
        {
            if (target == null)
            {
                throw new Exception("Without TableInfo we cannot figure out what columns to clean");
            }

            _taskDescription = "Clean Strings " + target.GetRuntimeName() + ":";

            foreach (ColumnInfo col in target.ColumnInfos)
            {
                if (col.Data_type != null && col.Data_type.Contains("char"))
                {
                    columnsToClean.Add(col.GetRuntimeName());
                }
            }
            if (columnsToClean.Any())
            {
                listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information,
                                                            "Preparing to perform clean " + columnsToClean.Count + " string columns (" +
                                                            string.Join(",", columnsToClean) + ") in table " + target.GetRuntimeName()));
            }
            else
            {
                listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information,
                                                            "Skipping CleanString on table " + target.GetRuntimeName() + " because there are no String columns in the table"));
            }
        }
Example #14
        private void GetWhitelist(IDataLoadEventListener listener)
        {
            _whitelist = new HashSet <string>();

            var db     = DataAccessPortal.GetInstance().ExpectDatabase(PatientIdWhitelistColumnInfo.TableInfo, DataAccessContext.DataLoad);
            var server = db.Server;

            var qb = new QueryBuilder("distinct", null);

            qb.AddColumn(new ColumnInfoToIColumn(new MemoryRepository(), PatientIdWhitelistColumnInfo));

            var sql = qb.SQL;

            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Downloading Whitelist with SQL:" + sql));

            using (var con = server.GetConnection())
            {
                con.Open();
                using (var r = server.GetCommand(sql, con).ExecuteReader())
                {
                    while (r.Read())
                    {
                        var o = r[PatientIdWhitelistColumnInfo.GetRuntimeName()];
                        if (o == null || o == DBNull.Value)
                        {
                            continue;
                        }

                        _whitelist.Add(o.ToString());
                    }
                }
            }

            listener.OnNotify(this, new NotifyEventArgs(_whitelist.Count == 0 ? ProgressEventType.Error : ProgressEventType.Information, "Whitelist contained " + _whitelist.Count + " identifiers"));
        }
Example #15
        private void RetrieveNewDataForCache(IDataLoadEventListener listener, GracefulCancellationToken cancellationToken)
        {
            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Retrieving new data"));

            var combinedToken = cancellationToken.CreateLinkedSource().Token;

            // Start a task for each cache download permission window and wait until completion
            var tasks =
                _downloaders.Select(
                    downloader =>
                    Task.Run(() => DownloadUntilFinished(downloader, listener, cancellationToken), combinedToken))
                .ToArray();

            try
            {
                Task.WaitAll(tasks);
            }
            catch (AggregateException e)
            {
                var operationCanceledException = e.GetExceptionIfExists <OperationCanceledException>();
                if (operationCanceledException != null)
                {
                    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "Operation cancelled", e));
                }
                else
                {
                    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Error, "Exception in downloader task whilst caching data", e));
                    throw;
                }
            }
        }
Example #16
        private void AsyncCopyMDFFilesWithEvents(string MDFSource, string MDFDestination, string LDFSource, string LDFDestination, IDataLoadEventListener job)
        {
            Stopwatch s = new Stopwatch();

            s.Start();

            job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Starting copy from " + MDFSource + " to " + MDFDestination));
            job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Starting copy from " + LDFSource + " to " + LDFDestination));

            CopyWithProgress copyMDF = new CopyWithProgress();

            copyMDF.Progress +=
                (size, transferred, streamSize, bytesTransferred, number, reason, file, destinationFile, data) =>
            {
                job.OnProgress(this, new ProgressEventArgs(MDFDestination, new ProgressMeasurement((int)(transferred * 0.001), ProgressType.Kilobytes), s.Elapsed));
                return(CopyWithProgress.CopyProgressResult.PROGRESS_CONTINUE);
            };
            copyMDF.XCopy(MDFSource, MDFDestination);
            s.Reset();
            s.Start();

            CopyWithProgress copyLDF = new CopyWithProgress();

            copyLDF.Progress +=
                (size, transferred, streamSize, bytesTransferred, number, reason, file, destinationFile, data) =>
            {
                job.OnProgress(this, new ProgressEventArgs(LDFDestination, new ProgressMeasurement((int)(transferred * 0.001), ProgressType.Kilobytes), s.Elapsed));
                return(CopyWithProgress.CopyProgressResult.PROGRESS_CONTINUE);
            };
            copyLDF.XCopy(LDFSource, LDFDestination);
            s.Stop();
        }
        private void CloseConnection(IDataLoadEventListener listener)
        {
            if (_isDisposed)
            {
                return;
            }

            _isDisposed = true;
            try
            {
                if (TableLoadInfo != null)
                {
                    TableLoadInfo.CloseAndArchive();
                }

                if (_copy != null)
                {
                    _copy.Dispose();
                }

                if (_recordsWritten == 0)
                {
                    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "Warning, 0 records written by SqlBulkInsertDestination (" + _dbInfo.Server + "," + _dbInfo.GetRuntimeName() + ")"));
                }
            }
            catch (Exception e)
            {
                listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "Could not close connection to server", e));
            }

            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "SqlBulkCopy closed after writing " + _recordsWritten + " rows to the server.  Total time spent writing to server:" + _timer.Elapsed));
        }
        /// <summary>
        /// Called when <see cref="PerPatient"/> is false.  Called once per extraction
        /// </summary>
        public virtual void MoveAll(DirectoryInfo destinationDirectory, IDataLoadEventListener listener, GracefulCancellationToken cancellationToken)
        {
            bool atLeastOne = false;

            foreach (var e in LocationOfFiles.EnumerateFileSystemInfos(Pattern))
            {
                if (Directories && e is DirectoryInfo dir)
                {
                    var dest = Path.Combine(destinationDirectory.FullName, dir.Name);

                    // Recursively copy all files from input path to destination path
                    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, ($"Copying directory '{e.FullName}' to '{dest}'")));
                    CopyFolder(e.FullName, dest);
                    atLeastOne = true;
                }

                if (!Directories && e is FileInfo f)
                {
                    var dest = Path.Combine(destinationDirectory.FullName, f.Name);
                    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, ($"Copying file '{f.FullName}' to '{dest}'")));
                    File.Copy(f.FullName, dest);
                    atLeastOne = true;
                }
            }

            if (!atLeastOne)
            {
                listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, $"No {(Directories ? "Directories": "Files")} were found matching Pattern {Pattern} in {LocationOfFiles.FullName}"));
            }
        }
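CopyFolder is a private helper of the class and is not included in this excerpt; a minimal recursive implementation consistent with how it is called above might look like:

        // Hypothetical sketch of the CopyFolder helper used above: recursively
        // mirrors sourceDir into destDir, creating directories as needed.
        private static void CopyFolder(string sourceDir, string destDir)
        {
            Directory.CreateDirectory(destDir);

            foreach (var file in Directory.GetFiles(sourceDir))
            {
                File.Copy(file, Path.Combine(destDir, Path.GetFileName(file)));
            }

            foreach (var sub in Directory.GetDirectories(sourceDir))
            {
                CopyFolder(sub, Path.Combine(destDir, Path.GetFileName(sub)));
            }
        }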
Example #19
        /// <summary>
        /// The cache sub-directory for a particular load schedule within a load metadata. Uses a resolver for dataset-specific cache layout knowledge
        /// </summary>
        /// <param name="listener"></param>
        /// <returns></returns>
        public virtual DirectoryInfo GetLoadCacheDirectory(IDataLoadEventListener listener)
        {
            if (Resolver == null)
            {
                throw new Exception("No ILoadCachePathResolver has been set on CacheLayout " + this + ", this tells the system whether there are subdirectories and which one to use for a given ICacheLayout, if you don't have one use a new NoSubdirectoriesCachePathResolver() in your ICacheLayout constructor");
            }

            if (RootDirectory == null)
            {
                throw new NullReferenceException("RootDirectory has not been set yet");
            }

            var downloadDirectory = Resolver.GetLoadCacheDirectory(RootDirectory);

            if (downloadDirectory == null)
            {
                throw new Exception("Resolver " + Resolver + " of type " + Resolver.GetType().FullName + " returned null from GetLoadCacheDirectory");
            }

            if (downloadDirectory.Exists)
            {
                listener.OnNotify(this,
                                  new NotifyEventArgs(ProgressEventType.Trace,
                                                      "Download Directory Is:" + downloadDirectory.FullName));
            }
            else
            {
                listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "Download Directory Did Not Exist:" + downloadDirectory.FullName));

                downloadDirectory.Create();

                listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Created Download Directory:" + downloadDirectory.FullName));
            }
            return(downloadDirectory);
        }
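The exception message above points at NoSubdirectoriesCachePathResolver as the trivial resolver. Judging only from the single call site (Resolver.GetLoadCacheDirectory(RootDirectory) returning a DirectoryInfo), such a resolver could be as small as this sketch; the interface shape is inferred, not confirmed:

        // Sketch of a trivial ILoadCachePathResolver: no subdirectories, so the
        // load cache directory is the root itself. Interface shape inferred from
        // the call site above.
        public class NoSubdirectoriesCachePathResolver : ILoadCachePathResolver
        {
            public DirectoryInfo GetLoadCacheDirectory(DirectoryInfo rootDirectory)
            {
                return rootDirectory;
            }
        }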
Example #20
        protected bool TryExtractSupportingSQLTable(SupportingSQLTable sql, DirectoryInfo directory, IExtractionConfiguration configuration, IDataLoadEventListener listener, DataLoadInfo dataLoadInfo)
        {
            try
            {
                listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Preparing to extract Supporting SQL " + sql + " to directory " + directory.FullName));

                Stopwatch sw = new Stopwatch();
                sw.Start();

                //start auditing it as a table load
                string target        = Path.Combine(directory.FullName, sql.Name + ".csv");
                var    tableLoadInfo = dataLoadInfo.CreateTableLoadInfo("", target, new[] { new DataSource(sql.SQL, DateTime.Now) }, -1);

                TryExtractSupportingSQLTableImpl(sql, directory, configuration, listener, out int sqlLinesWritten, out string description);

                sw.Stop();

                //end auditing it
                tableLoadInfo.Inserts = sqlLinesWritten;
                tableLoadInfo.CloseAndArchive();

                if (_request is ExtractDatasetCommand)
                {
                    var result             = (_request as ExtractDatasetCommand).CumulativeExtractionResults;
                    var supplementalResult = result.AddSupplementalExtractionResult(sql.SQL, sql);
                    supplementalResult.CompleteAudit(this.GetType(), description, sqlLinesWritten);
                }
                else
                {
                    var extractGlobalsCommand = (_request as ExtractGlobalsCommand);
                    Debug.Assert(extractGlobalsCommand != null, "extractGlobalsCommand != null");
                    var result =
                        new SupplementalExtractionResults(extractGlobalsCommand.RepositoryLocator.DataExportRepository,
                                                          extractGlobalsCommand.Configuration,
                                                          sql.SQL,
                                                          sql);
                    result.CompleteAudit(this.GetType(), description, sqlLinesWritten);
                    extractGlobalsCommand.ExtractionResults.Add(result);
                }

                listener.OnProgress(this, new ProgressEventArgs("Extract " + sql, new ProgressMeasurement(sqlLinesWritten, ProgressType.Records), sw.Elapsed));
                listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Extracted " + sqlLinesWritten + " records from SupportingSQL " + sql + " into directory " + directory.FullName));

                return(true);
            }
            catch (Exception e)
            {
                if (e is SqlException)
                {
                    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Error, "Failed to run extraction SQL (make sure to fully specify all database/table/column objects completely):" + Environment.NewLine + sql.SQL, e));
                }
                else
                {
                    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Error, "Failed to extract " + sql + " into directory " + directory.FullName, e));
                }

                return(false);
            }
        }
        private void WriteBundleContents(IExtractableDatasetBundle datasetBundle, IDataLoadEventListener job, GracefulCancellationToken cancellationToken)
        {
            var rootDir             = _request.GetExtractionDirectory();
            var supportingSQLFolder = new DirectoryInfo(Path.Combine(rootDir.FullName, SupportingSQLTable.ExtractionFolderName));
            var lookupDir           = rootDir.CreateSubdirectory("Lookups");

            //extract the documents
            foreach (SupportingDocument doc in datasetBundle.Documents)
            {
                datasetBundle.States[doc] = TryExtractSupportingDocument(rootDir, doc, job)
                    ? ExtractCommandState.Completed
                    : ExtractCommandState.Crashed;
            }

            //extract supporting SQL
            foreach (SupportingSQLTable sql in datasetBundle.SupportingSQL)
            {
                datasetBundle.States[sql] = TryExtractSupportingSQLTable(supportingSQLFolder, _request.Configuration, sql, job, _dataLoadInfo)
                    ? ExtractCommandState.Completed
                    : ExtractCommandState.Crashed;
            }

            //extract lookups
            foreach (BundledLookupTable lookup in datasetBundle.LookupTables)
            {
                try
                {
                    job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "About to extract lookup " + lookup));

                    var server = DataAccessPortal.GetInstance().ExpectServer(lookup.TableInfo, DataAccessContext.DataExport);

                    Stopwatch sw = new Stopwatch();
                    sw.Start();

                    //extracts all of them
                    var extractTableVerbatim = new ExtractTableVerbatim(server, new [] { lookup.TableInfo.Name }, lookupDir, _request.Configuration.Separator, DateFormat);
                    int linesWritten         = extractTableVerbatim.DoExtraction();
                    sw.Stop();
                    job.OnProgress(this, new ProgressEventArgs("Lookup " + lookup, new ProgressMeasurement(linesWritten, ProgressType.Records), sw.Elapsed));

                    if (_request is ExtractDatasetCommand)
                    {
                        var result             = (_request as ExtractDatasetCommand).CumulativeExtractionResults;
                        var supplementalResult = result.AddSupplementalExtractionResult("SELECT * FROM " + lookup.TableInfo.Name, lookup.TableInfo);
                        supplementalResult.CompleteAudit(this.GetType(), extractTableVerbatim.OutputFilename, linesWritten);
                    }

                    datasetBundle.States[lookup] = ExtractCommandState.Completed;
                }
                catch (Exception e)
                {
                    job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Error, "Error occurred trying to extract lookup " + lookup + " on server " + lookup.TableInfo.Server, e));

                    datasetBundle.States[lookup] = ExtractCommandState.Crashed;
                }
            }

            haveWrittenBundleContents = true;
        }
Example #22
        public override string HackExtractionSQL(string sql, IDataLoadEventListener listener)
        {
            SetServer();

            //call base hacks
            sql = base.HackExtractionSQL(sql, listener);

            if (_doNotMigrate)
            {
                listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Cohort and Data are on same server so no migration will occur"));
                return(sql);
            }

            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Original (unhacked) SQL was " + sql, null));

            //now replace database with tempdb
            var extractableCohort       = Request.ExtractableCohort;
            var extractableCohortSource = extractableCohort.ExternalCohortTable;

            var syntaxHelperFactory = new QuerySyntaxHelperFactory();
            var sourceSyntax        = syntaxHelperFactory.Create(extractableCohortSource.DatabaseType);
            var destinationSyntax   = syntaxHelperFactory.Create(_server.DatabaseType);

            //To replace (in this order)
            //Cohort database.table.privateId
            //Cohort database.table.releaseId
            //Cohort database.table.cohortdefinitionId
            //Cohort database.table name
            Dictionary <string, string> replacementStrings = new Dictionary <string, string>();

            var sourceDb                 = sourceSyntax.GetRuntimeName(extractableCohortSource.Database);
            var sourceTable              = sourceSyntax.GetRuntimeName(extractableCohortSource.TableName);
            var sourcePrivateId          = sourceSyntax.GetRuntimeName(extractableCohort.GetPrivateIdentifier());
            var sourceReleaseId          = sourceSyntax.GetRuntimeName(extractableCohort.GetReleaseIdentifier());
            var sourceCohortDefinitionId = sourceSyntax.GetRuntimeName(extractableCohortSource.DefinitionTableForeignKeyField);

            //Swaps the given entity for the same entity but in _tempDb
            AddReplacement(replacementStrings, sourceDb, sourceTable, sourcePrivateId, sourceSyntax, destinationSyntax);
            AddReplacement(replacementStrings, sourceDb, sourceTable, sourceReleaseId, sourceSyntax, destinationSyntax);
            AddReplacement(replacementStrings, sourceDb, sourceTable, sourceCohortDefinitionId, sourceSyntax, destinationSyntax);
            AddReplacement(replacementStrings, sourceDb, sourceTable, sourceSyntax, destinationSyntax);

            foreach (KeyValuePair <string, string> r in replacementStrings)
            {
                listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Replacing '" + r.Key + "' with '" + r.Value + "'", null));

                if (!sql.Contains(r.Key))
                {
                    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "SQL extraction query string did not contain the text '" + r.Key + "' (which we expected to replace with '" + r.Value + ""));
                }

                sql = sql.Replace(r.Key, r.Value);
            }

            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Adjusted (hacked) SQL was " + sql, null));

            //the replacements above swap [MyCohortDatabase].. for [tempdb].. (handling Cohort..Cohort replacement as well as names like 'Cohort.dbo.Cohort.Fish' correctly)
            return(sql);
        }
Example #23
        /// <summary>
        /// Commits the cohort created into the database (assuming no error occurred during pipeline processing - See <paramref name="pipelineFailureExceptionIfAny"/>).
        /// </summary>
        /// <param name="listener"></param>
        /// <param name="pipelineFailureExceptionIfAny"></param>
        public virtual void Dispose(IDataLoadEventListener listener, Exception pipelineFailureExceptionIfAny)
        {
            //it exceptioned
            if (pipelineFailureExceptionIfAny != null)
            {
                return;
            }

            var db = Request.NewCohortDefinition.LocationOfCohort.Discover();

            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Preparing upload"));

            using (var connection = db.Server.BeginNewTransactedConnection())
            {
                try
                {
                    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Started Transaction"));
                    Request.PushToServer(connection);

                    if (Request.NewCohortDefinition.ID == null)
                    {
                        throw new Exception("We pushed the new cohort from the request object to the server (within transaction) but it's ID was not populated");
                    }

                    var tbl = db.ExpectTable(Request.NewCohortDefinition.LocationOfCohort.TableName);

                    using (var bulkCopy = tbl.BeginBulkInsert(connection.ManagedTransaction))
                    {
                        var dt = new DataTable();
                        dt.Columns.Add(_privateIdentifier);
                        dt.Columns.Add(_releaseIdentifier);

                        //add the ID as another column
                        dt.Columns.Add(_fk);

                        foreach (KeyValuePair <object, object> kvp in _cohortDictionary)
                        {
                            dt.Rows.Add(kvp.Key, kvp.Value, Request.NewCohortDefinition.ID);
                        }

                        bulkCopy.Upload(dt);
                    }

                    connection.ManagedTransaction.CommitAndCloseConnection();
                }
                catch
                {
                    connection.ManagedTransaction.AbandonAndCloseConnection();
                    throw;
                }
            }

            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Succesfully uploaded " + _cohortDictionary.Count + " records"));

            int id = Request.ImportAsExtractableCohort(DeprecateOldCohortOnSuccess);

            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Cohort successfully comitted to destination and imported as an RDMP ExtractableCohort (ID=" + id + " <- this is the ID of the reference pointer, the cohortDefinitionID of the actual cohort remains as you specified:" + Request.NewCohortDefinition.ID + ")"));
        }
Example #24
        protected override void MutilateTable(IDataLoadEventListener job, ITableInfo tableInfo, DiscoveredTable table)
        {
            var server = table.Database.Server;

            job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "About to run Coalese on table " + table));

            var allCols = table.DiscoverColumns();

            var pkColumnInfos = tableInfo.ColumnInfos.Where(c => c.IsPrimaryKey).Select(c => c.GetRuntimeName()).ToArray();
            var nonPks        = allCols.Where(c => !pkColumnInfos.Contains(c.GetRuntimeName())).ToArray();
            var pks           = allCols.Except(nonPks).ToArray();

            if (!pkColumnInfos.Any())
            {
                throw new Exception("Table '" + tableInfo + "' has no IsPrimaryKey columns");
            }

            if (allCols.Length == pkColumnInfos.Length)
            {
                job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "Skipping Coalesce on table " + table + " because it has no non primary key columns"));
                return;
            }

            //Get an update command for each non primary key column
            Dictionary <string, Task <int> > sqlCommands = new Dictionary <string, Task <int> >();

            foreach (DiscoveredColumn nonPk in nonPks)
            {
                sqlCommands.Add(GetCommand(table, pks, nonPk), null);
            }

            server.EnableAsync();

            using (var con = table.Database.Server.GetConnection())
            {
                con.Open();

                if (CreateIndex)
                {
                    var idxCmd = server.GetCommand(string.Format(@"CREATE INDEX IX_PK_{0} ON {0}({1});", table.GetRuntimeName(), string.Join(",", pks.Select(p => p.GetRuntimeName()))), con);
                    idxCmd.CommandTimeout = Timeout;
                    idxCmd.ExecuteNonQuery();
                }

                foreach (var sql in sqlCommands.Keys.ToArray())
                {
                    var cmd = server.GetCommand(sql, con);
                    cmd.CommandTimeout = Timeout;
                    sqlCommands[sql]   = cmd.ExecuteNonQueryAsync();
                }

                Task.WaitAll(sqlCommands.Values.ToArray());
            }

            int affectedRows = sqlCommands.Values.Sum(t => t.Result);

            job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Coalesce on table '" + table + "' completed (" + affectedRows + " rows affected)"));
        }
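GetCommand (not shown in this excerpt) builds one UPDATE per non-primary-key column. Conceptually each statement fills NULLs in one duplicate-key row from a sibling row sharing the same primary key - roughly this shape (hypothetical; the table and column names are made up):

        // Hypothetical shape of the per-column SQL built by GetCommand: coalesce
        // MyColumn across rows that share the same primary key values.
        private const string ExampleCoalesceSql = @"
        UPDATE t1
        SET MyColumn = ISNULL(t1.MyColumn, t2.MyColumn)
        FROM MyTable t1
        JOIN MyTable t2 ON t1.MyPk = t2.MyPk
        WHERE t1.MyColumn IS NULL AND t2.MyColumn IS NOT NULL";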
        public void FileIsEmpty()
        {
            if (_throwOnEmptyFiles)
            {
                throw new FlatFileLoadException("File " + _fileToLoad + " is empty");
            }

            _listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "File " + _fileToLoad + " is empty"));
        }
Example #26
        private void DownloadUntilFinished(PermissionWindowCacheDownloader downloader, IDataLoadEventListener listener, GracefulCancellationToken cancellationToken)
        {
            try
            {
                while (true)
                {
                    cancellationToken.ThrowIfCancellationRequested();

                    var result = RetryMode ?
                                 downloader.RetryDownload(listener, cancellationToken) :
                                 downloader.Download(listener, cancellationToken);

                    switch (result)
                    {
                    case RetrievalResult.NotPermitted:

                        if (TerminateIfOutsidePermissionWindow)
                        {
                            listener.OnNotify(this,
                                              new NotifyEventArgs(ProgressEventType.Information,
                                                                  "Download not permitted at this time so exitting"));

                            return;
                        }

                        listener.OnNotify(this,
                                          new NotifyEventArgs(ProgressEventType.Information,
                                                              "Download not permitted at this time, sleeping for 60 seconds"));

                        // Sleep for a while, but keep one eye open for cancellation requests
                        const int sleepTime = 60000;
                        const int cancellationCheckInterval = 1000;
                        var       elapsedTime = 0;
                        while (elapsedTime < sleepTime)
                        {
                            Task.Delay(cancellationCheckInterval).Wait();
                            cancellationToken.ThrowIfCancellationRequested();
                            elapsedTime += cancellationCheckInterval;
                        }
                        break;

                    case RetrievalResult.Complete:
                        listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Download completed successfully."));
                        return;

                    default:
                        listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Download ended: " + result));
                        return;
                    }
                }
            }
            catch (OperationCanceledException)
            {
                listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "Cache download cancelled: " + downloader));
            }
        }
        public override SMIDataChunk DoGetChunk(ICacheFetchRequest request, IDataLoadEventListener listener, GracefulCancellationToken cancellationToken)
        {
            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, $"ProcessBasedCacheSource version is {typeof(ProcessBasedCacheSource).Assembly.GetName().Version}.  Assembly is {typeof(ProcessBasedCacheSource).Assembly} "));

            // Where we are putting the files
            var cacheDir    = new LoadDirectory(Request.CacheProgress.LoadProgress.LoadMetadata.LocationOfFlatFiles).Cache;
            var cacheLayout = new SMICacheLayout(cacheDir, new SMICachePathResolver("ALL"));

            Chunk = new SMIDataChunk(Request)
            {
                FetchDate = Request.Start,
                Modality  = "ALL",
                Layout    = cacheLayout
            };

            var workingDirectory = cacheLayout.GetLoadCacheDirectory(listener);

            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Working directory is:" + workingDirectory));
            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Fetch Start is:" + request.Start));
            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Fetch End is:" + request.End));

            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Command is:" + Command));
            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Args template is:" + Args));
            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Datetime format is:" + TimeFormat));


            string args = Args
                          .Replace("%s", request.Start.ToString(TimeFormat))
                          .Replace("%e", request.End.ToString(TimeFormat))
                          .Replace("%d", workingDirectory.FullName);

            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Args resolved is:" + args));

            using (var p = new Process())
            {
                p.StartInfo.FileName               = Command;
                p.StartInfo.Arguments              = args;
                p.StartInfo.UseShellExecute        = false;
                p.StartInfo.RedirectStandardOutput = true;
                // a.Data is null once the output stream closes, so guard before notifying
                p.OutputDataReceived              += (sender, a) => { if (a.Data != null) listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, a.Data)); };

                p.Start();
                p.BeginOutputReadLine();

                p.WaitForExit();

                listener.OnNotify(this, new NotifyEventArgs(p.ExitCode == 0 ? ProgressEventType.Information : ProgressEventType.Warning, "Process exited with code " + p.ExitCode));

                if (p.ExitCode != 0 && ThrowOnNonZeroExitCode)
                {
                    throw new Exception("Process exited with code " + p.ExitCode);
                }
            }

            return(Chunk);
        }
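The %s, %e and %d tokens in Args are substituted with the fetch start, fetch end (both in TimeFormat) and the working directory by the Replace calls above, so a concrete configuration might look like the following; the command and argument names are made up for illustration, and Command/Args/TimeFormat are assumed to be settable properties:

        // Hypothetical configuration for ProcessBasedCacheSource: %s/%e/%d are
        // replaced before the process starts, as shown in DoGetChunk above.
        var source = new ProcessBasedCacheSource
        {
            Command    = "/usr/local/bin/fetch-dicom",
            Args       = "--since %s --until %e --output %d",
            TimeFormat = "yyyy-MM-dd HH:mm:ss"
        };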
Example #28
 //TODO: public for now, no package - need to make assembly
 public void Add(string sopInstance)
 {
     if (Images.ContainsKey(sopInstance))
     {
         Listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Error, "DicomRetriever.Order.Series Attempt to add duplicate _sopInstance:" + sopInstance));
     }
     else
     {
         Images.Add(sopInstance, new Image(sopInstance, PlacementMode, OrderLevel));
     }
 }
        public override void Dispose(IDataLoadEventListener listener, Exception pipelineFailureExceptionIfAny)
        {
            if (_destination != null)
            {
                _destination.Dispose(listener, pipelineFailureExceptionIfAny);

                //if the extraction failed, the table didn't exist in the destination (i.e. the table was created during the extraction) and we are to DropTableIfLoadFails
                if (pipelineFailureExceptionIfAny != null && _tableDidNotExistAtStartOfLoad && DropTableIfLoadFails)
                {
                    if (_destinationDatabase != null)
                    {
                        var tbl = _destinationDatabase.ExpectTable(_toProcess.TableName);

                        if (tbl.Exists())
                        {
                            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "DropTableIfLoadFails is true so about to drop table " + tbl));
                            tbl.Drop();
                            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "Dropped table " + tbl));
                        }
                    }
                }

                if (pipelineFailureExceptionIfAny == null &&
                    _request.IsBatchResume &&
                    MakeFinalTableDistinctWhenBatchResuming &&
                    _destinationDatabase != null &&
                    _toProcess != null)
                {
                    var tbl = _destinationDatabase.ExpectTable(_toProcess.TableName);
                    if (tbl.Exists())
                    {
                        // if there is no primary key then failed batches may have introduced duplication
                        if (!tbl.DiscoverColumns().Any(p => p.IsPrimaryKey))
                        {
                            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, $"Making {tbl} distinct incase there are duplicate rows from bad batch resumes"));
                            tbl.MakeDistinct(50000000);
                            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, $"Finished distincting {tbl}"));
                        }
                    }
                }
            }

            TableLoadInfo?.CloseAndArchive();

            // also close off the cumulative extraction result
            if (_request is ExtractDatasetCommand)
            {
                var result = ((IExtractDatasetCommand)_request).CumulativeExtractionResults;
                if (result != null && _toProcess != null)
                {
                    result.CompleteAudit(this.GetType(), GetDestinationDescription(), TableLoadInfo.Inserts, _request.IsBatchResume, pipelineFailureExceptionIfAny != null);
                }
            }
        }
        private void ExtractLookupTableSql(BundledLookupTable lookup, IDataLoadEventListener listener, DataLoadInfo dataLoadInfo)
        {
            try
            {
                var tempDestination = new DataTableUploadDestination();

                var server = DataAccessPortal.GetInstance().ExpectServer(lookup.TableInfo, DataAccessContext.DataExport);

                listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "About to download SQL for lookup " + lookup.TableInfo.Name));
                using (var con = server.GetConnection())
                {
                    con.Open();
                    var sqlString = "SELECT * FROM " + lookup.TableInfo.Name;
                    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Connection opened successfully, about to send SQL command: " + sqlString));
                    var cmd = DatabaseCommandHelper.GetCommand(sqlString, con);
                    var da  = DatabaseCommandHelper.GetDataAdapter(cmd);

                    var sw = new Stopwatch();

                    sw.Start();
                    DataTable dt = new DataTable();
                    da.Fill(dt);

                    dt.TableName = GetTableName(_destinationDatabase.Server.GetQuerySyntaxHelper().GetSensibleTableNameFromString(lookup.TableInfo.Name));

                    var tableLoadInfo = dataLoadInfo.CreateTableLoadInfo("", dt.TableName, new[] { new DataSource(sqlString, DateTime.Now) }, -1);
                    tableLoadInfo.Inserts = dt.Rows.Count;

                    listener.OnProgress(this, new ProgressEventArgs("Reading from Lookup " + lookup.TableInfo.Name, new ProgressMeasurement(dt.Rows.Count, ProgressType.Records), sw.Elapsed));
                    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Decided on the following destination table name for Lookup: " + dt.TableName));

                    tempDestination.AllowResizingColumnsAtUploadTime = true;
                    tempDestination.PreInitialize(GetDestinationDatabase(listener), listener);
                    tempDestination.ProcessPipelineData(dt, listener, new GracefulCancellationToken());
                    tempDestination.Dispose(listener, null);

                    //end auditing it
                    tableLoadInfo.CloseAndArchive();

                    if (_request is ExtractDatasetCommand)
                    {
                        var result             = (_request as ExtractDatasetCommand).CumulativeExtractionResults;
                        var supplementalResult = result.AddSupplementalExtractionResult("SELECT * FROM " + lookup.TableInfo.Name, lookup.TableInfo);
                        supplementalResult.CompleteAudit(this.GetType(), TargetDatabaseServer.ID + "|" + GetDatabaseName() + "|" + dt.TableName, dt.Rows.Count);
                    }
                }
            }
            catch (Exception e)
            {
                listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Error, "Extraction of Lookup " + lookup.TableInfo.Name + " failed ", e));
                throw;
            }
        }