Example #1
 public void CloseLogging()
 {
     if (DataLoadInfo != null)
     {
         DataLoadInfo.CloseAndMarkComplete();
     }
 }
Example #2
        protected void Execute(out ExtractionPipelineUseCase pipelineUseCase, out IExecuteDatasetExtractionDestination results)
        {
            DataLoadInfo d = new DataLoadInfo("Internal", _testDatabaseName, "IgnoreMe", "", true, new DiscoveredServer(UnitTestLoggingConnectionString));

            Pipeline pipeline = null;

            try
            {
                pipeline        = SetupPipeline();
                pipelineUseCase = new ExtractionPipelineUseCase(_request.Configuration.Project, _request, pipeline, d);

                pipelineUseCase.Execute(new ThrowImmediatelyDataLoadEventListener());

                Assert.IsNotEmpty(pipelineUseCase.Source.Request.QueryBuilder.SQL);

                Assert.IsTrue(pipelineUseCase.ExtractCommand.State == ExtractCommandState.Completed);
            }
            finally
            {
                if (pipeline != null)
                {
                    pipeline.DeleteInDatabase();
                }
            }

            results = pipelineUseCase.Destination;
        }
Example #3
        protected void Execute(out ExtractionPipelineUseCase pipelineUseCase, out IExecuteDatasetExtractionDestination results)
        {
            DataLoadInfo d = new DataLoadInfo("Internal", _testDatabaseName, "IgnoreMe", "", true, new DiscoveredServer(UnitTestLoggingConnectionString));

            Pipeline pipeline = null;

            //the extractable columns collection is likely to include the chi column, which gets removed from the collection (replaced by a substitution identifier) during extraction, so take a copy first
            var before = _extractableColumns.ToArray();

            try
            {
                pipeline        = SetupPipeline();
                pipelineUseCase = new ExtractionPipelineUseCase(_request.Configuration.Project, _request, pipeline, d);

                pipelineUseCase.Execute(new ThrowImmediatelyDataLoadEventListener());

                Assert.IsNotEmpty(pipelineUseCase.Source.Request.QueryBuilder.SQL);

                Assert.IsTrue(pipelineUseCase.ExtractCommand.State == ExtractCommandState.Completed);
            }
            finally
            {
                if (pipeline != null)
                {
                    pipeline.DeleteInDatabase();
                }
            }

            results             = pipelineUseCase.Destination;
            _extractableColumns = new List <IColumn>(before);
        }
Example #4
        public void OnNotify(object sender, NotifyEventArgs e)
        {
            // if logging has been started, mirror the notification into the logging database before forwarding it to the wrapped listener
            if (DataLoadInfo != null)
            {
                switch (e.ProgressEventType)
                {
                case ProgressEventType.Trace:
                case ProgressEventType.Debug:
                    break;

                case ProgressEventType.Information:
                    DataLoadInfo.LogProgress(Logging.DataLoadInfo.ProgressEventType.OnInformation, sender.GetType().Name, e.Message + (e.Exception != null ? "Exception=" + ExceptionHelper.ExceptionToListOfInnerMessages(e.Exception, true) : ""));
                    break;

                case ProgressEventType.Warning:
                    DataLoadInfo.LogProgress(Logging.DataLoadInfo.ProgressEventType.OnWarning, sender.GetType().Name, e.Message + (e.Exception != null ? "Exception=" + ExceptionHelper.ExceptionToListOfInnerMessages(e.Exception, true) : ""));
                    break;

                case ProgressEventType.Error:
                    DataLoadInfo.LogProgress(Logging.DataLoadInfo.ProgressEventType.OnTaskFailed, sender.GetType().Name, e.Message);
                    DataLoadInfo.LogFatalError(sender.GetType().Name, e.Exception != null ? ExceptionHelper.ExceptionToListOfInnerMessages(e.Exception, true) : e.Message);
                    break;

                default:
                    throw new ArgumentOutOfRangeException();
                }
            }
            _listener.OnNotify(sender, e);
        }
Example #5
        public void LogError(string message, Exception exception)
        {
            // we are bailing out before the load process has had a chance to create a DataLoadInfo object
            if (DataLoadInfo == null)
            {
                CreateDataLoadInfo();
            }

            DataLoadInfo.LogFatalError(typeof(DataLoadProcess).Name, message + Environment.NewLine + ExceptionHelper.ExceptionToListOfInnerMessages(exception, true));
            DataLoadInfo.CloseAndMarkComplete();
        }
Example #6
        private void StartAuditIfExists(string tableName)
        {
            if (LoggingServer != null)
            {
                _loggingDatabaseSettings = DataAccessPortal.GetInstance().ExpectServer(LoggingServer, DataAccessContext.Logging);
                var logManager = new LogManager(_loggingDatabaseSettings);
                logManager.CreateNewLoggingTaskIfNotExists("Internal");

                _dataLoadInfo            = (DataLoadInfo)logManager.CreateDataLoadInfo("Internal", GetType().Name, "Loading table " + tableName, "", false);
                _loggingDatabaseListener = new ToLoggingDatabaseDataLoadEventListener(logManager, _dataLoadInfo);
            }
        }
Example #7
        public void LogWarning(string senderName, string message)
        {
            if (DataLoadInfo == null)
            {
                throw new Exception("Logging hasn't been started for this job (call StartLogging first)");
            }

            if (!DataLoadInfo.IsClosed)
            {
                DataLoadInfo.LogProgress(Logging.DataLoadInfo.ProgressEventType.OnWarning, senderName, message);
            }
        }
Example #8
        public ExtractionPipelineUseCase(IProject project, IExtractCommand extractCommand, IPipeline pipeline, DataLoadInfo dataLoadInfo)
        {
            _dataLoadInfo  = dataLoadInfo;
            ExtractCommand = extractCommand;
            _pipeline      = pipeline;

            extractCommand.ElevateState(ExtractCommandState.NotLaunched);

            // register the objects that pipeline components can be initialized with (via PreInitialize) when the pipeline runs
            AddInitializationObject(ExtractCommand);
            AddInitializationObject(project);
            AddInitializationObject(_dataLoadInfo);
            AddInitializationObject(project.DataExportRepository.CatalogueRepository);

            GenerateContext();
        }
Example #9
        public void FataErrorLoggingTest()
        {
            DataLoadInfo d = new DataLoadInfo("Internal", "HICSSISLibraryTests.FataErrorLoggingTest",
                                              "Test case for fatal error generation",
                                              "No rollback is possible/required as no database rows are actually inserted",
                                              true, new DiscoveredServer(UnitTestLoggingConnectionString));

            DataSource[] ds = new DataSource[] { new DataSource("nothing", DateTime.Now) };

            TableLoadInfo t = new TableLoadInfo(d, "Unit test only", "Unit test only", ds, 5);

            t.Inserts += 3; //simulate that it crashed after 3

            d.LogFatalError("HICSSISLibraryTests.FataErrorLoggingTest", "Some terrible event happened");

            Assert.IsTrue(d.IsClosed);
        }
Example #10
        private DataLoadInfo StartAudit()
        {
            DataLoadInfo dataLoadInfo;

            _logManager = _configuration.GetExplicitLoggingDatabaseServerOrDefault();

            try
            {
                //populate DataLoadInfo object (Audit)
                dataLoadInfo = new DataLoadInfo(ExecuteDatasetExtractionSource.AuditTaskName,
                                                Process.GetCurrentProcess().ProcessName,
                                                _configuration.GetLoggingRunName(),
                                                "", false, _logManager.Server);
            }
            catch (Exception e)
            {
                throw new Exception("Problem occurred trying to create Logging Component:" + e.Message + " (check user has access to " + _logManager.Server + " and that the DataLoadTask '" + ExecuteDatasetExtractionSource.AuditTaskName + "' exists)", e);
            }

            return(dataLoadInfo);
        }
Example #11
        public void MD5Test()
        {
            string fileContents = "TestStringThatCouldBeSomethingInAFile";

            byte[] hashAsBytes;

            MemoryStream memory        = new MemoryStream();
            StreamWriter writeToMemory = new StreamWriter(memory);

            writeToMemory.Write(fileContents);
            memory.Flush();
            memory.Position = 0;

            using (var md5 = MD5.Create())
            {
                hashAsBytes = md5.ComputeHash(memory);
            }

            DataSource[] ds = new DataSource[] { new DataSource("nothing", DateTime.Now) };

            ds[0].MD5 = hashAsBytes; //MD5 is a property so confirm write and read are the same - and don't bomb

            Assert.AreEqual(ds[0].MD5, hashAsBytes);

            DataLoadInfo d = new DataLoadInfo("Internal", "HICSSISLibraryTests.FataErrorLoggingTest",
                                              "Test case for fatal error generation",
                                              "No rollback is possible/required as no database rows are actually inserted",
                                              true,
                                              new DiscoveredServer(UnitTestLoggingConnectionString));

            TableLoadInfo t = new TableLoadInfo(d, "Unit test only", "Unit test only", ds, 5);

            t.Inserts += 5; //simulate that 5 records were inserted before closing
            t.CloseAndArchive();

            d.CloseAndMarkComplete();
        }
Example #12
        private ExtractCommandState ExtractSupportingSql(SupportingSQLTable sql, IDataLoadEventListener listener, DataLoadInfo dataLoadInfo)
        {
            try
            {
                var tempDestination = new DataTableUploadDestination();

                listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "About to download SQL for global SupportingSQL " + sql.Name));
                using (var con = sql.GetServer().GetConnection())
                {
                    con.Open();
                    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Connection opened successfully, about to send SQL command " + sql.SQL));
                    var cmd = DatabaseCommandHelper.GetCommand(sql.SQL, con);
                    var da  = DatabaseCommandHelper.GetDataAdapter(cmd);

                    var sw = new Stopwatch();

                    sw.Start();
                    DataTable dt = new DataTable();
                    da.Fill(dt);

                    dt.TableName = GetTableName(_destinationDatabase.Server.GetQuerySyntaxHelper().GetSensibleTableNameFromString(sql.Name));

                    var tableLoadInfo = dataLoadInfo.CreateTableLoadInfo("", dt.TableName, new[] { new DataSource(sql.SQL, DateTime.Now) }, -1);
                    tableLoadInfo.Inserts = dt.Rows.Count;

                    listener.OnProgress(this, new ProgressEventArgs("Reading from SupportingSQL " + sql.Name, new ProgressMeasurement(dt.Rows.Count, ProgressType.Records), sw.Elapsed));
                    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Decided on the following destination table name for SupportingSQL: " + dt.TableName));

                    tempDestination.AllowResizingColumnsAtUploadTime = true;
                    tempDestination.PreInitialize(GetDestinationDatabase(listener), listener);
                    tempDestination.ProcessPipelineData(dt, listener, new GracefulCancellationToken());
                    tempDestination.Dispose(listener, null);

                    //end auditing it
                    tableLoadInfo.CloseAndArchive();

                    if (_request is ExtractDatasetCommand)
                    {
                        var result             = (_request as ExtractDatasetCommand).CumulativeExtractionResults;
                        var supplementalResult = result.AddSupplementalExtractionResult(sql.SQL, sql);
                        supplementalResult.CompleteAudit(this.GetType(), TargetDatabaseServer.ID + "|" + GetDatabaseName() + "|" + dt.TableName, dt.Rows.Count);
                    }
                    else
                    {
                        var extractGlobalsCommand = (_request as ExtractGlobalsCommand);
                        Debug.Assert(extractGlobalsCommand != null, "extractGlobalsCommand != null");
                        var result =
                            new SupplementalExtractionResults(extractGlobalsCommand.RepositoryLocator.DataExportRepository,
                                                              extractGlobalsCommand.Configuration,
                                                              sql.SQL,
                                                              sql);
                        result.CompleteAudit(this.GetType(), TargetDatabaseServer.ID + "|" + GetDatabaseName() + "|" + dt.TableName, dt.Rows.Count);
                        extractGlobalsCommand.ExtractionResults.Add(result);
                    }
                }
            }
            catch (Exception e)
            {
                listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Error, "Extraction of SupportingSQL " + sql + " failed ", e));
                return(ExtractCommandState.Crashed);
            }

            return(ExtractCommandState.Completed);
        }
Example #13
        protected bool TryExtractSupportingSQLTable(SupportingSQLTable sql, DirectoryInfo directory, IExtractionConfiguration configuration, IDataLoadEventListener listener, DataLoadInfo dataLoadInfo)
        {
            try
            {
                listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Preparing to extract Supporting SQL " + sql + " to directory " + directory.FullName));

                Stopwatch sw = new Stopwatch();
                sw.Start();

                //start auditing it as a table load
                string target        = Path.Combine(directory.FullName, sql.Name + ".csv");
                var    tableLoadInfo = dataLoadInfo.CreateTableLoadInfo("", target, new[] { new DataSource(sql.SQL, DateTime.Now) }, -1);

                TryExtractSupportingSQLTableImpl(sql, directory, configuration, listener, out int sqlLinesWritten, out string description);

                sw.Stop();

                //end auditing it
                tableLoadInfo.Inserts = sqlLinesWritten;
                tableLoadInfo.CloseAndArchive();

                if (_request is ExtractDatasetCommand)
                {
                    var result             = (_request as ExtractDatasetCommand).CumulativeExtractionResults;
                    var supplementalResult = result.AddSupplementalExtractionResult(sql.SQL, sql);
                    supplementalResult.CompleteAudit(this.GetType(), description, sqlLinesWritten);
                }
                else
                {
                    var extractGlobalsCommand = (_request as ExtractGlobalsCommand);
                    Debug.Assert(extractGlobalsCommand != null, "extractGlobalsCommand != null");
                    var result =
                        new SupplementalExtractionResults(extractGlobalsCommand.RepositoryLocator.DataExportRepository,
                                                          extractGlobalsCommand.Configuration,
                                                          sql.SQL,
                                                          sql);
                    result.CompleteAudit(this.GetType(), description, sqlLinesWritten);
                    extractGlobalsCommand.ExtractionResults.Add(result);
                }

                listener.OnProgress(this, new ProgressEventArgs("Extract " + sql, new ProgressMeasurement(sqlLinesWritten, ProgressType.Records), sw.Elapsed));
                listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Extracted " + sqlLinesWritten + " records from SupportingSQL " + sql + " into directory " + directory.FullName));

                return(true);
            }
            catch (Exception e)
            {
                if (e is SqlException)
                {
                    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Error, "Failed to run extraction SQL (make sure to fully specify all database/table/column objects completely):" + Environment.NewLine + sql.SQL, e));
                }
                else
                {
                    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Error, "Failed to extract " + sql + " into directory " + directory.FullName, e));
                }

                return(false);
            }
        }
Example #14
        private void ExtractGlobals(ExtractGlobalsCommand request, IDataLoadEventListener listener, DataLoadInfo dataLoadInfo)
        {
            var globalsDirectory = request.GetExtractionDirectory();

            if (CleanExtractionFolderBeforeExtraction)
            {
                globalsDirectory.Delete(true);
                globalsDirectory.Create();
            }

            foreach (var doc in request.Globals.Documents)
            {
                request.Globals.States[doc] = TryExtractSupportingDocument(doc, globalsDirectory, listener)
                    ? ExtractCommandState.Completed
                    : ExtractCommandState.Crashed;
            }

            foreach (var sql in request.Globals.SupportingSQL)
            {
                request.Globals.States[sql] = TryExtractSupportingSQLTable(sql, globalsDirectory, request.Configuration, listener, dataLoadInfo)
                    ? ExtractCommandState.Completed
                    : ExtractCommandState.Crashed;
            }
        }
Example #15
 public virtual void PreInitialize(DataLoadInfo value, IDataLoadEventListener listener)
 {
     _dataLoadInfo = value;
 }
Example #16
        private void ExtractLookupTableSql(BundledLookupTable lookup, IDataLoadEventListener listener, DataLoadInfo dataLoadInfo)
        {
            try
            {
                var tempDestination = new DataTableUploadDestination();

                var server = DataAccessPortal.GetInstance().ExpectServer(lookup.TableInfo, DataAccessContext.DataExport);

                listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "About to download SQL for lookup " + lookup.TableInfo.Name));
                using (var con = server.GetConnection())
                {
                    con.Open();
                    var sqlString = "SELECT * FROM " + lookup.TableInfo.Name;
                    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Connection opened successfully, about to send SQL command: " + sqlString));
                    var cmd = DatabaseCommandHelper.GetCommand(sqlString, con);
                    var da  = DatabaseCommandHelper.GetDataAdapter(cmd);

                    var sw = new Stopwatch();

                    sw.Start();
                    DataTable dt = new DataTable();
                    da.Fill(dt);

                    dt.TableName = GetTableName(_destinationDatabase.Server.GetQuerySyntaxHelper().GetSensibleTableNameFromString(lookup.TableInfo.Name));

                    var tableLoadInfo = dataLoadInfo.CreateTableLoadInfo("", dt.TableName, new[] { new DataSource(sqlString, DateTime.Now) }, -1);
                    tableLoadInfo.Inserts = dt.Rows.Count;

                    listener.OnProgress(this, new ProgressEventArgs("Reading from Lookup " + lookup.TableInfo.Name, new ProgressMeasurement(dt.Rows.Count, ProgressType.Records), sw.Elapsed));
                    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Decided on the following destination table name for Lookup: " + dt.TableName));

                    tempDestination.AllowResizingColumnsAtUploadTime = true;
                    tempDestination.PreInitialize(GetDestinationDatabase(listener), listener);
                    tempDestination.ProcessPipelineData(dt, listener, new GracefulCancellationToken());
                    tempDestination.Dispose(listener, null);

                    //end auditing it
                    tableLoadInfo.CloseAndArchive();

                    if (_request is ExtractDatasetCommand)
                    {
                        var result             = (_request as ExtractDatasetCommand).CumulativeExtractionResults;
                        var supplementalResult = result.AddSupplementalExtractionResult("SELECT * FROM " + lookup.TableInfo.Name, lookup.TableInfo);
                        supplementalResult.CompleteAudit(this.GetType(), TargetDatabaseServer.ID + "|" + GetDatabaseName() + "|" + dt.TableName, dt.Rows.Count);
                    }
                }
            }
            catch (Exception e)
            {
                listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Error, "Extraction of Lookup " + lookup.TableInfo.Name + " failed ", e));
                throw;
            }
        }
Example #17
        public void ExtractGlobals(ExtractGlobalsCommand request, IDataLoadEventListener listener, DataLoadInfo dataLoadInfo)
        {
            var globalsToExtract = request.Globals;

            if (globalsToExtract.Any())
            {
                var globalsDirectory = request.GetExtractionDirectory();
                if (CleanExtractionFolderBeforeExtraction)
                {
                    globalsDirectory.Delete(true);
                    globalsDirectory.Create();
                }

                foreach (var sql in globalsToExtract.SupportingSQL)
                {
                    ExtractSupportingSql(sql, listener, dataLoadInfo);
                }

                foreach (var doc in globalsToExtract.Documents)
                {
                    ExtractSupportingDocument(globalsDirectory, doc, listener);
                }
            }
        }
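Taken together, the examples above cover the whole audit lifecycle: create a DataLoadInfo, audit each table load through a TableLoadInfo, then close everything. The snippet below is a minimal sketch assembled only from the calls shown in these examples; the method name, the logging server parameter, the task/table names and the row count are illustrative placeholders, not taken from any real configuration.

        // Minimal lifecycle sketch (assumptions: 'loggingServer' points at an existing logging
        // database and the "Internal" logging task already exists there, as in Examples #6 and #10).
        public void AuditLifecycleSketch(DiscoveredServer loggingServer)
        {
            var dataLoadInfo = new DataLoadInfo("Internal", "MySketchProcess", "Illustrative run only", "", false, loggingServer);

            var sources       = new[] { new DataSource("SELECT * FROM MyTable", DateTime.Now) };
            var tableLoadInfo = dataLoadInfo.CreateTableLoadInfo("", "MyDestinationTable", sources, -1);

            try
            {
                //... move rows here, recording how many were written ...
                tableLoadInfo.Inserts += 100;

                tableLoadInfo.CloseAndArchive();     //finish auditing the table load
                dataLoadInfo.CloseAndMarkComplete(); //mark the overall run as complete
            }
            catch (Exception ex)
            {
                //logging a fatal error also closes the DataLoadInfo (Example #9 asserts IsClosed afterwards)
                dataLoadInfo.LogFatalError("AuditLifecycleSketch", ExceptionHelper.ExceptionToListOfInnerMessages(ex, true));
            }
        }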