/// <summary>
/// Toggle-button handler: starts pipeline execution on a background Task, or — if a run is
/// already in progress (button reads "Stop") — signals cancellation of that run.
/// Completion is marshalled back to the UI thread via the current SynchronizationContext.
/// </summary>
private void btnExecute_Click(object sender, EventArgs e)
{
    // If it is already executing, this click means "Stop": signal cancellation and bail out.
    // (Checked FIRST so we do not build and initialize a brand-new pipeline just to discard it,
    // which the previous version did.)
    if (btnExecute.Text == "Stop")
    {
        _cancel.Cancel(); // set the cancellation token
        return;
    }

    var pipeline = CreateAndInitializePipeline();

    btnExecute.Text = "Stop";
    _cancel = new CancellationTokenSource();

    // Clear any old results from a previous run.
    progressUI1.Clear();

    PipelineExecutionStarted?.Invoke(this, new PipelineEngineEventArgs(pipeline));

    progressUI1.ShowRunning(true);

    // Captured by both the worker and the continuation; only set true on a clean run.
    bool success = false;

    // Start a new thread so the UI stays responsive while the engine runs.
    Task t = new Task(() =>
    {
        try
        {
            // NOTE(review): the same token is passed as both halves of the
            // GracefulCancellationToken (stop/abort) — preserved as-is, confirm intended.
            pipeline.ExecutePipeline(new GracefulCancellationToken(_cancel.Token, _cancel.Token));
            success = true;
        }
        catch (Exception ex)
        {
            fork.OnNotify(this, new NotifyEventArgs(ProgressEventType.Error, "Pipeline execution failed", ex));
        }
    });

    // Continuation runs on the UI thread (FromCurrentSynchronizationContext) so it is safe
    // to touch controls and raise UI-bound events here.
    t.ContinueWith(x =>
    {
        if (success)
        {
            // If it successfully got here then the Task ran the engine to completion.
            PipelineExecutionFinishedsuccessfully?.Invoke(this, new PipelineEngineEventArgs(pipeline));
        }

        progressUI1.ShowRunning(false);
        btnExecute.Text = "Execute"; // make it so user can execute again
    }, TaskScheduler.FromCurrentSynchronizationContext());

    t.Start();
}
/// <summary>
/// Runs the DQE evaluation of the given catalogue: executes the report query, classifies every
/// row into per-pivot-category / per-data-load-run quality states and periodicity cubes, then
/// commits the aggregated results to the DQE server in a single transaction.
/// Progress and errors are forked to both <paramref name="listener"/> and the logging database.
/// </summary>
/// <param name="c">Catalogue (dataset) to evaluate.</param>
/// <param name="listener">Receives progress/notification events alongside the logging database.</param>
/// <param name="cancellationToken">Checked per-row; cancelling raises OperationCanceledException
/// which is reported as a Warning rather than an Error.</param>
public override void GenerateReport(ICatalogue c, IDataLoadEventListener listener, CancellationToken cancellationToken)
{
    SetupLogging(c.CatalogueRepository);

    var toDatabaseLogger = new ToLoggingDatabaseDataLoadEventListener(this, _logManager, _loggingTask, "DQE evaluation of " + c);

    var forker = new ForkDataLoadEventListener(listener, toDatabaseLogger);

    try
    {
        _catalogue = c;
        var dqeRepository = new DQERepository(c.CatalogueRepository);

        // "ALL" buckets always exist: every record is counted here regardless of pivot value.
        byPivotCategoryCubesOverTime.Add("ALL", new PeriodicityCubesOverTime("ALL"));
        byPivotRowStatesOverDataLoadRunId.Add("ALL", new DQEStateOverDataLoadRunId("ALL"));

        // Run pre-flight checks, routing any check output through the event listeners.
        Check(new FromDataLoadEventListenerToCheckNotifier(forker));

        var sw = Stopwatch.StartNew();

        using (var con = _server.GetConnection())
        {
            con.Open();

            // using blocks ensure the command and reader are disposed even on
            // exception/cancellation (the previous version leaked both).
            using (var cmd = _server.GetCommand(_queryBuilder.SQL, con))
            {
                // Very long timeout: DQE queries can scan entire datasets.
                cmd.CommandTimeout = 500000;

                var t = cmd.ExecuteReaderAsync(cancellationToken);
                t.Wait(cancellationToken);

                if (cancellationToken.IsCancellationRequested)
                {
                    throw new OperationCanceledException("User cancelled DQE while fetching data");
                }

                using (var r = t.Result)
                {
                    int progress = 0;

                    while (r.Read())
                    {
                        cancellationToken.ThrowIfCancellationRequested();
                        progress++;

                        //to start with assume we will pass the results for the 'unknown batch' (where data load run ID is null or not available)
                        int dataLoadRunIDOfCurrentRecord = 0;

                        //if the DataReader is likely to have a data load run ID column
                        if (_containsDataLoadID)
                        {
                            //get data load run id
                            int? runID = dqeRepository.ObjectToNullableInt(r[_dataLoadRunFieldName]);

                            //if it has a value use it (otherwise it is null so use 0 - ugh I know, it's a primary key constraint issue)
                            if (runID != null)
                            {
                                dataLoadRunIDOfCurrentRecord = (int)runID;
                            }
                        }

                        string pivotValue = null;

                        //if the user has a pivot category configured
                        if (_pivotCategory != null)
                        {
                            pivotValue = GetStringValueForPivotField(r[_pivotCategory], forker);

                            // Warn once (and only once) about null/empty pivot values; such
                            // records are counted under "ALL" only.
                            if (!haveComplainedAboutNullCategories && string.IsNullOrWhiteSpace(pivotValue))
                            {
                                forker.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "Found a null/empty value for pivot category '" + _pivotCategory + "', this record will ONLY be recorded under ALL and not it's specific category, you will not be warned of further nulls because there are likely to be many if there are any"));
                                haveComplainedAboutNullCategories = true;
                                pivotValue = null;
                            }
                        }

                        //always increase the "ALL" category
                        ProcessRecord(dqeRepository, dataLoadRunIDOfCurrentRecord, r, byPivotCategoryCubesOverTime["ALL"], byPivotRowStatesOverDataLoadRunId["ALL"]);

                        //if there is a value in the current record for the pivot column
                        if (pivotValue != null)
                        {
                            //if it is a novel
                            if (!byPivotCategoryCubesOverTime.ContainsKey(pivotValue))
                            {
                                //we will need to expand the dictionaries
                                if (byPivotCategoryCubesOverTime.Keys.Count > MaximumPivotValues)
                                {
                                    throw new OverflowException(
                                        "Encountered more than " + MaximumPivotValues + " values for the pivot column " + _pivotCategory + " this will result in crazy space usage since it is a multiplicative scale of DQE tesseracts");
                                }

                                //expand both the time periodicity and the state results
                                byPivotRowStatesOverDataLoadRunId.Add(pivotValue, new DQEStateOverDataLoadRunId(pivotValue));
                                byPivotCategoryCubesOverTime.Add(pivotValue, new PeriodicityCubesOverTime(pivotValue));
                            }

                            //now we are sure that the dictionaries have the category field we can increment it
                            ProcessRecord(dqeRepository, dataLoadRunIDOfCurrentRecord, r, byPivotCategoryCubesOverTime[pivotValue], byPivotRowStatesOverDataLoadRunId[pivotValue]);
                        }

                        // Throttled progress reporting: one event per 5000 records.
                        if (progress % 5000 == 0)
                        {
                            forker.OnProgress(this, new ProgressEventArgs("Processing " + _catalogue, new ProgressMeasurement(progress, ProgressType.Records), sw.Elapsed));
                        }
                    }

                    //final value
                    forker.OnProgress(this, new ProgressEventArgs("Processing " + _catalogue, new ProgressMeasurement(progress, ProgressType.Records), sw.Elapsed));
                }
            }

            con.Close();
        }

        sw.Stop();

        foreach (var state in byPivotRowStatesOverDataLoadRunId.Values)
        {
            state.CalculateFinalValues();
        }

        //now commit results
        using (var con = dqeRepository.BeginNewTransactedConnection())
        {
            try
            {
                //mark down that we are beginning an evaluation on this the day of our lord etc...
                Evaluation evaluation = new Evaluation(dqeRepository, _catalogue);

                foreach (var state in byPivotRowStatesOverDataLoadRunId.Values)
                {
                    state.CommitToDatabase(evaluation, _catalogue, con.Connection, con.Transaction);
                }

                if (_timePeriodicityField != null)
                {
                    foreach (PeriodicityCubesOverTime periodicity in byPivotCategoryCubesOverTime.Values)
                    {
                        periodicity.CommitToDatabase(evaluation);
                    }
                }

                con.ManagedTransaction.CommitAndCloseConnection();
            }
            catch (Exception)
            {
                // Roll back everything: an Evaluation is all-or-nothing.
                con.ManagedTransaction.AbandonAndCloseConnection();
                throw;
            }
        }

        forker.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "CatalogueConstraintReport completed successfully and committed results to DQE server"));
    }
    catch (Exception e)
    {
        // Cancellation is expected user behaviour (Warning); anything else is fatal (Error).
        if (!(e is OperationCanceledException))
        {
            forker.OnNotify(this, new NotifyEventArgs(ProgressEventType.Error, "Fatal Crash", e));
        }
        else
        {
            forker.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "DQE Execution Cancelled", e));
        }
    }
    finally
    {
        // Always flush/finalize the logging-database table load records.
        toDatabaseLogger.FinalizeTableLoadInfos();
    }
}