/// <summary>
/// Renames <paramref name="toProcess"/> to the extraction table name and, for dataset extraction
/// commands, exports the Catalogue (temporarily renamed to match the table) as a .sd share file
/// into the metadata folder beside the dataset's extraction directory.
/// </summary>
/// <param name="toProcess">The batch of extracted data flowing down the pipeline.</param>
/// <param name="listener">Event listener (not used by this component).</param>
/// <param name="cancellationToken">Cancellation token (not used by this component).</param>
/// <returns>The same <see cref="DataTable"/>, renamed and flagged "ProperlyNamed".</returns>
public DataTable ProcessPipelineData(DataTable toProcess, IDataLoadEventListener listener, GracefulCancellationToken cancellationToken)
{
    toProcess.TableName = GetTableName();

    // Flag so downstream components know the table already carries its final name.
    // NOTE(review): Add throws ArgumentException if the key is already present - assumes each
    // DataTable passes through this component only once; confirm for multi-batch extractions.
    toProcess.ExtendedProperties.Add("ProperlyNamed", true);

    // Only dataset extraction commands get catalogue metadata exported alongside the data
    if (_request is ExtractDatasetCommand extractDatasetCommand)
    {
        var catalogue = extractDatasetCommand.Catalogue;

        var sourceFolder = _request.GetExtractionDirectory()
                           ?? throw new Exception("Could not find Source Folder. Does the project have an Extraction Directory defined?");

        // Metadata lives in a sibling folder of the dataset's extraction directory
        var outputFolder = sourceFolder.Parent.CreateSubdirectory(ExtractionDirectory.METADATA_FOLDER_NAME);
        var outputFile = new FileInfo(Path.Combine(outputFolder.FullName, toProcess.TableName + ".sd"));

        // Temporarily rename the Catalogue so the exported share carries the extraction table
        // name.  try/finally ensures the in-memory object is reverted to its database state even
        // if the export fails (the original left it renamed on exception).
        catalogue.Name = toProcess.TableName;
        try
        {
            var cmd = new ExecuteCommandExportObjectsToFile(_activator, catalogue, outputFile);
            cmd.Execute();
        }
        finally
        {
            catalogue.RevertToDatabaseState();
        }
    }

    return toProcess;
}
/// <summary>
/// Extracts the artefacts bundled with the dataset (supporting documents, supporting SQL and
/// lookup tables), recording each item's outcome (Completed/Crashed) in the bundle's States.
/// </summary>
/// <param name="datasetBundle">The bundle whose contents are to be written out.</param>
/// <param name="job">Listener that receives progress/error notifications from the Try* helpers.</param>
/// <param name="cancellationToken">Cancellation token (not consulted by this method).</param>
private void WriteBundleContents(IExtractableDatasetBundle datasetBundle, IDataLoadEventListener job, GracefulCancellationToken cancellationToken)
{
    var extractionRoot = _request.GetExtractionDirectory();
    var supportingSqlDir = new DirectoryInfo(Path.Combine(extractionRoot.FullName, SupportingSQLTable.ExtractionFolderName));
    var lookupsDir = extractionRoot.CreateSubdirectory("Lookups");

    // Supporting documents go straight into the extraction root
    foreach (SupportingDocument document in datasetBundle.Documents)
    {
        var succeeded = TryExtractSupportingDocument(document, extractionRoot, job);
        datasetBundle.States[document] = succeeded ? ExtractCommandState.Completed : ExtractCommandState.Crashed;
    }

    // Supporting SQL results go into their own subfolder
    foreach (SupportingSQLTable supportingSql in datasetBundle.SupportingSQL)
    {
        var succeeded = TryExtractSupportingSQLTable(supportingSql, supportingSqlDir, _request.Configuration, job, _dataLoadInfo);
        datasetBundle.States[supportingSql] = succeeded ? ExtractCommandState.Completed : ExtractCommandState.Crashed;
    }

    // Lookup tables go into the "Lookups" subfolder
    foreach (BundledLookupTable lookupTable in datasetBundle.LookupTables)
    {
        var succeeded = TryExtractLookupTable(lookupTable, lookupsDir, job);
        datasetBundle.States[lookupTable] = succeeded ? ExtractCommandState.Completed : ExtractCommandState.Crashed;
    }
}
/// <summary>
/// Resolves the directory extraction artefacts for <paramref name="request"/> should be written
/// to.  For dataset extractions with an <see cref="ExtractionSubdirectoryPattern"/> configured,
/// the pattern is expanded ($c/$i = configuration name/ID, $d/$n = catalogue name/ID,
/// $a = catalogue acronym) beneath the project's extraction directory; otherwise the request's
/// default extraction directory is returned.  The directory is created if it does not exist.
/// </summary>
/// <param name="request">The extraction command to resolve a directory for.</param>
/// <returns>The (existing) directory to extract into.</returns>
public DirectoryInfo GetDirectoryFor(IExtractCommand request)
{
    // No pattern configured, or not a dataset extraction => default location
    if (!(request is IExtractDatasetCommand datasetCommand) || string.IsNullOrWhiteSpace(ExtractionSubdirectoryPattern))
        return request.GetExtractionDirectory();

    var catalogue = datasetCommand.SelectedDataSets.ExtractableDataSet.Catalogue;

    // $a cannot be expanded when the Catalogue has no acronym - fail up front with a clear message
    if (ExtractionSubdirectoryPattern.Contains("$a") && string.IsNullOrWhiteSpace(catalogue.Acronym))
        throw new Exception($"Catalogue {catalogue} does not have an Acronym and ExtractionSubdirectoryPattern contains $a");

    // Replacement order is preserved from the original in case substituted values contain tokens
    var expandedPattern = ExtractionSubdirectoryPattern
        .Replace("$c", QuerySyntaxHelper.MakeHeaderNameSensible(datasetCommand.Configuration.Name))
        .Replace("$i", datasetCommand.Configuration.ID.ToString())
        .Replace("$d", QuerySyntaxHelper.MakeHeaderNameSensible(catalogue.Name))
        .Replace("$a", QuerySyntaxHelper.MakeHeaderNameSensible(catalogue.Acronym))
        .Replace("$n", catalogue.ID.ToString());

    var directory = new DirectoryInfo(Path.Combine(datasetCommand.Project.ExtractionDirectory, expandedPattern));

    if (!directory.Exists)
        directory.Create();

    return directory;
}
/// <summary>
/// Extracts the bundle contents (supporting SQL, supporting documents and lookup tables) that
/// accompany the dataset, recording each item's outcome in the bundle's States dictionary, then
/// marks bundled content as extracted.
/// </summary>
/// <param name="datasetBundle">The bundle whose lookup tables are extracted.</param>
/// <param name="listener">Listener receiving progress and error notifications.</param>
/// <param name="cancellationToken">Cancellation token (not consulted by this method).</param>
private void WriteBundleContents(IExtractableDatasetBundle datasetBundle, IDataLoadEventListener listener, GracefulCancellationToken cancellationToken)
{
    // Cast once - the original re-cast _request for every collection it touched.
    // NOTE(review): bundle and the datasetBundle parameter are presumably the same object; the
    // lookup loop below records into datasetBundle exactly as the original did - confirm.
    var bundle = ((ExtractDatasetCommand)_request).DatasetBundle;

    foreach (var sql in bundle.SupportingSQL)
        bundle.States[sql] = ExtractSupportingSql(sql, listener, _dataLoadInfo);

    // GetExtractionDirectory() is deliberately called per document, as in the original,
    // in case resolving the directory has side effects (e.g. creating it)
    foreach (var document in bundle.Documents)
        bundle.States[document] = ExtractSupportingDocument(_request.GetExtractionDirectory(), document, listener);

    //extract lookups
    foreach (BundledLookupTable lookup in datasetBundle.LookupTables)
    {
        try
        {
            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "About to extract lookup " + lookup));

            ExtractLookupTableSql(lookup, listener, _dataLoadInfo);

            datasetBundle.States[lookup] = ExtractCommandState.Completed;
        }
        catch (Exception e)
        {
            // A failed lookup is recorded but does not abort the remaining lookups
            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Error, "Error occurred trying to extract lookup " + lookup + " on server " + lookup.TableInfo.Server, e));
            datasetBundle.States[lookup] = ExtractCommandState.Crashed;
        }
    }

    haveExtractedBundledContent = true;
}
/// <summary>
/// Captures the extraction request and resolves the output directory.  When
/// CleanExtractionFolderBeforeExtraction is set and this is a dataset extraction, the directory
/// is emptied (deleted and recreated) before any data is written.
/// </summary>
/// <param name="value">The extraction command being initialized against.</param>
/// <param name="listener">Event listener (not used by this method).</param>
public void PreInitialize(IExtractCommand value, IDataLoadEventListener listener)
{
    _request = value;
    DirectoryPopulated = _request.GetExtractionDirectory();

    if (CleanExtractionFolderBeforeExtraction && value is ExtractDatasetCommand)
    {
        // Delete(true) throws DirectoryNotFoundException when the folder does not exist yet
        // (e.g. first ever extraction), so only delete when it is actually present
        if (DirectoryPopulated.Exists)
            DirectoryPopulated.Delete(true);

        DirectoryPopulated.Create();
    }
}
/// <summary>
/// Captures the extraction request, resets the line counter and resolves the output directory
/// before the pipeline runs, then hands off to <c>PreInitializeImpl</c>.  The placeholder
/// EmptyCommand is logged and skipped entirely.
/// </summary>
/// <param name="request">The extraction command being initialized against.</param>
/// <param name="listener">Listener notified when initialization is skipped.</param>
public void PreInitialize(IExtractCommand request, IDataLoadEventListener listener)
{
    _request = request;

    // Nothing to set up for the placeholder command
    if (_request == ExtractDatasetCommand.EmptyCommand)
    {
        listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Request is ExtractDatasetCommand.EmptyCommand, checking will not be carried out"));
        return;
    }

    LinesWritten = 0;
    DirectoryPopulated = request.GetExtractionDirectory();

    // Subclass-specific setup
    PreInitializeImpl(request, listener);
}
/// <summary>
/// Pre-flight checks: validates the configured DateFormat, verifies the Catalogue has an Acronym
/// when UseAcronymForFileNaming is set, and (with user confirmation via the notifier) empties a
/// non-empty extraction directory when CleanExtractionFolderBeforeExtraction is enabled.
/// </summary>
/// <param name="notifier">Receives check results; its answer to the "Delete Files" proposal decides whether the directory is emptied.</param>
public void Check(ICheckNotifier notifier)
{
    if (_request == ExtractDatasetCommand.EmptyCommand)
    {
        notifier.OnCheckPerformed(new CheckEventArgs("Request is ExtractDatasetCommand.EmptyCommand, checking will not be carried out", CheckResult.Warning));
        return;
    }

    // Validate the date format by actually formatting a date with it
    try
    {
        string result = DateTime.Now.ToString(DateFormat);
        notifier.OnCheckPerformed(new CheckEventArgs("DateFormat '" + DateFormat + "' is valid, dates will look like:" + result, CheckResult.Success));
    }
    catch (Exception e)
    {
        notifier.OnCheckPerformed(new CheckEventArgs("DateFormat '" + DateFormat + "' was invalid", CheckResult.Fail, e));
    }

    // Acronym-based file naming requires the Catalogue to actually have an Acronym
    if (UseAcronymForFileNaming && _request is ExtractDatasetCommand dsRequest && string.IsNullOrWhiteSpace(dsRequest.Catalogue.Acronym))
        notifier.OnCheckPerformed(new CheckEventArgs("Catalogue '" + dsRequest.Catalogue + "' does not have an Acronym but UseAcronymForFileNaming is true", CheckResult.Fail));

    if (CleanExtractionFolderBeforeExtraction)
    {
        var rootDir = _request.GetExtractionDirectory();

        // Guard: GetFileSystemInfos throws DirectoryNotFoundException when the folder does not
        // exist yet - and a missing folder is already "clean", so there is nothing to check
        if (!rootDir.Exists)
            return;

        var contents = rootDir.GetFileSystemInfos();

        // Propose deletion to the user; only empty the directory if the notifier accepts the fix
        if (contents.Length > 0 && notifier.OnCheckPerformed(new CheckEventArgs(
                $"Extraction directory '{rootDir.FullName}' contained {contents.Length} files/folders:\r\n {string.Join(Environment.NewLine, contents.Take(100).Select(e => e.Name))}",
                CheckResult.Warning, null, "Delete Files"))
           )
        {
            rootDir.Delete(true);
            rootDir.Create();
        }
    }
}
/// <summary>
/// Captures the extraction request and sets up the flat-file destination.  The placeholder
/// EmptyCommand is skipped; Globals extraction only records the output directory; dataset
/// extraction configures the CSV output file and format.
/// </summary>
/// <param name="request">The extraction command being initialized against.</param>
/// <param name="listener">Listener notified of the chosen destination or of skipped setup.</param>
/// <exception cref="ArgumentOutOfRangeException">If <c>FlatFileType</c> is not a supported type.</exception>
public void PreInitialize(IExtractCommand request, IDataLoadEventListener listener)
{
    _request = request;

    if (_request == ExtractDatasetCommand.EmptyCommand)
    {
        listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Request is ExtractDatasetCommand.EmptyCommand, checking will not be carried out"));
        return;
    }

    // Globals have no per-dataset file; only the output directory is recorded
    if (_request is ExtractGlobalsCommand)
    {
        listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Request is for the extraction of Globals."));
        OutputFile = _request.GetExtractionDirectory().FullName;
        return;
    }

    LinesWritten = 0;
    DirectoryPopulated = request.GetExtractionDirectory();

    switch (FlatFileType)
    {
        case ExecuteExtractionToFlatFileType.CSV:
            OutputFile = Path.Combine(DirectoryPopulated.FullName, GetFilename() + ".csv");

            // Fall back to comma when no Configuration supplies a separator
            var separator = request.Configuration != null ? request.Configuration.Separator : ",";
            _output = new CSVOutputFormat(OutputFile, separator, DateFormat);
            break;
        default:
            // Original threw a parameterless ArgumentOutOfRangeException; include the offending
            // value so the failure is diagnosable
            throw new ArgumentOutOfRangeException(nameof(FlatFileType), FlatFileType, "Unsupported flat file type");
    }

    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Setup data extraction destination as " + OutputFile + " (will not exist yet)"));
}