public override void Execute()
{
    base.Execute();

    // Single-object export straight to the user supplied file
    if (TargetFileInfo != null && IsSingleObject)
    {
        var gathered = _gatherer.GatherDependencies(_toExport[0]);
        var definitions = gathered.ToShareDefinitionWithChildren(_shareManager);
        var json = JsonConvertExtensions.SerializeObject(definitions, _repositoryLocator);
        File.WriteAllText(TargetFileInfo.FullName, json);
        return;
    }

    if (TargetDirectoryInfo == null)
    {
        throw new Exception("No output directory set");
    }

    // Multi-object export: one .sd file per object, named after its (sanitized) ToString
    foreach (var exportable in _toExport)
    {
        var gathered = _gatherer.GatherDependencies(exportable);
        var definitions = gathered.ToShareDefinitionWithChildren(_shareManager);
        var json = JsonConvertExtensions.SerializeObject(definitions, _repositoryLocator);

        var destination = Path.Combine(
            TargetDirectoryInfo.FullName,
            QuerySyntaxHelper.MakeHeaderNameSane(exportable.ToString()) + ".sd");

        File.WriteAllText(destination, json);
    }
}
private void SetEnabledness()
{
    // Reset to the non-error colour; we turn it red below if validation fails
    textBox1.ForeColor = Color.Black;

    var text = textBox1.Text;

    // When sane header text is required and something has been typed, reject any
    // text that differs from its sanitized form (case-insensitively)
    if (RequireSaneHeaderText && !string.IsNullOrWhiteSpace(text) &&
        !text.Equals(QuerySyntaxHelper.MakeHeaderNameSane(text), StringComparison.CurrentCultureIgnoreCase))
    {
        btnOk.Enabled = false;
        textBox1.ForeColor = Color.Red;
        return;
    }

    // Otherwise OK is available when there is text, or when blank text is explicitly allowed
    btnOk.Enabled = !string.IsNullOrWhiteSpace(text) || _allowBlankText;
}
private DataTable GenerateTransposedTable(DataTable inputTable)
{
    var transposed = new DataTable();

    // The output's first column keeps the input's first column name; every input
    // row's first cell becomes an additional output column header (optionally sanitized)
    transposed.Columns.Add(inputTable.Columns[0].ColumnName);

    foreach (DataRow sourceRow in inputTable.Rows)
    {
        var header = sourceRow[0].ToString();
        transposed.Columns.Add(MakeHeaderNamesSane ? QuerySyntaxHelper.MakeHeaderNameSane(header) : header);
    }

    // Each remaining input column becomes one output row: the column name first,
    // followed by that column's value (as string) from every input row
    for (int col = 1; col < inputTable.Columns.Count; col++)
    {
        var destinationRow = transposed.NewRow();
        destinationRow[0] = inputTable.Columns[col].ColumnName;

        for (int row = 0; row < inputTable.Rows.Count; row++)
        {
            destinationRow[row + 1] = inputTable.Rows[row][col].ToString();
        }

        transposed.Rows.Add(destinationRow);
    }

    return transposed;
}
public void DoWeNeedToMakeTagsSane()
{
    var problematic = new Dictionary<string, string>();

    // Check every tag keyword in the default dicom dictionary against its sanitized form
    foreach (DicomDictionaryEntry entry in DicomDictionary.Default)
    {
        var sane = QuerySyntaxHelper.MakeHeaderNameSane(entry.Keyword);

        if (entry.Keyword != sane)
        {
            problematic.Add(entry.Keyword, sane);
        }
    }

    // List any offenders to help diagnose a failure
    foreach (var pair in problematic)
    {
        Console.WriteLine(pair.Key + "|" + pair.Value);
    }

    //no we don't - every keyword should already be sane
    Assert.AreEqual(0, problematic.Count);
}
public void GetHeadersFromFile(CsvReader r)
{
    // Reads (or synthesizes) the column headers for the csv file being loaded and
    // applies the configured adjustments (ignore list, trimming, trailing-blank
    // removal and optional sanitizing).  Must be the first read operation performed.

    //check state - headers may only be read once, before any data rows
    if (_state != State.Start)
    {
        throw new Exception("Illegal state, headers cannot be read at state " + _state);
    }

    _state = State.AfterHeadersRead;

    //if we are not forcing headers we must get them from the file
    if (string.IsNullOrWhiteSpace(_forceHeaders))
    {
        //read the first record from the file (this will read the header and first row)
        bool empty = !r.Read();

        if (empty)
        {
            FileIsEmpty = true;
            return;
        }

        //get headers from first line of the file
        r.ReadHeader();
        _headers = r.Context.HeaderRecord;
    }
    else
    {
        //user has some specific headers he wants to override with, split on the same
        //delimiter the reader uses; the file's own first line is then treated as data
        _headers = _forceHeaders.Split(new[] { r.Configuration.Delimiter }, StringSplitOptions.None);
        r.Configuration.HasHeaderRecord = false;
    }

    //ignore these columns (trimmed and ignoring case)
    if (!string.IsNullOrWhiteSpace(_ignoreColumns))
    {
        IgnoreColumnsList = new HashSet <string>(
            _ignoreColumns.Split(new[] { r.Configuration.Delimiter }, StringSplitOptions.None)
            .Select(h => h.Trim())
            , StringComparer.CurrentCultureIgnoreCase);
    }
    else
    {
        IgnoreColumnsList = new HashSet <string>();
    }

    //Make adjustments to the headers (trim etc)

    //trim them (blank/whitespace-only headers are left untouched for the null-header pass below)
    for (int i = 0; i < _headers.Length; i++)
    {
        if (!string.IsNullOrWhiteSpace(_headers[i]))
        {
            _headers[i] = _headers[i].Trim();
        }
    }

    //throw away trailing null headers e.g. the header line "Name,Date,,,"
    var trailingNullHeaders = _headers.Reverse().TakeWhile(s => s.IsBasicallyNull()).Count();

    if (trailingNullHeaders > 0)
    {
        _headers = _headers.Take(_headers.Length - trailingNullHeaders).ToArray();
    }

    //and maybe also help them out with a bit of sanity fixing
    if (_makeHeaderNamesSane)
    {
        for (int i = 0; i < _headers.Length; i++)
        {
            _headers[i] = QuerySyntaxHelper.MakeHeaderNameSane(_headers[i]);
        }
    }
}
public override DataTable GetChunk(IDataLoadEventListener listener, GracefulCancellationToken cancellationToken)
{
    _listener = listener;

    if (_fileWorklist == null)
    {
        listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "Skipping component because _fileWorklist is null"));
        return null;
    }

    _stopwatch.Start();

    var dt = base.GetDataTable();

    try
    {
        DirectoryInfo directory;
        FileInfo file;

        // Worklist exhausted means we are done
        if (!_fileWorklist.GetNextFileOrDirectoryToProcess(out directory, out file))
        {
            return null;
        }

        // Name the table after whichever item we were given (file or directory)
        if (file != null && directory == null)
        {
            dt.TableName = QuerySyntaxHelper.MakeHeaderNameSane(Path.GetFileNameWithoutExtension(file.Name));
        }
        else if (directory != null)
        {
            dt.TableName = QuerySyntaxHelper.MakeHeaderNameSane(Path.GetFileNameWithoutExtension(directory.Name));
        }
        else
        {
            throw new Exception("Expected IDicomProcessListProvider to return either a DirectoryInfo or a FileInfo not both/neither");
        }

        if (directory != null)
        {
            // Kick off async processing of the whole directory and wait for completion
            ProcessDirectoryAsync(dt, directory, listener);
            Task.WaitAll(tasks.ToArray());
        }
        else if (file.Extension == ".zip")
        {
            //Input is a single zip file
            ProcessZipArchive(dt, listener, file.FullName);
        }
        else if (file.Extension == ".dcm")
        {
            // Single dicom file
            using (var fs = file.Open(FileMode.Open))
                ProcessFile(fs, dt, file.Name, listener);
        }
        else
        {
            throw new Exception("Expected file to be either .zip or .dcm ");
        }
    }
    finally
    {
        //stop recording performance
        _stopwatch.Stop();

        //let people know how far through we are
        UpdateProgressListeners();
    }

    return dt;
}
private DataTable GetAllData(IDataLoadEventListener listener, GracefulCancellationToken cancellationToken)
{
    // Reads the whole configured Excel worksheet into a DataTable named after the
    // file, optionally appending a column recording the source filename.
    Stopwatch sw = new Stopwatch();
    sw.Start();

    if (_fileToLoad == null)
    {
        throw new Exception("_fileToLoad has not been set yet, possibly component has not been Initialized yet");
    }

    if (!IsAcceptableFileExtension())
    {
        throw new Exception("FileToLoad (" + _fileToLoad.File.FullName + ") extension was not XLS or XLSX, dubious");
    }

    // Open read-only with shared read: FileMode.Open alone defaults to
    // FileAccess.ReadWrite, which fails on read-only files even though we never write.
    using (var fs = new FileStream(_fileToLoad.File.FullName, FileMode.Open, FileAccess.Read, FileShare.Read))
    {
        // .xls is the legacy binary (HSSF) format; everything else accepted by
        // IsAcceptableFileExtension is OOXML (XSSF)
        IWorkbook wb;

        if (_fileToLoad.File.Extension == ".xls")
        {
            wb = new HSSFWorkbook(fs);
        }
        else
        {
            wb = new XSSFWorkbook(fs);
        }

        DataTable toReturn;

        try
        {
            ISheet worksheet;

            //if the user hasn't picked one, use the first
            worksheet = string.IsNullOrWhiteSpace(WorkSheetName) ? wb.GetSheetAt(0) : wb.GetSheet(WorkSheetName);

            // GetSheet returns null for an unknown name; fail with a clear message
            // rather than a NullReferenceException further down
            if (worksheet == null)
            {
                throw new FlatFileLoadException(string.Format("The worksheet '{0}' was not found in workbook '{1}'", WorkSheetName, _fileToLoad.File.Name));
            }

            toReturn = GetAllData(worksheet, listener);

            //set the table name the file name (sanitized)
            toReturn.TableName = QuerySyntaxHelper.MakeHeaderNameSane(Path.GetFileNameWithoutExtension(_fileToLoad.File.Name));

            if (toReturn.Columns.Count == 0)
            {
                throw new FlatFileLoadException(string.Format("The Excel sheet '{0}' in workbook '{1}' is empty", worksheet.SheetName, _fileToLoad.File.Name));
            }

            //if the user wants a column in the DataTable storing the filename loaded add it
            if (!string.IsNullOrWhiteSpace(AddFilenameColumnNamed))
            {
                toReturn.Columns.Add(AddFilenameColumnNamed);

                foreach (DataRow dataRow in toReturn.Rows)
                {
                    dataRow[AddFilenameColumnNamed] = _fileToLoad.File.FullName;
                }
            }
        }
        finally
        {
            // Always release NPOI's hold on the workbook
            wb.Close();
        }

        return toReturn;
    }
}
//Dodgy characters are stripped before camel casing after spaces, so 'u' gets camel-cased even though a symbol precedes it
[TestCase("once #upon", "onceUpon")]
public void TestMakingHeaderNamesSane(string bad, string expectedGood)
{
    var actual = QuerySyntaxHelper.MakeHeaderNameSane(bad);

    Assert.AreEqual(expectedGood, actual);
}
public DataTable GetChunk(IDataLoadEventListener listener, GracefulCancellationToken cancellationToken)
{
    // Returns the next batch of rows from the flat file as a DataTable copy, or null
    // when the file is empty / exhausted.  First call sets up components, opens the
    // file and (optionally) strongly types the working table; later calls just
    // refill the same working table.
    try
    {
        _listener = listener;
        int rowsRead = 0;

        if (_fileToLoad == null)
        {
            throw new Exception(
                "_fileToLoad was not set, it is supposed to be set because of IPipelineRequirement<FlatFileToLoad> - maybe this PreInitialize method was not called?");
        }

        // Headers == null means this is the very first call
        if (Headers == null)
        {
            InitializeComponents();

            //open the file
            OpenFile(_fileToLoad.File);

            if (Headers.FileIsEmpty)
            {
                EventHandlers.FileIsEmpty();
                return (null);
            }
        }

        //if we do not yet have a data table to load
        if (_workingTable == null)
        {
            //create a table with the name of the file
            _workingTable = Headers.GetDataTableWithHeaders(_listener);
            _workingTable.TableName = QuerySyntaxHelper.MakeHeaderNameSane(Path.GetFileNameWithoutExtension(_fileToLoad.File.Name));

            //set the data table to the new untyped but correctly headered table
            SetDataTable(_workingTable);

            //Now we must read some data
            if (StronglyTypeInput && StronglyTypeInputBatchSize != 0)
            {
                // -1 means "use as many rows as possible" when deciding types
                int batchSizeToLoad = StronglyTypeInputBatchSize == -1 ?
                    int.MaxValue : StronglyTypeInputBatchSize;

                // very small batches are clamped to 500 (too few rows to type from)
                if (batchSizeToLoad < 500)
                {
                    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "You set StronglyTypeInputBatchSize to " + batchSizeToLoad + " this will be increased to 500 because that number is too small!", null));
                    batchSizeToLoad = 500;
                }

                //user wants to strongly type input with a custom batch size
                rowsRead = IterativelyBatchLoadDataIntoDataTable(_workingTable, batchSizeToLoad);
            }
            else
            {
                //user does not want to strongly type or is strongly typing with regular batch size
                rowsRead = IterativelyBatchLoadDataIntoDataTable(_workingTable, MaxBatchSize);
            }

            if (StronglyTypeInput)
            {
                StronglyTypeWorkingTable();
            }

            if (rowsRead == 0)
            {
                EventHandlers.FileIsEmpty();
            }
        }
        else
        {
            //this isn't the first pass, so we have everything set up and can just read more data
            //data table has been set so has a good schema or no schema depending on what user wanted, at least it has all the headers etc setup correctly
            //so just clear the rows we loaded last chunk and load more
            _workingTable.Rows.Clear();

            //get more rows
            rowsRead = IterativelyBatchLoadDataIntoDataTable(_workingTable, MaxBatchSize);
        }

        //however we read
        //if rows were not read
        if (rowsRead == 0)
        {
            return (null);//we are done
        }

        //rows were read so return a copy of the DataTable, because we will continually reload the same DataTable schema throughout the file we don't want to give up our reference to good headers incase someone mutilates it
        var copy = _workingTable.Copy();

        // strip out the placeholder columns created for unnamed headers before handing the copy out
        foreach (DataColumn unamed in Headers.UnamedColumns)
        {
            copy.Columns.Remove(unamed.ColumnName);
        }

        return (copy);
    }
    catch (Exception)
    {
        //make sure file is closed if it crashes
        if (_reader != null)
        {
            _reader.Dispose();
        }

        throw;
    }
}
public DataTable ProcessPipelineData(DataTable toProcess, IDataLoadEventListener listener, GracefulCancellationToken cancellationToken)
{
    // Bulk inserts each chunk into the destination table (creating the table on the
    // first chunk if needed) inside a single transaction.  Always returns null -
    // this component is a destination, it does not pass data downstream.
    if (toProcess == null)
    {
        return (null);
    }

    // Optional user-supplied hook that can adjust column type requests before table creation
    IDatabaseColumnRequestAdjuster adjuster = null;

    if (Adjuster != null)
    {
        var constructor = new ObjectConstructor();
        adjuster = (IDatabaseColumnRequestAdjuster)constructor.Construct(Adjuster);
    }

    //work out the table name for the table we are going to create
    if (TargetTableName == null)
    {
        if (string.IsNullOrWhiteSpace(toProcess.TableName))
        {
            throw new Exception("Chunk did not have a TableName, did not know what to call the newly created table");
        }

        TargetTableName = QuerySyntaxHelper.MakeHeaderNameSane(toProcess.TableName);
    }

    ClearPrimaryKeyFromDataTableAndExplicitWriteTypes(toProcess);

    StartAuditIfExists(TargetTableName);

    // fork events to the logging database as well as the caller's listener
    if (_loggingDatabaseListener != null)
    {
        listener = new ForkDataLoadEventListener(listener, _loggingDatabaseListener);
    }

    EnsureTableHasDataInIt(toProcess);

    bool createdTable = false;

    // One-time setup: discover/create the destination table and open the transacted bulk insert
    if (_firstTime)
    {
        bool tableAlreadyExistsButEmpty = false;

        if (!_database.Exists())
        {
            throw new Exception("Database " + _database + " does not exist");
        }

        discoveredTable = _database.ExpectTable(TargetTableName);

        //table already exists
        if (discoveredTable.Exists())
        {
            tableAlreadyExistsButEmpty = true;

            // pre-existing tables are only loadable when empty (unless explicitly allowed)
            if (!AllowLoadingPopulatedTables)
            {
                if (discoveredTable.IsEmpty())
                {
                    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "Found table " + TargetTableName + " already, normally this would forbid you from loading it (data duplication / no primary key etc) but it is empty so we are happy to load it, it will not be created"));
                }
                else
                {
                    throw new Exception("There is already a table called " + TargetTableName + " at the destination " + _database);
                }
            }

            // seed the type dictionary from the existing columns so resize decisions start from reality
            if (AllowResizingColumnsAtUploadTime)
            {
                _dataTypeDictionary = discoveredTable.DiscoverColumns().ToDictionary(k => k.GetRuntimeName(), v => v.GetDataTypeComputer(), StringComparer.CurrentCultureIgnoreCase);
            }
        }
        else
        {
            listener.OnNotify(this,
                new NotifyEventArgs(ProgressEventType.Information, "Determined that the table name " + TargetTableName + " is unique at destination " + _database));
        }

        //create connection to destination
        if (!tableAlreadyExistsButEmpty)
        {
            createdTable = true;

            if (AllowResizingColumnsAtUploadTime)
            {
                _database.CreateTable(out _dataTypeDictionary, TargetTableName, toProcess, ExplicitTypes.ToArray(), true, adjuster);
            }
            else
            {
                _database.CreateTable(TargetTableName, toProcess, ExplicitTypes.ToArray(), true, adjuster);
            }

            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Created table " + TargetTableName + " successfully."));
        }

        _managedConnection = _server.BeginNewTransactedConnection();
        _bulkcopy = discoveredTable.BeginBulkInsert(_managedConnection.ManagedTransaction);

        if (Culture != null)
        {
            _bulkcopy.DateTimeDecider.Culture = Culture;
        }

        _firstTime = false;
    }

    try
    {
        // skip resizing for a freshly created table: its columns already fit this chunk
        if (AllowResizingColumnsAtUploadTime && !createdTable)
        {
            ResizeColumnsIfRequired(toProcess, listener);
        }

        //push the data
        swTimeSpentWritting.Start();

        _affectedRows += _bulkcopy.Upload(toProcess);

        swTimeSpentWritting.Stop();

        listener.OnProgress(this, new ProgressEventArgs("Uploading to " + TargetTableName, new ProgressMeasurement(_affectedRows, ProgressType.Records), swTimeSpentWritting.Elapsed));
    }
    catch (Exception e)
    {
        // abandon the whole transaction - nothing from any chunk is committed
        _managedConnection.ManagedTransaction.AbandonAndCloseConnection();

        if (LoggingServer != null)
        {
            _dataLoadInfo.LogFatalError(GetType().Name, ExceptionHelper.ExceptionToListOfInnerMessages(e, true));
        }

        throw new Exception("Failed to write rows (in transaction) to table " + TargetTableName, e);
    }

    return (null);
}