/// <summary>
/// Resolves the extraction output directory for <paramref name="request"/>.  Dataset extraction
/// commands get a subdirectory under the Project extraction directory built from
/// <see cref="ExtractionSubdirectoryPattern"/> ($c/$i = configuration name/ID, $d/$a/$n =
/// catalogue name/acronym/ID); anything else falls back to the command's own directory.
/// The directory is created if it does not already exist.
/// </summary>
public DirectoryInfo GetDirectoryFor(IExtractCommand request)
{
    var datasetCommand = request as IExtractDatasetCommand;

    // No pattern configured, or not a dataset command: use the default extraction directory
    if (datasetCommand == null || string.IsNullOrWhiteSpace(ExtractionSubdirectoryPattern))
        return request.GetExtractionDirectory();

    var catalogue = datasetCommand.SelectedDataSets.ExtractableDataSet.Catalogue;

    // $a substitution is impossible without an Acronym, so fail loudly up front
    if (ExtractionSubdirectoryPattern.Contains("$a") && string.IsNullOrWhiteSpace(catalogue.Acronym))
        throw new Exception($"Catalogue {catalogue} does not have an Acronym and ExtractionSubdirectoryPattern contains $a");

    var resolvedPattern = ExtractionSubdirectoryPattern
        .Replace("$c", QuerySyntaxHelper.MakeHeaderNameSensible(datasetCommand.Configuration.Name))
        .Replace("$i", datasetCommand.Configuration.ID.ToString())
        .Replace("$d", QuerySyntaxHelper.MakeHeaderNameSensible(catalogue.Name))
        .Replace("$a", QuerySyntaxHelper.MakeHeaderNameSensible(catalogue.Acronym))
        .Replace("$n", catalogue.ID.ToString());

    var dir = new DirectoryInfo(Path.Combine(datasetCommand.Project.ExtractionDirectory, resolvedPattern));

    if (!dir.Exists)
        dir.Create();

    return dir;
}
/// <summary>
/// Serializes the objects gathered for export as share definition (.sd) files: a single
/// object with an explicit file target goes to that file, otherwise one file per object
/// is written into <see cref="TargetDirectoryInfo"/> (which must then be set).
/// </summary>
public override void Execute()
{
    base.Execute();

    // Single object going to a single explicit file
    if (TargetFileInfo != null && IsSingleObject)
    {
        var dependencies = _gatherer.GatherDependencies(_toExport[0]);
        var definitions = dependencies.ToShareDefinitionWithChildren(_shareManager);
        File.WriteAllText(TargetFileInfo.FullName, JsonConvertExtensions.SerializeObject(definitions, _repositoryLocator));
        return;
    }

    if (TargetDirectoryInfo == null)
        throw new Exception("No output directory set");

    // One .sd file per object, named after the (sanitised) object name
    foreach (var obj in _toExport)
    {
        var dependencies = _gatherer.GatherDependencies(obj);
        var definitions = dependencies.ToShareDefinitionWithChildren(_shareManager);
        var serialized = JsonConvertExtensions.SerializeObject(definitions, _repositoryLocator);

        var outputPath = Path.Combine(TargetDirectoryInfo.FullName, QuerySyntaxHelper.MakeHeaderNameSensible(obj.ToString()) + ".sd");
        File.WriteAllText(outputPath, serialized);
    }
}
/// <summary>
/// Colours the table name textbox red when the typed text differs from its sanitised
/// equivalent (i.e. it would not survive <see cref="QuerySyntaxHelper.MakeHeaderNameSensible"/>),
/// black otherwise.  Blank text is left alone.
/// </summary>
private void tbTableName_TextChanged(object sender, EventArgs e)
{
    if (string.IsNullOrWhiteSpace(tbTableName.Text))
        return;

    var saneName = QuerySyntaxHelper.MakeHeaderNameSensible(tbTableName.Text);

    tbTableName.ForeColor = tbTableName.Text.Equals(saneName, StringComparison.CurrentCultureIgnoreCase)
        ? Color.Black
        : Color.Red;
}
/// <summary>
/// Builds an empty <see cref="DataTable"/> with one column per entry in the comma
/// separated <see cref="HeadersToRead"/> list.  When a file is associated (<c>_file</c>)
/// the table is named after the file (sanitised).
/// </summary>
private DataTable GenerateTable()
{
    var table = new DataTable();

    if (_file != null)
        table.TableName = QuerySyntaxHelper.MakeHeaderNameSensible(_file.File.Name);

    // Empty entries (e.g. "a,,b") are skipped rather than producing blank columns
    foreach (var header in HeadersToRead.Split(new[] { ',' }, StringSplitOptions.RemoveEmptyEntries))
        table.Columns.Add(header);

    return table;
}
/// <summary>
/// Updates the Ok button and textbox colour: when sane header text is required, any typed
/// text that differs from its sanitised form disables Ok and turns the textbox red;
/// otherwise Ok is enabled when there is result text (or blank text is allowed).
/// </summary>
private void SetEnabledness()
{
    textBox1.ForeColor = Color.Black;

    var typed = textBox1.Text;

    // Reject (and highlight) names that would not survive sanitisation
    if (RequireSaneHeaderText && !string.IsNullOrWhiteSpace(typed) &&
        !typed.Equals(QuerySyntaxHelper.MakeHeaderNameSensible(typed), StringComparison.CurrentCultureIgnoreCase))
    {
        btnOk.Enabled = false;
        textBox1.ForeColor = Color.Red;
        return;
    }

    btnOk.Enabled = !string.IsNullOrWhiteSpace(ResultText) || _allowBlankText;
}
/// <summary>
/// Transposes <paramref name="inputTable"/>: each input row's first cell becomes an output
/// column header (optionally sanitised when <see cref="MakeHeaderNamesSane"/> is set), and
/// each remaining input column becomes an output row whose first cell is that column's name.
/// </summary>
private DataTable GenerateTransposedTable(DataTable inputTable)
{
    var transposed = new DataTable();

    // First output column keeps the name of the first input column (the header column)
    transposed.Columns.Add(inputTable.Columns[0].ColumnName);

    // Remaining output columns come from the first cell of each input row
    foreach (DataRow sourceRow in inputTable.Rows)
    {
        var newColumnName = sourceRow[0].ToString();

        transposed.Columns.Add(MakeHeaderNamesSane
            ? QuerySyntaxHelper.MakeHeaderNameSensible(newColumnName)
            : newColumnName);
    }

    // One output row per input column (skipping column 0, which became the header column)
    for (int col = 1; col < inputTable.Columns.Count; col++)
    {
        var newRow = transposed.NewRow();
        newRow[0] = inputTable.Columns[col].ColumnName;

        for (int row = 0; row < inputTable.Rows.Count; row++)
            newRow[row + 1] = inputTable.Rows[row][col].ToString();

        transposed.Rows.Add(newRow);
    }

    return transposed;
}
/// <summary>
/// Returns the backup trigger name for the table: the sanitised runtime table name
/// with an "_onupdate" suffix.
/// </summary>
protected virtual object GetTriggerName()
{
    return $"{QuerySyntaxHelper.MakeHeaderNameSensible(_table.GetRuntimeName())}_onupdate";
}
/// <summary>
/// Fetches the next file or directory from the worklist and returns its dicom tag data as a
/// <see cref="DataTable"/> named after the item processed.  Returns null when there is no
/// worklist or the worklist is exhausted.  Timing and progress listeners are updated even
/// when processing throws or returns early.
/// </summary>
public override DataTable GetChunk(IDataLoadEventListener listener, GracefulCancellationToken cancellationToken)
{
    _listener = listener;

    if (_fileWorklist == null)
    {
        listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "Skipping component because _fileWorklist is null"));
        return null;
    }

    _stopwatch.Start();

    var dt = GetDataTable();

    try
    {
        // Worklist exhausted?
        if (!_fileWorklist.GetNextFileOrDirectoryToProcess(out var directory, out var file))
            return null;

        // The worklist must hand back exactly one of file/directory, never both or neither
        if ((file != null) == (directory != null))
            throw new Exception("Expected IDicomProcessListProvider to return either a DirectoryInfo or a FileInfo not both/neither");

        if (file != null)
        {
            dt.TableName = QuerySyntaxHelper.MakeHeaderNameSensible(Path.GetFileNameWithoutExtension(file.FullPath));

            if (file.FullPath != null && file.FullPath.EndsWith(".zip"))
            {
                //Input is a single zip file
                ProcessZipArchive(dt, listener, file.FullPath);
            }
            else
            {
                var dicomFile = file.GetDataset(_zipPool);
                ProcessDataset(file.FullPath, dicomFile.Dataset, dt, listener);
            }
        }
        else
        {
            // Processing a directory
            dt.TableName = QuerySyntaxHelper.MakeHeaderNameSensible(Path.GetFileNameWithoutExtension(directory.Name));

            ProcessDirectoryAsync(dt, directory, listener);
            Task.WaitAll(tasks.ToArray());
        }
    }
    finally
    {
        //stop recording performance
        _stopwatch.Stop();

        //let people know how far through we are
        UpdateProgressListeners();
    }

    return dt;
}
/// <summary>
/// Returns the backup trigger name for the table: the sanitised runtime table name
/// with an "_OnUpdate" suffix.
/// </summary>
private string GetTriggerName()
{
    return $"{QuerySyntaxHelper.MakeHeaderNameSensible(_table.GetRuntimeName())}_OnUpdate";
}
/// <summary>
/// Creates a SQL Server table valued function [_schema].[TableName_Legacy](@index DATETIME)
/// that reconstructs what the live table looked like at the given point in time: rows from
/// the _Archive table whose validity window covers @index, unioned with current live rows
/// that have no matching archive record at that time.  The function's return table reuses
/// the column list scraped out of the archive table's CREATE TABLE statement.
/// </summary>
/// <param name="sqlUsedToCreateArchiveTableSQL">The CREATE TABLE SQL used to build the archive table; its column list is extracted by regex.</param>
/// <param name="con">Open connection on which the CREATE FUNCTION statement is executed.</param>
private void CreateViewOldVersionsTableValuedFunction(string sqlUsedToCreateArchiveTableSQL, DbConnection con)
{
    string columnsInArchive = "";

    var syntaxHelper = new MicrosoftQuerySyntaxHelper();

    // Find where the column list begins inside the archive CREATE TABLE statement
    Match matchStartColumnExtraction = Regex.Match(sqlUsedToCreateArchiveTableSQL, "CREATE TABLE .*\\(");

    if (!matchStartColumnExtraction.Success)
    {
        throw new Exception("Could not find regex match at start of Archive table CREATE SQL");
    }

    int startExtractingColumnsAt = matchStartColumnExtraction.Index + matchStartColumnExtraction.Length;

    //trim off excess crud at start and we should have just the columns bit of the create (plus crud at the end)
    columnsInArchive = sqlUsedToCreateArchiveTableSQL.Substring(startExtractingColumnsAt);

    //trim off excess crud at the end
    columnsInArchive = columnsInArchive.Trim(new[] { ')', '\r', '\n' });

    // Build up the CREATE FUNCTION statement; the returned table mirrors the archive table's columns
    string sqlToRun = string.Format("CREATE FUNCTION [" + _schema + "].[{0}_Legacy]", QuerySyntaxHelper.MakeHeaderNameSensible(_table.GetRuntimeName()));
    sqlToRun += Environment.NewLine;
    sqlToRun += "(" + Environment.NewLine;
    sqlToRun += "\t@index DATETIME" + Environment.NewLine;
    sqlToRun += ")" + Environment.NewLine;
    sqlToRun += "RETURNS @returntable TABLE" + Environment.NewLine;
    sqlToRun += "(" + Environment.NewLine;
    sqlToRun += "/*the return table will follow the structure of the Archive table*/" + Environment.NewLine;
    sqlToRun += columnsInArchive;

    //these were added during transaction so we have to specify them again here because transaction will not have been committed yet
    sqlToRun = sqlToRun.Trim();
    sqlToRun += "," + Environment.NewLine;
    sqlToRun += "\thic_validTo datetime," + Environment.NewLine;
    sqlToRun += "\thic_userID varchar(128),";
    sqlToRun += "\thic_status char(1)";
    sqlToRun += ")" + Environment.NewLine;
    sqlToRun += "AS" + Environment.NewLine;
    sqlToRun += "BEGIN" + Environment.NewLine;
    sqlToRun += Environment.NewLine;

    // Column lists: live columns plus the special load metadata fields, then the archive-only audit fields
    var liveCols = _columns.Select(c => "[" + c.GetRuntimeName() + "]").Union(new String[] { '[' + SpecialFieldNames.DataLoadRunID + ']', '[' + SpecialFieldNames.ValidFrom + ']' }).ToArray();

    string archiveCols = string.Join(",", liveCols) + ",hic_validTo,hic_userID,hic_status";
    string cDotArchiveCols = string.Join(",", liveCols.Select(s => "c." + s));

    // 1) Archived rows whose validity window covers @index (NULL ValidFrom treated as 1899/01/01)
    sqlToRun += "\tINSERT @returntable" + Environment.NewLine;
    sqlToRun += string.Format("\tSELECT " + archiveCols + " FROM [{0}] WHERE @index BETWEEN ISNULL(" + SpecialFieldNames.ValidFrom + ", '1899/01/01') AND hic_validTo" + Environment.NewLine, _archiveTable);
    sqlToRun += Environment.NewLine;

    // 2) Current live rows with no archive record at @index, marked with status 'C'
    sqlToRun += "\tINSERT @returntable" + Environment.NewLine;
    sqlToRun += "\tSELECT " + cDotArchiveCols + ",NULL AS hic_validTo, NULL AS hic_userID, 'C' AS hic_status" + Environment.NewLine; //c is for current
    sqlToRun += string.Format("\tFROM [{0}] c" + Environment.NewLine, _table.GetRuntimeName());
    sqlToRun += "\tLEFT OUTER JOIN @returntable a ON " + Environment.NewLine;

    for (int index = 0; index < _primaryKeys.Length; index++)
    {
        sqlToRun += string.Format("\ta.{0}=c.{0} " + Environment.NewLine, syntaxHelper.EnsureWrapped(_primaryKeys[index].GetRuntimeName())); //add the primary key joins

        if (index + 1 < _primaryKeys.Length)
        {
            sqlToRun += "\tAND" + Environment.NewLine; //add an AND because there are more coming
        }
    }

    sqlToRun += string.Format("\tWHERE a.[{0}] IS NULL -- where archive record doesn't exist" + Environment.NewLine, _primaryKeys.First().GetRuntimeName());
    sqlToRun += "\tAND @index > ISNULL(c." + SpecialFieldNames.ValidFrom + ", '1899/01/01')" + Environment.NewLine;
    sqlToRun += Environment.NewLine;
    sqlToRun += "RETURN" + Environment.NewLine;
    sqlToRun += "END" + Environment.NewLine;

    using (var cmd = _server.GetCommand(sqlToRun, con))
        cmd.ExecuteNonQuery();
}
/// <summary>
/// Reads (or forces) the header record for the CSV file, then normalises the headers:
/// trimming whitespace, discarding trailing blank headers (e.g. "Name,Date,,,") and
/// optionally sanitising names.  Also builds the case-insensitive ignore-column set.
/// May only be called once, while in <c>State.Start</c>; sets <see cref="FileIsEmpty"/>
/// and returns early when the file has no rows at all.
/// </summary>
public void GetHeadersFromFile(CsvReader r)
{
    // Headers may only be read once, right at the start
    if (_state != State.Start)
        throw new Exception("Illegal state, headers cannot be read at state " + _state);

    _state = State.AfterHeadersRead;

    if (string.IsNullOrWhiteSpace(_forceHeaders))
    {
        // No forced headers: take them from the first line of the file
        if (!r.Read())
        {
            FileIsEmpty = true;
            return;
        }

        r.ReadHeader();
        _headers = r.Context.HeaderRecord;
    }
    else
    {
        // User supplied explicit headers, so the file's first row is treated as data
        _headers = _forceHeaders.Split(new[] { r.Configuration.Delimiter }, StringSplitOptions.None);
        r.Configuration.HasHeaderRecord = false;
    }

    // Columns to ignore (trimmed, compared case insensitively)
    IgnoreColumnsList = string.IsNullOrWhiteSpace(_ignoreColumns)
        ? new HashSet<string>()
        : new HashSet<string>(
            _ignoreColumns.Split(new[] { r.Configuration.Delimiter }, StringSplitOptions.None).Select(h => h.Trim()),
            StringComparer.CurrentCultureIgnoreCase);

    // Trim whitespace off every non-blank header
    for (int i = 0; i < _headers.Length; i++)
        if (!string.IsNullOrWhiteSpace(_headers[i]))
            _headers[i] = _headers[i].Trim();

    // Throw away trailing null headers e.g. the header line "Name,Date,,,"
    var trailingNullCount = _headers.Reverse().TakeWhile(s => s.IsBasicallyNull()).Count();
    if (trailingNullCount > 0)
        _headers = _headers.Take(_headers.Length - trailingNullCount).ToArray();

    // Optionally fix up dodgy header names
    if (_makeHeaderNamesSane)
        for (int i = 0; i < _headers.Length; i++)
            _headers[i] = QuerySyntaxHelper.MakeHeaderNameSensible(_headers[i]);
}
public void Test_MakeHeaderNameSensible_Unicode()
{
    // Plain unicode text is left untouched
    Assert.AreEqual("你好", QuerySyntaxHelper.MakeHeaderNameSensible("你好"));

    // Dodgy characters are stripped and the remaining words camel cased
    Assert.AreEqual("你好DropDatabaseBob", QuerySyntaxHelper.MakeHeaderNameSensible("你好; drop database bob;"));
}
[TestCase("once #upon", "onceUpon")] //Dodgy characters are stripped before cammel casing after spaces so 'u' gets cammeled even though it has a symbol before it.
public void TestMakingHeaderNamesSane(string bad, string expectedGood)
    => Assert.AreEqual(expectedGood, QuerySyntaxHelper.MakeHeaderNameSensible(bad));
/// <summary>
/// Shows/hides and enables/disables controls to match the given stage of the
/// pick-file / pick-database / execute workflow.
/// </summary>
/// <param name="state">The workflow stage the form is entering.</param>
private void SetupState(State state)
{
    switch (state)
    {
        case State.SelectFile:

            //turn things off
            pbFile.Visible = false;
            lblFile.Visible = false;
            btnClearFile.Visible = false;
            ragSmileyFile.Visible = false;
            ddPipeline.DataSource = null;
            gbPickPipeline.Enabled = false;
            gbExecute.Enabled = false;
            gbPickDatabase.Enabled = false;
            btnConfirmDatabase.Enabled = false;
            gbTableName.Enabled = false;

            _selectedFile = null;

            //turn things on
            btnBrowse.Visible = true;
            break;

        case State.FileSelected:

            //turn things off
            btnBrowse.Visible = false;
            gbExecute.Enabled = false;

            //turn things on
            pbFile.Visible = true;
            gbPickDatabase.Enabled = true;
            gbTableName.Enabled = true;

            //text of the file they selected
            lblFile.Text = _selectedFile.Name;
            lblFile.Left = pbFile.Right + 2;
            lblFile.Visible = true;

            // Best effort suggestion for a table name; blank on failure so the user can type their own
            try
            {
                tbTableName.Text = QuerySyntaxHelper.MakeHeaderNameSensible(Path.GetFileNameWithoutExtension(_selectedFile.Name));
            }
            catch (Exception)
            {
                tbTableName.Text = String.Empty;
            }

            ragSmileyFile.Visible = true;
            ragSmileyFile.Left = lblFile.Right + 2;

            btnClearFile.Left = ragSmileyFile.Right + 2;
            btnClearFile.Visible = true;

            IdentifyCompatiblePipelines();
            IdentifyCompatibleServers();
            break;

        case State.DatabaseSelected:

            //turn things on
            gbExecute.Enabled = true;
            gbPickDatabase.Enabled = true; //user still might want to change his mind about targets
            btnConfirmDatabase.Enabled = false;
            gbTableName.Enabled = true;
            break;

        default:
            // FIX: nameof(state) instead of the magic string "state" (refactor safe)
            throw new ArgumentOutOfRangeException(nameof(state));
    }
}
/// <summary>
/// Loads the configured worksheet (first sheet when <see cref="WorkSheetName"/> is blank)
/// of the Excel file into a <see cref="DataTable"/> named after the file (sanitised).
/// Optionally appends a column recording the source filename.
/// </summary>
/// <exception cref="Exception">If no file has been set or the extension is not xls/xlsx.</exception>
/// <exception cref="FlatFileLoadException">If the chosen worksheet has no columns.</exception>
private DataTable GetAllData(IDataLoadEventListener listener, GracefulCancellationToken cancellationToken)
{
    if (_fileToLoad == null)
        throw new Exception("_fileToLoad has not been set yet, possibly component has not been Initialized yet");

    if (!IsAcceptableFileExtension())
        throw new Exception("FileToLoad (" + _fileToLoad.File.FullName + ") extension was not XLS or XLSX, dubious");

    using (var fs = new FileStream(_fileToLoad.File.FullName, FileMode.Open))
    {
        // FIX: compare the extension case-insensitively; previously "Data.XLS" failed the
        // == ".xls" check and was fed to the XSSF (xlsx) reader, which cannot parse BIFF files
        IWorkbook wb;
        if (string.Equals(_fileToLoad.File.Extension, ".xls", StringComparison.OrdinalIgnoreCase))
            wb = new HSSFWorkbook(fs); //legacy binary (BIFF) format
        else
            wb = new XSSFWorkbook(fs); //OOXML (xlsx) format

        DataTable toReturn;

        try
        {
            //if the user hasn't picked a worksheet, use the first
            ISheet worksheet = string.IsNullOrWhiteSpace(WorkSheetName) ? wb.GetSheetAt(0) : wb.GetSheet(WorkSheetName);

            toReturn = GetAllData(worksheet, listener);

            //set the table name to the (sanitised) file name
            toReturn.TableName = QuerySyntaxHelper.MakeHeaderNameSensible(Path.GetFileNameWithoutExtension(_fileToLoad.File.Name));

            if (toReturn.Columns.Count == 0)
                throw new FlatFileLoadException(string.Format("The Excel sheet '{0}' in workbook '{1}' is empty", worksheet.SheetName, _fileToLoad.File.Name));

            //if the user wants a column in the DataTable storing the filename loaded add it
            if (!string.IsNullOrWhiteSpace(AddFilenameColumnNamed))
            {
                toReturn.Columns.Add(AddFilenameColumnNamed);

                foreach (DataRow dataRow in toReturn.Rows)
                    dataRow[AddFilenameColumnNamed] = _fileToLoad.File.FullName;
            }
        }
        finally
        {
            wb.Close();
        }

        return toReturn;
    }
}
/// <summary>
/// Exports the gathered objects as share definition (.sd) files.  When running
/// interactively, prompts for whichever target (output file for a single object,
/// output directory otherwise) has not been set; cancelling the prompt aborts.
/// Optionally reveals the output directory in Windows Explorer afterwards.
/// </summary>
public override void Execute()
{
    base.Execute();

    // Prompt interactively for whichever target is still missing
    if (IsSingleObject)
    {
        //Extract a single object (to file)
        if (TargetFileInfo == null && BasicActivator.IsInteractive)
        {
            TargetFileInfo = BasicActivator.SelectFile("Path to output share definition to", "Share Definition", "*.sd");

            // User cancelled the picker
            if (TargetFileInfo == null)
                return;
        }
    }
    else if (TargetDirectoryInfo == null && BasicActivator.IsInteractive)
    {
        TargetDirectoryInfo = BasicActivator.SelectDirectory("Output Directory");

        // User cancelled the picker
        if (TargetDirectoryInfo == null)
            return;
    }

    // Single object going to a single explicit file
    if (TargetFileInfo != null && IsSingleObject)
    {
        var dependencies = _gatherer.GatherDependencies(_toExport[0]);
        var definitions = dependencies.ToShareDefinitionWithChildren(_shareManager);
        File.WriteAllText(TargetFileInfo.FullName, JsonConvertExtensions.SerializeObject(definitions, _repositoryLocator));
        return;
    }

    if (TargetDirectoryInfo == null)
        throw new Exception("No output directory set");

    // One .sd file per object, named after the (sanitised) object name
    foreach (var obj in _toExport)
    {
        var dependencies = _gatherer.GatherDependencies(obj);
        var definitions = dependencies.ToShareDefinitionWithChildren(_shareManager);
        var serialized = JsonConvertExtensions.SerializeObject(definitions, _repositoryLocator);

        var outputPath = Path.Combine(TargetDirectoryInfo.FullName, QuerySyntaxHelper.MakeHeaderNameSensible(obj.ToString()) + ".sd");
        File.WriteAllText(outputPath, serialized);
    }

    if (ShowInExplorer && TargetDirectoryInfo != null)
        UsefulStuff.GetInstance().ShowFolderInWindowsExplorer(TargetDirectoryInfo);
}