/// <summary>
/// Builds an empty DataTable whose name comes from the (sanitised) file name and whose
/// columns come from the comma separated <see cref="HeadersToRead"/> list.
/// </summary>
private DataTable GenerateTable()
{
    var table = new DataTable();

    //name the table after the file when one has been supplied
    if (_file != null)
    {
        table.TableName = QuerySyntaxHelper.MakeHeaderNameSensible(_file.File.Name);
    }

    //one column per comma separated header (empty entries skipped)
    var headers = HeadersToRead.Split(new[] { ',' }, StringSplitOptions.RemoveEmptyEntries);

    foreach (var header in headers)
    {
        table.Columns.Add(header);
    }

    return table;
}
/// <summary>
/// Lists the names of all parameters required by the supplied whereSql e.g. @bob = 'bob' would return "@bob" unless
/// there is already a global parameter called @bob. globals is optional, pass in null if there aren't any
/// </summary>
/// <param name="whereSql">the SQL filter WHERE section you want to determine the parameter names in. Should not include the WHERE keyword (only the boolean logic bit)</param>
/// <param name="globals">optional parameter, an enumerable of parameters that already exist in a superscope (i.e. global parameters)</param>
/// <returns>parameter names that are required by the SQL but are not already declared in the globals</returns>
private HashSet<string> GetRequiredParamaterNamesForQuery(string whereSql, IEnumerable<ISqlParameter> globals)
{
    HashSet<string> toReturn = QuerySyntaxHelper.GetAllParameterNamesFromQuery(whereSql);

    //remove any global parameters (these don't need to be created)
    if (globals != null)
    {
        foreach (ISqlParameter globalExtractionFilterParameter in globals)
        {
            //HashSet.Remove is a no-op when the name is absent so a Contains pre-check is redundant
            toReturn.Remove(globalExtractionFilterParameter.ParameterName);
        }
    }

    return toReturn;
}
/// <summary>
/// Updates the OK button and text box colouring based on whether the typed text is valid.
/// When <see cref="RequireSaneHeaderText"/> is set, text that differs from its sanitised form
/// turns red and disables OK.
/// </summary>
private void SetEnabledness()
{
    textBox1.ForeColor = Color.Black;

    //if there's some text typed and we want typed text to be sane
    if (RequireSaneHeaderText && !string.IsNullOrWhiteSpace(textBox1.Text))
    {
        var saneVersion = QuerySyntaxHelper.MakeHeaderNameSensible(textBox1.Text);

        //if what they typed doesn't match the sane version, forbid OK and flag the text red
        if (!textBox1.Text.Equals(saneVersion, StringComparison.CurrentCultureIgnoreCase))
        {
            btnOk.Enabled = false;
            textBox1.ForeColor = Color.Red;
            return;
        }
    }

    //otherwise OK is allowed when there is text (or blank text is explicitly permitted)
    btnOk.Enabled = !string.IsNullOrWhiteSpace(ResultText) || _allowBlankText;
}
/// <summary>
/// Returns a new DataTable that is the transpose of <paramref name="inputTable"/>: the input's first
/// column supplies the output's column headers (optionally sanitised) and each remaining input column
/// becomes an output row.  All cell values are written as strings.
/// </summary>
/// <param name="inputTable">table to transpose, first column is treated as the header column</param>
/// <returns>the transposed table</returns>
private DataTable GenerateTransposedTable(DataTable inputTable)
{
    DataTable outputTable = new DataTable();

    // Add columns by looping rows

    // Header row's first column is same as in inputTable
    outputTable.Columns.Add(inputTable.Columns[0].ColumnName);

    // Header row's second column onwards, 'inputTable's first column taken
    foreach (DataRow inRow in inputTable.Rows)
    {
        string newColName = inRow[0].ToString();

        if (MakeHeaderNamesSane)
        {
            newColName = QuerySyntaxHelper.MakeHeaderNameSensible(newColName);
        }

        outputTable.Columns.Add(newColName);
    }

    // Add rows by looping columns (idiomatic '< Count' bounds instead of '<= Count - 1';
    // column 0 is skipped because it supplied the new headers above)
    for (int rCount = 1; rCount < inputTable.Columns.Count; rCount++)
    {
        DataRow newRow = outputTable.NewRow();

        // First column is inputTable's Header row's second column
        newRow[0] = inputTable.Columns[rCount].ColumnName;

        for (int cCount = 0; cCount < inputTable.Rows.Count; cCount++)
        {
            string colValue = inputTable.Rows[cCount][rCount].ToString();
            newRow[cCount + 1] = colValue;
        }

        outputTable.Rows.Add(newRow);
    }

    return outputTable;
}
/// <summary>
/// Asserts that no keyword in the default DICOM dictionary is changed by
/// <c>QuerySyntaxHelper.MakeHeaderNameSane</c> (i.e. all tags are already 'sane').
/// Any offenders are written to the console before the assertion fires.
/// </summary>
public void DoWeNeedToMakeTagsSane()
{
    Dictionary<string, string> dodgy = new Dictionary<string, string>();

    //For all tags
    foreach (DicomDictionaryEntry entry in DicomDictionary.Default)
    {
        //compute the sanitised keyword once per entry instead of twice
        var sane = QuerySyntaxHelper.MakeHeaderNameSane(entry.Keyword);

        //record any keyword the sanitiser would alter
        if (entry.Keyword != sane)
        {
            dodgy.Add(entry.Keyword, sane);
        }
    }

    foreach (var kvp in dodgy)
    {
        Console.WriteLine(kvp.Key + "|" + kvp.Value);
    }

    //no we don't
    Assert.AreEqual(0, dodgy.Count);
}
/// <summary>
/// Converts <paramref name="o"/> to its string representation for CSV style output.  Nulls/DBNulls
/// become null; strings that parse as dates are (when <paramref name="allowDates"/> is set) routed
/// through the DateTime overload; everything else is ToString'd then escaped.
/// </summary>
/// <param name="o">value to represent, may be null or DBNull</param>
/// <param name="allowDates">when true, string values that parse as DateTime are serialised via the DateTime overload</param>
/// <param name="escaper">optional syntax helper used to escape the result; when null quotes are doubled up CSV-style</param>
/// <returns>string representation, or null for null/DBNull input</returns>
private static string GetStringRepresentation(object o, bool allowDates, QuerySyntaxHelper escaper = null)
{
    if (o == null || o == DBNull.Value)
    {
        return null;
    }

    //pattern matching replaces the old 'as' cast + null check idiom
    if (allowDates && o is string s && DateTime.TryParse(s, out DateTime parsedDate))
    {
        return GetStringRepresentation(parsedDate);
    }

    if (o is DateTime dateTime)
    {
        return GetStringRepresentation(dateTime);
    }

    var str = o.ToString();

    //escape via the helper when provided, otherwise double up quotes
    str = escaper != null ? escaper.Escape(str) : str.Replace("\"", "\"\"");

    return str;
}
/// <summary>
/// Fetches the next file or directory from the worklist and returns its contents as a DataTable
/// named after the sanitised file/directory name.  Returns null when the worklist is null or exhausted.
/// </summary>
/// <param name="listener">event listener notified of warnings/progress</param>
/// <param name="cancellationToken">not consulted in this body — TODO confirm cancellation is handled elsewhere</param>
public override DataTable GetChunk(IDataLoadEventListener listener, GracefulCancellationToken cancellationToken)
{
    _listener = listener;

    if (_fileWorklist == null)
    {
        listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "Skipping component because _fileWorklist is null"));
        return(null);
    }

    //record time spent; stopped again in finally
    _stopwatch.Start();

    var dt = base.GetDataTable();

    try
    {
        FileInfo file;
        DirectoryInfo directory;

        //false means the worklist is exhausted
        if (!_fileWorklist.GetNextFileOrDirectoryToProcess(out directory, out file))
        {
            return(null);
        }

        //exactly one of file/directory is expected; name the table after whichever came back
        if (file != null && directory == null)
        {
            dt.TableName = QuerySyntaxHelper.MakeHeaderNameSane(Path.GetFileNameWithoutExtension(file.Name));
        }
        else if (directory != null)
        {
            dt.TableName = QuerySyntaxHelper.MakeHeaderNameSane(Path.GetFileNameWithoutExtension(directory.Name));
        }
        else
        {
            throw new Exception("Expected IDicomProcessListProvider to return either a DirectoryInfo or a FileInfo not both/neither");
        }

        if (directory != null)
        {
            //process the directory's contents via tasks then wait for them all to finish
            ProcessDirectoryAsync(dt, directory, listener);
            Task.WaitAll(tasks.ToArray());
        }
        else //Input is a single zip file
        if (file.Extension == ".zip")
        {
            ProcessZipArchive(dt, listener, file.FullName);
        }
        else if (file.Extension == ".dcm")
        {
            //single dicom file: open, process, and dispose the stream
            using (var fs = file.Open(FileMode.Open))
                ProcessFile(fs, dt, file.Name, listener);
        }
        else
        {
            throw new Exception("Expected file to be either .zip or .dcm ");
        }
    }
    finally
    {
        //stop recording performance
        _stopwatch.Stop();

        //let people know how far through we are
        UpdateProgressListeners();
    }

    return(dt);
}
/// <summary>
/// Returns the name for the OnUpdate trigger: the sanitised runtime table name suffixed with "_OnUpdate".
/// </summary>
private string GetTriggerName()
{
    var sensibleTableName = QuerySyntaxHelper.MakeHeaderNameSensible(_table.GetRuntimeName());
    return sensibleTableName + "_OnUpdate";
}
/// <summary>
/// Writes <paramref name="sourceTable"/> to <paramref name="writer"/> as comma separated quoted values,
/// optionally preceded by a header line.  Column types are first guessed so that date-like columns can
/// be serialised via the date-aware string representation.
/// </summary>
/// <param name="sourceTable">table whose rows are written out</param>
/// <param name="writer">destination writer, flushed before returning</param>
/// <param name="includeHeaders">true to emit a first line of quoted column names</param>
/// <param name="escaper">optional syntax helper forwarded to GetStringRepresentation for value escaping</param>
public static void WriteDataTable(DataTable sourceTable, TextWriter writer, bool includeHeaders, QuerySyntaxHelper escaper = null)
{
    if (includeHeaders)
    {
        var headerValues = sourceTable.Columns
                           .OfType<DataColumn>()
                           .Select(column => QuoteValue(column.ColumnName));

        writer.WriteLine(String.Join(",", headerValues));
    }

    //run a Guesser over each column so we know which ones hold DateTime data
    var typeDictionary = sourceTable.Columns.Cast<DataColumn>().ToDictionary(c => c, c => new Guesser());

    foreach (var kvp in typeDictionary)
    {
        kvp.Value.AdjustToCompensateForValues(kvp.Key);
    }

    //one output line per row, each cell quoted
    foreach (DataRow row in sourceTable.Rows)
    {
        var cells = new List<string>();

        foreach (DataColumn col in sourceTable.Columns)
        {
            var treatAsDate = typeDictionary[col].Guess.CSharpType == typeof(DateTime);
            cells.Add(QuoteValue(GetStringRepresentation(row[col], treatAsDate, escaper)));
        }

        writer.WriteLine(String.Join(",", cells));
    }

    writer.Flush();
}
/// <summary>
/// Destination component: uploads the chunk <paramref name="toProcess"/> to the target database table.
/// On the first chunk the table is discovered or created and a transacted bulk insert is opened; later
/// chunks reuse that connection.  Always returns null (nothing flows downstream of a destination).
/// </summary>
public DataTable ProcessPipelineData(DataTable toProcess, IDataLoadEventListener listener, GracefulCancellationToken cancellationToken)
{
    if (toProcess == null)
    {
        return(null);
    }

    //construct the (optional) user supplied adjuster which can tweak column requests at table creation time
    IDatabaseColumnRequestAdjuster adjuster = null;
    if (Adjuster != null)
    {
        var constructor = new ObjectConstructor();
        adjuster = (IDatabaseColumnRequestAdjuster)constructor.Construct(Adjuster);
    }

    //work out the table name for the table we are going to create
    if (TargetTableName == null)
    {
        if (string.IsNullOrWhiteSpace(toProcess.TableName))
        {
            throw new Exception("Chunk did not have a TableName, did not know what to call the newly created table");
        }

        TargetTableName = QuerySyntaxHelper.MakeHeaderNameSane(toProcess.TableName);
    }

    ClearPrimaryKeyFromDataTableAndExplicitWriteTypes(toProcess);

    StartAuditIfExists(TargetTableName);

    //fork events into the logging database listener as well, when one is configured
    if (_loggingDatabaseListener != null)
    {
        listener = new ForkDataLoadEventListener(listener, _loggingDatabaseListener);
    }

    EnsureTableHasDataInIt(toProcess);

    bool createdTable = false;

    //first chunk only: discover/create the destination table and open the transacted bulk insert
    if (_firstTime)
    {
        //NOTE(review): this flag is set true whenever the table exists, even when it is populated
        //(that case either throws below or is allowed via AllowLoadingPopulatedTables) — name is misleading
        bool tableAlreadyExistsButEmpty = false;

        if (!_database.Exists())
        {
            throw new Exception("Database " + _database + " does not exist");
        }

        discoveredTable = _database.ExpectTable(TargetTableName);

        //table already exists
        if (discoveredTable.Exists())
        {
            tableAlreadyExistsButEmpty = true;

            if (!AllowLoadingPopulatedTables)
            {
                if (discoveredTable.IsEmpty())
                {
                    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "Found table " + TargetTableName + " already, normally this would forbid you from loading it (data duplication / no primary key etc) but it is empty so we are happy to load it, it will not be created"));
                }
                else
                {
                    throw new Exception("There is already a table called " + TargetTableName + " at the destination " + _database);
                }
            }

            //snapshot existing column types (case-insensitive keys) so later chunks can be resized against them
            if (AllowResizingColumnsAtUploadTime)
            {
                _dataTypeDictionary = discoveredTable.DiscoverColumns().ToDictionary(k => k.GetRuntimeName(), v => v.GetDataTypeComputer(), StringComparer.CurrentCultureIgnoreCase);
            }
        }
        else
        {
            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Determined that the table name " + TargetTableName + " is unique at destination " + _database));
        }

        //create connection to destination
        if (!tableAlreadyExistsButEmpty)
        {
            createdTable = true;

            if (AllowResizingColumnsAtUploadTime)
            {
                _database.CreateTable(out _dataTypeDictionary, TargetTableName, toProcess, ExplicitTypes.ToArray(), true, adjuster);
            }
            else
            {
                _database.CreateTable(TargetTableName, toProcess, ExplicitTypes.ToArray(), true, adjuster);
            }

            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Created table " + TargetTableName + " successfully."));
        }

        _managedConnection = _server.BeginNewTransactedConnection();
        _bulkcopy = discoveredTable.BeginBulkInsert(_managedConnection.ManagedTransaction);

        //apply the user supplied culture to date parsing during upload
        if (Culture != null)
        {
            _bulkcopy.DateTimeDecider.Culture = Culture;
        }

        _firstTime = false;
    }

    try
    {
        //only resize when the table pre-existed this load (a table we just created is skipped via createdTable)
        if (AllowResizingColumnsAtUploadTime && !createdTable)
        {
            ResizeColumnsIfRequired(toProcess, listener);
        }

        //push the data
        swTimeSpentWritting.Start();
        _affectedRows += _bulkcopy.Upload(toProcess);
        swTimeSpentWritting.Stop();

        listener.OnProgress(this, new ProgressEventArgs("Uploading to " + TargetTableName, new ProgressMeasurement(_affectedRows, ProgressType.Records), swTimeSpentWritting.Elapsed));
    }
    catch (Exception e)
    {
        //abandon the transaction, log the failure (when logging is configured) and rethrow with context
        _managedConnection.ManagedTransaction.AbandonAndCloseConnection();

        if (LoggingServer != null)
        {
            _dataLoadInfo.LogFatalError(GetType().Name, ExceptionHelper.ExceptionToListOfInnerMessages(e, true));
        }

        throw new Exception("Failed to write rows (in transaction) to table " + TargetTableName, e);
    }

    return(null);
}
/// <summary>
/// Creates a SQL Server table valued function ([schema].[TableName_Legacy]) that, given a DATETIME @index,
/// returns the rows of the table as they existed at that moment: archived versions whose validity window
/// covers @index, plus current rows that have no matching archive record.  The function's return table
/// structure is scraped out of the archive table's CREATE SQL.
/// </summary>
/// <param name="sqlUsedToCreateArchiveTableSQL">the CREATE TABLE SQL used to build the archive table; its column list is reused here</param>
/// <param name="con">open connection on which to execute the CREATE FUNCTION</param>
private void CreateViewOldVersionsTableValuedFunction(string sqlUsedToCreateArchiveTableSQL, DbConnection con)
{
    string columnsInArchive = "";

    var syntaxHelper = new MicrosoftQuerySyntaxHelper();

    //locate the start of the column list inside the CREATE TABLE statement
    Match matchStartColumnExtraction = Regex.Match(sqlUsedToCreateArchiveTableSQL, "CREATE TABLE .*\\(");

    if (!matchStartColumnExtraction.Success)
    {
        throw new Exception("Could not find regex match at start of Archive table CREATE SQL");
    }

    int startExtractingColumnsAt = matchStartColumnExtraction.Index + matchStartColumnExtraction.Length;

    //trim off excess crud at start and we should have just the columns bit of the create (plus crud at the end)
    columnsInArchive = sqlUsedToCreateArchiveTableSQL.Substring(startExtractingColumnsAt);

    //trim off excess crud at the end
    columnsInArchive = columnsInArchive.Trim(new[] { ')', '\r', '\n' });

    //build the CREATE FUNCTION statement, named after the sanitised table name
    string sqlToRun = string.Format("CREATE FUNCTION [" + _schema + "].[{0}_Legacy]", QuerySyntaxHelper.MakeHeaderNameSensible(_table.GetRuntimeName()));
    sqlToRun += Environment.NewLine;
    sqlToRun += "(" + Environment.NewLine;
    sqlToRun += "\t@index DATETIME" + Environment.NewLine;
    sqlToRun += ")" + Environment.NewLine;
    sqlToRun += "RETURNS @returntable TABLE" + Environment.NewLine;
    sqlToRun += "(" + Environment.NewLine;
    sqlToRun += "/*the return table will follow the structure of the Archive table*/" + Environment.NewLine;
    sqlToRun += columnsInArchive;

    //these were added during transaction so we have to specify them again here because transaction will not have been committed yet
    sqlToRun = sqlToRun.Trim();
    sqlToRun += "," + Environment.NewLine;
    sqlToRun += "\thic_validTo datetime," + Environment.NewLine;
    sqlToRun += "\thic_userID varchar(128),";
    sqlToRun += "\thic_status char(1)";
    sqlToRun += ")" + Environment.NewLine;
    sqlToRun += "AS" + Environment.NewLine;
    sqlToRun += "BEGIN" + Environment.NewLine;
    sqlToRun += Environment.NewLine;

    //live columns plus the special load-tracking columns, each wrapped in []
    var liveCols = _columns.Select(c => "[" + c.GetRuntimeName() + "]").Union(new String[] { '[' +
        SpecialFieldNames.DataLoadRunID + ']', '[' + SpecialFieldNames.ValidFrom + ']' }).ToArray();

    string archiveCols = string.Join(",", liveCols) + ",hic_validTo,hic_userID,hic_status";
    string cDotArchiveCols = string.Join(",", liveCols.Select(s => "c." + s));

    //first insert: archived versions whose validity window covers @index
    sqlToRun += "\tINSERT @returntable" + Environment.NewLine;
    sqlToRun += string.Format("\tSELECT " + archiveCols + " FROM [{0}] WHERE @index BETWEEN ISNULL(" + SpecialFieldNames.ValidFrom + ", '1899/01/01') AND hic_validTo" + Environment.NewLine, _archiveTable);
    sqlToRun += Environment.NewLine;

    //second insert: current rows that have no archive record covering @index
    sqlToRun += "\tINSERT @returntable" + Environment.NewLine;
    sqlToRun += "\tSELECT " + cDotArchiveCols + ",NULL AS hic_validTo, NULL AS hic_userID, 'C' AS hic_status" + Environment.NewLine; //c is for current
    sqlToRun += string.Format("\tFROM [{0}] c" + Environment.NewLine, _table.GetRuntimeName());
    sqlToRun += "\tLEFT OUTER JOIN @returntable a ON " + Environment.NewLine;

    for (int index = 0; index < _primaryKeys.Length; index++)
    {
        sqlToRun += string.Format("\ta.{0}=c.{0} " + Environment.NewLine, syntaxHelper.EnsureWrapped(_primaryKeys[index].GetRuntimeName())); //add the primary key joins

        if (index + 1 < _primaryKeys.Length)
        {
            sqlToRun += "\tAND" + Environment.NewLine; //add an AND because there are more coming
        }
    }

    sqlToRun += string.Format("\tWHERE a.[{0}] IS NULL -- where archive record doesn't exist" + Environment.NewLine, _primaryKeys.First().GetRuntimeName());
    sqlToRun += "\tAND @index > ISNULL(c." + SpecialFieldNames.ValidFrom + ", '1899/01/01')" + Environment.NewLine;
    sqlToRun += Environment.NewLine;
    sqlToRun += "RETURN" + Environment.NewLine;
    sqlToRun += "END" + Environment.NewLine;

    using (var cmd = _server.GetCommand(sqlToRun, con))
        cmd.ExecuteNonQuery();
}
/// <summary>
/// Asserts that no parameter names are detected in the supplied SQL.
/// </summary>
public void TestExtractionOfParmaetersFromSQL_NoneOne(string sql)
{
    var parameterNames = QuerySyntaxHelper.GetAllParameterNamesFromQuery(sql);

    Assert.AreEqual(0, parameterNames.Count);
}
/// <summary>
/// Confirms MakeHeaderNameSensible passes ordinary unicode through untouched and strips
/// dodgy characters (camel casing the remaining words) from hostile input.
/// </summary>
public void Test_MakeHeaderNameSensible_Unicode()
{
    //normal unicode is fine
    var unchanged = QuerySyntaxHelper.MakeHeaderNameSensible("你好");
    Assert.AreEqual("你好", unchanged);

    //hostile input has its symbols stripped and words camel cased
    var sanitised = QuerySyntaxHelper.MakeHeaderNameSensible("你好; drop database bob;");
    Assert.AreEqual("你好DropDatabaseBob", sanitised);
}
/// <summary>
/// Reads the next batch of rows from the flat file into a DataTable.  The first call opens the file,
/// reads headers and (optionally) strongly types the working table; subsequent calls just clear and
/// refill the same table.  Returns a copy of the working table, or null when the file is exhausted.
/// </summary>
public DataTable GetChunk(IDataLoadEventListener listener, GracefulCancellationToken cancellationToken)
{
    try
    {
        _listener = listener;

        int rowsRead = 0;

        if (_fileToLoad == null)
        {
            throw new Exception(
                "_fileToLoad was not set, it is supposed to be set because of IPipelineRequirement<FlatFileToLoad> - maybe this PreInitialize method was not called?");
        }

        //first ever call: open the file and read its headers
        if (Headers == null)
        {
            InitializeComponents();

            //open the file
            OpenFile(_fileToLoad.File);

            if (Headers.FileIsEmpty)
            {
                EventHandlers.FileIsEmpty();
                return(null);
            }
        }

        //if we do not yet have a data table to load
        if (_workingTable == null)
        {
            //create a table with the name of the file
            _workingTable = Headers.GetDataTableWithHeaders(_listener);
            _workingTable.TableName = QuerySyntaxHelper.MakeHeaderNameSane(Path.GetFileNameWithoutExtension(_fileToLoad.File.Name));

            //set the data table to the new untyped but correctly headered table
            SetDataTable(_workingTable);

            //Now we must read some data
            if (StronglyTypeInput && StronglyTypeInputBatchSize != 0)
            {
                //-1 means 'read the whole file in one go'
                int batchSizeToLoad = StronglyTypeInputBatchSize == -1 ?
                    int.MaxValue : StronglyTypeInputBatchSize;

                //enforce a sane minimum batch size for type guessing
                if (batchSizeToLoad < 500)
                {
                    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "You set StronglyTypeInputBatchSize to " + batchSizeToLoad + " this will be increased to 500 because that number is too small!", null));
                    batchSizeToLoad = 500;
                }

                //user wants to strongly type input with a custom batch size
                rowsRead = IterativelyBatchLoadDataIntoDataTable(_workingTable, batchSizeToLoad);
            }
            else
            {
                //user does not want to strongly type or is strongly typing with regular batch size
                rowsRead = IterativelyBatchLoadDataIntoDataTable(_workingTable, MaxBatchSize);
            }

            if (StronglyTypeInput)
            {
                StronglyTypeWorkingTable();
            }

            if (rowsRead == 0)
            {
                EventHandlers.FileIsEmpty();
            }
        }
        else
        {
            //this isn't the first pass, so we have everything set up and can just read more data

            //data table has been set so has a good schema or no schema depending on what user wanted, at least it has all the headers etc setup correctly
            //so just clear the rows we loaded last chunk and load more
            _workingTable.Rows.Clear();

            //get more rows
            rowsRead = IterativelyBatchLoadDataIntoDataTable(_workingTable, MaxBatchSize);
        }

        //however we read
        //if rows were not read
        if (rowsRead == 0)
        {
            return(null);//we are done
        }

        //rows were read so return a copy of the DataTable, because we will continually reload the same DataTable schema throughout the file we don't want to give up our reference to good headers in case someone mutates it
        var copy = _workingTable.Copy();

        foreach (DataColumn unamed in Headers.UnamedColumns)
        {
            copy.Columns.Remove(unamed.ColumnName);
        }

        return(copy);
    }
    catch (Exception)
    {
        //make sure file is closed if it crashes
        if (_reader != null)
        {
            _reader.Dispose();
        }

        throw;
    }
}
/// <summary>
/// Fetches the next file or directory from the worklist, processes it (zip archive, single dicom
/// dataset, or whole directory) and returns the resulting DataTable named after the sanitised
/// file/directory name.  Returns null when the worklist is null or exhausted.
/// </summary>
public override DataTable GetChunk(IDataLoadEventListener listener, GracefulCancellationToken cancellationToken)
{
    _listener = listener;

    if (_fileWorklist == null)
    {
        listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "Skipping component because _fileWorklist is null"));
        return null;
    }

    //record time spent; stopped again in finally
    _stopwatch.Start();

    var dt = GetDataTable();

    try
    {
        //false means the worklist is exhausted
        if (!_fileWorklist.GetNextFileOrDirectoryToProcess(out var directory, out var file))
            return null;

        // Exactly one of file/directory must be null:
        if ((file != null) == (directory != null))
            throw new Exception("Expected IDicomProcessListProvider to return either a DirectoryInfo or a FileInfo not both/neither");

        if (file != null)
        {
            dt.TableName = QuerySyntaxHelper.MakeHeaderNameSensible(Path.GetFileNameWithoutExtension(file.FullPath));

            if (file.FullPath != null && file.FullPath.EndsWith(".zip"))
            {
                //Input is a single zip file
                ProcessZipArchive(dt, listener, file.FullPath);
            }
            else
            {
                //single dicom dataset
                var df = file.GetDataset(_zipPool);
                ProcessDataset(file.FullPath, df.Dataset, dt, listener);
            }
        }
        else
        {
            // Processing a directory (the exclusivity check above guarantees directory != null here)
            dt.TableName = QuerySyntaxHelper.MakeHeaderNameSensible(Path.GetFileNameWithoutExtension(directory.Name));

            ProcessDirectoryAsync(dt, directory, listener);
            Task.WaitAll(tasks.ToArray());
        }
    }
    finally
    {
        //stop recording performance
        _stopwatch.Stop();

        //let people know how far through we are
        UpdateProgressListeners();
    }

    return dt;
}
/// <summary>
/// Opens the Excel workbook (.xls via HSSF, otherwise XSSF), reads the chosen (or first) worksheet
/// into a DataTable named after the file, and optionally appends a column recording the source
/// filename on every row.  Throws if the sheet has no columns.
/// </summary>
private DataTable GetAllData(IDataLoadEventListener listener, GracefulCancellationToken cancellationToken)
{
    Stopwatch sw = new Stopwatch();
    sw.Start();

    if (_fileToLoad == null)
        throw new Exception("_fileToLoad has not been set yet, possibly component has not been Initialized yet");

    if (!IsAcceptableFileExtension())
        throw new Exception("FileToLoad (" + _fileToLoad.File.FullName + ") extension was not XLS or XLSX, dubious");

    using (var fs = new FileStream(_fileToLoad.File.FullName, FileMode.Open))
    {
        //pick the NPOI implementation matching the file format
        IWorkbook workbook = _fileToLoad.File.Extension == ".xls"
            ? (IWorkbook)new HSSFWorkbook(fs)
            : new XSSFWorkbook(fs);

        DataTable toReturn;

        try
        {
            //if the user hasn't picked one, use the first
            ISheet worksheet = string.IsNullOrWhiteSpace(WorkSheetName)
                ? workbook.GetSheetAt(0)
                : workbook.GetSheet(WorkSheetName);

            toReturn = GetAllData(worksheet, listener);

            //set the table name the file name
            toReturn.TableName = QuerySyntaxHelper.MakeHeaderNameSane(Path.GetFileNameWithoutExtension(_fileToLoad.File.Name));

            if (toReturn.Columns.Count == 0)
                throw new FlatFileLoadException(string.Format("The Excel sheet '{0}' in workbook '{1}' is empty", worksheet.SheetName, _fileToLoad.File.Name));

            //if the user wants a column in the DataTable storing the filename loaded add it
            if (!string.IsNullOrWhiteSpace(AddFilenameColumnNamed))
            {
                toReturn.Columns.Add(AddFilenameColumnNamed);

                foreach (DataRow dataRow in toReturn.Rows)
                    dataRow[AddFilenameColumnNamed] = _fileToLoad.File.FullName;
            }
        }
        finally
        {
            workbook.Close();
        }

        return toReturn;
    }
}
/// <summary>
/// Shows/hides and enables/disables the form controls to match the given wizard <paramref name="state"/>
/// (file not yet picked, file picked, database picked).
/// </summary>
/// <param name="state">the stage the user has reached</param>
private void SetupState(State state)
{
    switch (state)
    {
        case State.SelectFile:

            //turn things off
            pbFile.Visible = false;
            lblFile.Visible = false;
            btnClearFile.Visible = false;
            ragSmileyFile.Visible = false;
            ddPipeline.DataSource = null;
            gbPickPipeline.Enabled = false;
            gbExecute.Enabled = false;
            gbPickDatabase.Enabled = false;
            btnConfirmDatabase.Enabled = false;
            gbTableName.Enabled = false;

            _selectedFile = null;

            //turn things on
            btnBrowse.Visible = true;

            break;
        case State.FileSelected:

            //turn things off
            btnBrowse.Visible = false;
            gbExecute.Enabled = false;

            //turn things on
            pbFile.Visible = true;
            gbPickDatabase.Enabled = true;
            gbTableName.Enabled = true;

            //text of the file they selected
            lblFile.Text = _selectedFile.Name;
            lblFile.Left = pbFile.Right + 2;
            lblFile.Visible = true;

            //suggest a sanitised table name based on the file name (blank if it cannot be sanitised)
            try
            {
                tbTableName.Text = QuerySyntaxHelper.MakeHeaderNameSensible(Path.GetFileNameWithoutExtension(_selectedFile.Name));
            }
            catch (Exception)
            {
                tbTableName.Text = String.Empty;
            }

            ragSmileyFile.Visible = true;
            ragSmileyFile.Left = lblFile.Right + 2;

            btnClearFile.Left = ragSmileyFile.Right + 2;
            btnClearFile.Visible = true;

            IdentifyCompatiblePipelines();
            IdentifyCompatibleServers();

            break;
        case State.DatabaseSelected:

            //turn things off

            //turn things on
            gbExecute.Enabled = true;
            gbPickDatabase.Enabled = true; //user still might want to change his mind about targets
            btnConfirmDatabase.Enabled = false;
            gbTableName.Enabled = true;

            break;
        default:
            //nameof keeps the message correct across renames (was the magic string "state")
            throw new ArgumentOutOfRangeException(nameof(state));
    }
}
/// <summary>
/// Exports the selected object(s) as share definition (.sd) files.  A single object goes to a file
/// chosen interactively (or pre-set); multiple objects each get a file in the chosen output directory,
/// named after the sanitised object name.  Returning without error when the user cancels a picker.
/// </summary>
public override void Execute()
{
    base.Execute();

    if (IsSingleObject)
    {
        //Extract a single object (to file)
        if (TargetFileInfo == null && BasicActivator.IsInteractive)
        {
            TargetFileInfo = BasicActivator.SelectFile("Path to output share definition to", "Share Definition", "*.sd");

            //user cancelled the file picker
            if (TargetFileInfo == null)
            {
                return;
            }
        }
    }
    else
    {
        if (TargetDirectoryInfo == null && BasicActivator.IsInteractive)
        {
            TargetDirectoryInfo = BasicActivator.SelectDirectory("Output Directory");

            //user cancelled the directory picker
            if (TargetDirectoryInfo == null)
            {
                return;
            }
        }
    }

    //single object path: serialise its dependency tree to the one file and stop
    if (TargetFileInfo != null && IsSingleObject)
    {
        var d = _gatherer.GatherDependencies(_toExport[0]);
        var shareDefinitions = d.ToShareDefinitionWithChildren(_shareManager);
        string serial = JsonConvertExtensions.SerializeObject(shareDefinitions, _repositoryLocator);

        File.WriteAllText(TargetFileInfo.FullName, serial);
        return;
    }

    //NOTE(review): a non-interactive single-object call with no TargetFileInfo falls through to here
    //and reports "No output directory set" — message may be misleading for that path; confirm intended
    if (TargetDirectoryInfo == null)
    {
        throw new Exception("No output directory set");
    }

    //multiple objects: one .sd file per object, named after the sanitised object name
    foreach (var o in _toExport)
    {
        var d = _gatherer.GatherDependencies(o);
        var filename = QuerySyntaxHelper.MakeHeaderNameSensible(o.ToString()) + ".sd";
        var shareDefinitions = d.ToShareDefinitionWithChildren(_shareManager);
        string serial = JsonConvertExtensions.SerializeObject(shareDefinitions, _repositoryLocator);

        var f = Path.Combine(TargetDirectoryInfo.FullName, filename);
        File.WriteAllText(f, serial);
    }

    if (ShowInExplorer && TargetDirectoryInfo != null)
    {
        UsefulStuff.GetInstance().ShowFolderInWindowsExplorer(TargetDirectoryInfo);
    }
}
/// <summary>
/// Returns the name for the onupdate trigger: the sanitised runtime table name suffixed with "_onupdate".
/// </summary>
protected virtual object GetTriggerName()
{
    var tableName = _table.GetRuntimeName();
    return $"{QuerySyntaxHelper.MakeHeaderNameSensible(tableName)}_onupdate";
}
[TestCase("once #upon", "onceUpon")] //Dodgy characters are stripped before cammel casing after spaces so 'u' gets cammeled even though it has a symbol before it.
public void TestMakingHeaderNamesSane(string bad, string expectedGood)
{
    var actual = QuerySyntaxHelper.MakeHeaderNameSensible(bad);

    Assert.AreEqual(expectedGood, actual);
}
/// <summary>
/// Determines the header names for the file, either by reading the first line via <paramref name="r"/>
/// or by using the forced-headers override.  Then tidies them: trims whitespace, discards trailing
/// blank headers, builds the ignore-columns set, and (optionally) sanitises the names.  Must be the
/// first thing called (state machine moves Start -> AfterHeadersRead).
/// </summary>
/// <param name="r">the open CsvReader positioned at the start of the file</param>
public void GetHeadersFromFile(CsvReader r)
{
    //check state
    if (_state != State.Start)
    {
        throw new Exception("Illegal state, headers cannot be read at state " + _state);
    }

    _state = State.AfterHeadersRead;

    //if we are not forcing headers we must get them from the file
    if (string.IsNullOrWhiteSpace(_forceHeaders))
    {
        //read the first record from the file (this will read the header and first row)
        bool empty = !r.Read();

        if (empty)
        {
            FileIsEmpty = true;
            return;
        }

        //get headers from first line of the file
        r.ReadHeader();
        _headers = r.Context.HeaderRecord;
    }
    else
    {
        //user has some specific headers he wants to override with
        _headers = _forceHeaders.Split(new[] { r.Configuration.Delimiter }, StringSplitOptions.None);
        r.Configuration.HasHeaderRecord = false;
    }

    //ignore these columns (trimmed and ignoring case)
    if (!string.IsNullOrWhiteSpace(_ignoreColumns))
    {
        IgnoreColumnsList = new HashSet<string>(
            _ignoreColumns.Split(new[] { r.Configuration.Delimiter }, StringSplitOptions.None)
            .Select(h => h.Trim())
            , StringComparer.CurrentCultureIgnoreCase);
    }
    else
    {
        IgnoreColumnsList = new HashSet<string>();
    }

    //Make adjustments to the headers (trim etc)

    //trim them
    for (int i = 0; i < _headers.Length; i++)
    {
        if (!string.IsNullOrWhiteSpace(_headers[i]))
        {
            _headers[i] = _headers[i].Trim();
        }
    }

    //throw away trailing null headers e.g. the header line "Name,Date,,,"
    var trailingNullHeaders = _headers.Reverse().TakeWhile(s => s.IsBasicallyNull()).Count();

    if (trailingNullHeaders > 0)
    {
        _headers = _headers.Take(_headers.Length - trailingNullHeaders).ToArray();
    }

    //and maybe also help them out with a bit of sanity fixing
    if (_makeHeaderNamesSane)
    {
        for (int i = 0; i < _headers.Length; i++)
        {
            _headers[i] = QuerySyntaxHelper.MakeHeaderNameSane(_headers[i]);
        }
    }
}
/// <summary>
/// Asserts that exactly one parameter, @bobby, is detected in the supplied SQL.
/// </summary>
public void TestExtractionOfParmaetersFromSQL_FindOne(string sql)
{
    var parameterName = QuerySyntaxHelper.GetAllParameterNamesFromQuery(sql).SingleOrDefault();

    Assert.AreEqual("@bobby", parameterName);
}