private void CleanupTruncateCommand()
{
    var lds = new DiscoveredServer(UnitTestLoggingConnectionString);

    using (var con = lds.GetConnection())
    {
        con.Open();
        lds.GetCommand("DELETE FROM DataLoadTask where LOWER(dataSetID) like '%truncate%'", con).ExecuteNonQuery();
        lds.GetCommand("DELETE FROM DataSet where LOWER(dataSetID) like '%truncate%'", con).ExecuteNonQuery();
    }
}
private int ExtractSQL(string sql, string tableName, DbConnection con)
{
    DbCommand cmdExtract = _server.GetCommand(sql, con);

    if (!Directory.Exists(_outputDirectory.FullName))
    {
        Directory.CreateDirectory(_outputDirectory.FullName);
    }

    OutputFilename = _outputDirectory.FullName + "\\" + tableName.Replace("[", "").Replace("]", "").ToLower().Trim() + ".csv";
    StreamWriter sw = new StreamWriter(OutputFilename);

    cmdExtract.CommandTimeout = 500000;

    DbDataReader r = cmdExtract.ExecuteReader();

    WriteHeader(sw, r, _separator, _dateTimeFormat);
    int linesWritten = WriteBody(sw, r, _separator, _dateTimeFormat);

    r.Close();
    sw.Flush();
    sw.Close();

    return linesWritten;
}
public bool HasAnyEvaluations(Catalogue catalogue)
{
    using (var con = GetConnection())
    {
        return Convert.ToBoolean(
            DiscoveredServer.GetCommand(
                "SELECT case when exists (select 1 from Evaluation where CatalogueID = " + catalogue.ID + ") then 1 else 0 end",
                con).ExecuteScalar());
    }
}
private DateTime GetMaxDate(DiscoveredServer server, IDataLoadEventListener listener)
{
    DateTime dt;

    using (var con = server.GetConnection())
    {
        con.Open();

        listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information,
            "About to execute SQL to determine the maximum date for data loaded:" + ExecuteScalarSQL));

        using (var cmd = server.GetCommand(ExecuteScalarSQL, con))
        {
            var scalarValue = cmd.ExecuteScalar();

            if (scalarValue == null || scalarValue == DBNull.Value)
            {
                throw new DataLoadProgressUpdateException("ExecuteScalarSQL specified for determining the maximum date of data loaded returned null when executed");
            }

            try
            {
                dt = Convert.ToDateTime(scalarValue);
            }
            catch (Exception e)
            {
                throw new DataLoadProgressUpdateException("ExecuteScalarSQL specified for determining the maximum date of data loaded returned a value that was not a Date:" + scalarValue, e);
            }
        }

        return dt;
    }
}
private void LoadDataTableAsync(DiscoveredServer server, string sql)
{
    //it is already running and not completed
    if (_task != null && !_task.IsCompleted)
    {
        return;
    }

    HideFatal();

    pbLoading.Visible = true;
    llCancel.Visible = true;

    _task = Task.Factory.StartNew(() =>
    {
        int timeout = 1000;

        while (!IsHandleCreated && timeout > 0)
        {
            timeout -= 10;
            Thread.Sleep(10);
        }

        try
        {
            //then execute the command
            using (DbConnection con = server.GetConnection())
            {
                con.Open();

                _cmd = server.GetCommand(sql, con);
                _cmd.CommandTimeout = _timeoutControls.Timeout;

                DbDataAdapter a = server.GetDataAdapter(_cmd);
                DataTable dt = new DataTable();
                a.Fill(dt);

                MorphBinaryColumns(dt);

                Invoke(new MethodInvoker(() =>
                {
                    dataGridView1.DataSource = dt;
                }));

                con.Close();
            }
        }
        catch (Exception e)
        {
            ShowFatal(e);
        }
        finally
        {
            if (IsHandleCreated)
            {
                Invoke(new MethodInvoker(() =>
                {
                    pbLoading.Visible = false;
                    llCancel.Visible = false;
                }));
            }
        }
    });
}
/// <summary>
/// Looks up data stored in the Catalogue with a query matching on any of the provided uids.  All values must be
/// supplied if the Catalogue has a column of the corresponding name (i.e. if the Catalogue has SeriesInstanceUID
/// you must supply <paramref name="seriesuid"/>).
/// </summary>
/// <param name="studyuid"></param>
/// <param name="seriesuid"></param>
/// <param name="imageuid"></param>
/// <returns></returns>
public bool DoLookup(string studyuid, string seriesuid, string imageuid)
{
    string sql = _queryBuilder.SQL;

    using (var con = _server.GetConnection())
    {
        con.Open();

        using (var cmd = _server.GetCommand(sql, con))
        {
            //Add the current row UIDs to the parameters of the command
            if (_studyFilter != null)
            {
                _server.AddParameterWithValueToCommand(QueryToExecuteColumnSet.DefaultStudyIdColumnName, cmd, studyuid);
            }

            if (_seriesFilter != null)
            {
                _server.AddParameterWithValueToCommand(QueryToExecuteColumnSet.DefaultSeriesIdColumnName, cmd, seriesuid);
            }

            if (_instanceFilter != null)
            {
                _server.AddParameterWithValueToCommand(QueryToExecuteColumnSet.DefaultInstanceIdColumnName, cmd, imageuid);
            }

            using (var r = cmd.ExecuteReader())
            {
                //if we can read a record then we have an entry in the blacklist
                return r.Read();
            }
        }
    }
}
/// <summary>
/// Marks that the data load ended
/// </summary>
public void CloseAndMarkComplete()
{
    lock (oLock)
    {
        //prevent double closing
        if (_isClosed)
        {
            return;
        }

        _endTime = DateTime.Now;

        using (var con = _server.BeginNewTransactedConnection())
        {
            try
            {
                DbCommand cmdUpdateToClosed = _server.GetCommand("UPDATE DataLoadRun SET endTime=@endTime WHERE ID=@ID", con);

                _server.AddParameterWithValueToCommand("@endTime", cmdUpdateToClosed, DateTime.Now);
                _server.AddParameterWithValueToCommand("@ID", cmdUpdateToClosed, ID);

                int rowsAffected = cmdUpdateToClosed.ExecuteNonQuery();

                if (rowsAffected != 1)
                {
                    throw new Exception(
                        "Error closing off DataLoad in database, the update command resulted in " + rowsAffected +
                        " rows being affected (expected 1) - will try to rollback");
                }

                con.ManagedTransaction.CommitAndCloseConnection();

                _isClosed = true;
            }
            catch (Exception)
            {
                //if something goes wrong with the update, roll it back
                con.ManagedTransaction.AbandonAndCloseConnection();
                throw;
            }

            //once a record has been committed to the database it is redundant and no further attempts to read/change it should be made by anyone
            foreach (TableLoadInfo t in this.TableLoads.Values)
            {
                //close any table loads that have not yet completed
                if (!t.IsClosed)
                {
                    t.CloseAndArchive();
                }
            }
        }
    }
}
public void CreateDataLoadTask(string taskName)
{
    using (var con = _loggingServer.GetConnection())
    {
        con.Open();

        var datasetName = "Test_" + taskName;

        var datasetCmd = _loggingServer.GetCommand("INSERT INTO DataSet (dataSetID) VALUES ('" + datasetName + "')", con);
        datasetCmd.ExecuteNonQuery();
        _sqlToCleanUp.Push("DELETE FROM DataSet WHERE dataSetID = '" + datasetName + "'");

        var taskCmd = _loggingServer.GetCommand(
            "INSERT INTO DataLoadTask VALUES (100, '" + taskName + "', '" + taskName + "', GETDATE(), '" + datasetName + "', 1, 1, '" + datasetName + "')", con);
        taskCmd.ExecuteNonQuery();
        _sqlToCleanUp.Push("DELETE FROM DataLoadTask WHERE dataSetID = '" + datasetName + "'");
    }
}
/// <summary>
/// Update the database <paramref name="server"/> to redact the <paramref name="failure"/>.
/// </summary>
/// <param name="server">Where to connect to get the data, can be null if <see cref="RulesOnly"/> is true</param>
/// <param name="failure">The failure to redact/create a rule for</param>
/// <param name="usingRule">Pass null to create a new rule or give a value to reuse an existing rule</param>
public void Update(DiscoveredServer server, Failure failure, IsIdentifiableRule usingRule)
{
    //there's no rule yet so create one (and add it to RedList.yaml)
    if (usingRule == null)
    {
        usingRule = Add(failure, RuleAction.Report);
    }

    //if we are running in rules only mode we don't need to also update the database
    if (RulesOnly)
    {
        return;
    }

    var syntax = server.GetQuerySyntaxHelper();

    //the fully specified name e.g. [mydb]..[mytbl]
    string tableName = failure.Resource;

    var tokens = tableName.Split('.', StringSplitOptions.RemoveEmptyEntries);

    var db = tokens.First();
    tableName = tokens.Last();

    if (string.IsNullOrWhiteSpace(db) || string.IsNullOrWhiteSpace(tableName) || string.Equals(db, tableName))
    {
        throw new NotSupportedException($"Could not understand table name {failure.Resource}, maybe it is not fully specified with a valid database and table name?");
    }

    db = syntax.GetRuntimeName(db);
    tableName = syntax.GetRuntimeName(tableName);

    DiscoveredTable table = server.ExpectDatabase(db).ExpectTable(tableName);

    //if we've never seen this table before
    if (!_primaryKeys.ContainsKey(table))
    {
        var pk = table.DiscoverColumns().SingleOrDefault(k => k.IsPrimaryKey);
        _primaryKeys.Add(table, pk);
    }

    using (var con = server.GetConnection())
    {
        con.Open();

        foreach (var sql in UpdateStrategy.GetUpdateSql(table, _primaryKeys, failure, usingRule))
        {
            var cmd = server.GetCommand(sql, con);
            cmd.ExecuteNonQuery();
        }
    }
}
public void Example_TableCreation()
{
    //Load implementation assemblies that are relevant to your application
    ImplementationManager.Load(
        typeof(FAnsi.Implementations.MicrosoftSQL.MicrosoftSQLImplementation).Assembly,
        typeof(FAnsi.Implementations.Oracle.OracleImplementation).Assembly,
        typeof(FAnsi.Implementations.MySql.MySqlImplementation).Assembly);

    //Create some test data
    DataTable dt = new DataTable();
    dt.Columns.Add("Name");
    dt.Columns.Add("DateOfBirth");

    dt.Rows.Add("Frank", "2001-01-01");
    dt.Rows.Add("Dave", "2001-01-01");

    //Create a server object
    //var server = new DiscoveredServer(@"server=localhost\sqlexpress;Trusted_Connection=True;", DatabaseType.MicrosoftSQLServer);
    var server = new DiscoveredServer(@"Server=localhost;Uid=root;Pwd=zombie;SSL-Mode=None", DatabaseType.MySql);

    //Find the database
    var database = server.ExpectDatabase("FAnsiTests");

    //Or create it
    if (!database.Exists())
    {
        database.Create();
    }

    //Create a table that can store the data in dt
    var table = database.CreateTable("MyTable", dt);

    //Table has 2 rows in it
    Console.WriteLine("Table {0} has {1} rows", table.GetFullyQualifiedName(), table.GetRowCount());
    Console.WriteLine("Column Name is of type {0}", table.DiscoverColumn("Name").DataType.SQLType);
    Console.WriteLine("Column DateOfBirth is of type {0}", table.DiscoverColumn("DateOfBirth").DataType.SQLType);

    using (DbConnection con = server.GetConnection())
    {
        con.Open();

        DbCommand cmd = server.GetCommand("Select * from " + table.GetFullyQualifiedName(), con);
        DbDataReader r = cmd.ExecuteReader();

        while (r.Read())
        {
            Console.WriteLine(string.Join(",", r["Name"], r["DateOfBirth"]));
        }
    }

    //Drop the table afterwards
    table.Drop();
}
private int GetDataTaskId(string dataTask, DiscoveredServer server, DbConnection con)
{
    using (var cmd = server.GetCommand("SELECT ID FROM DataLoadTask WHERE name = @name", con))
    {
        var p = cmd.CreateParameter();
        p.ParameterName = "@name";
        p.Value = dataTask;
        cmd.Parameters.Add(p);

        return Convert.ToInt32(cmd.ExecuteScalar());
    }
}
private void CheckDatasetExists(ICheckNotifier notifier, string dataSetID)
{
    using (var conn = _server.GetConnection())
    {
        conn.Open();

        var cmd = _server.GetCommand("SELECT 1 FROM DataSet WHERE dataSetID=@dsID", conn);
        _server.AddParameterWithValueToCommand("@dsID", cmd, dataSetID);

        var found = cmd.ExecuteScalar();
        if (found != null)
        {
            notifier.OnCheckPerformed(new CheckEventArgs("Found default dataset: " + dataSetID, CheckResult.Success, null));
            return;
        }

        if (notifier.OnCheckPerformed(new CheckEventArgs("Did not find default dataset '" + dataSetID + "'.", CheckResult.Fail, null, "Create the dataset '" + dataSetID + "'")))
        {
            cmd = _server.GetCommand("INSERT INTO DataSet (dataSetID, name) VALUES (@dsID, @dsID)", conn);
            _server.AddParameterWithValueToCommand("@dsID", cmd, dataSetID);
            cmd.ExecuteNonQuery();
        }
    }
}
private IEnumerable<string> GetForeignKeys(DiscoveredServer server)
{
    using (var con = server.GetConnection())
    {
        con.Open();

        var r = server.GetCommand(@"select name from sys.foreign_keys where delete_referential_action = 0", con).ExecuteReader();

        while (r.Read())
        {
            yield return (string)r["name"];
        }
    }
}
public void SynchronizationTests_ColumnAdded(bool acceptChanges)
{
    using (var con = _database.Server.GetConnection())
    {
        con.Open();
        _server.GetCommand("ALTER TABLE " + TABLE_NAME + " ADD Birthday datetime not null", con).ExecuteNonQuery();
    }

    TableInfoSynchronizer synchronizer = new TableInfoSynchronizer(tableInfoCreated);

    if (acceptChanges)
    {
        //accepting changes should result in a synchronized table
        Assert.AreEqual(true, synchronizer.Synchronize(new AcceptAllCheckNotifier()));
        Assert.AreEqual(3, tableInfoCreated.ColumnInfos.Length); //should be 3 now
    }
    else
    {
        var ex = Assert.Throws<Exception>(() => synchronizer.Synchronize(new ThrowImmediatelyCheckNotifier()));
        Assert.AreEqual("The following columns are missing from the TableInfo:Birthday", ex.Message);
    }
}
public void Update(UIDMapping mapping)
{
    var table = _database.ExpectTable(_tableName);

    var sql = "UPDATE " + table.GetFullyQualifiedName() + " SET " +
              "PrivateUID = @PrivateUID, " +
              "ProjectNumber = @ProjectNumber, " +
              "UIDType = @UIDType, " +
              "IsExternalReference = @IsExternalReference " +
              "WHERE ReleaseUID = @ReleaseUID";

    using (var conn = GetConnection())
    {
        conn.Open();

        var cmd = _server.GetCommand(sql, conn);

        _server.AddParameterWithValueToCommand("@PrivateUID", cmd, mapping.PrivateUID);
        _server.AddParameterWithValueToCommand("@ProjectNumber", cmd, mapping.ProjectNumber);
        _server.AddParameterWithValueToCommand("@UIDType", cmd, mapping.UIDType);
        _server.AddParameterWithValueToCommand("@IsExternalReference", cmd, mapping.IsExternalReference);
        _server.AddParameterWithValueToCommand("@ReleaseUID", cmd, mapping.ReleaseUID);

        cmd.ExecuteNonQuery();
    }
}
public override string GetSubstitutionFor(string toSwap, out string reason)
{
    reason = null;

    // If the cached key matches, return the last value
    if (string.Equals(toSwap, _lastKey) && _lastVal != null)
    {
        _logger.Debug("Using cached swap value");

        CacheHit++;
        Success++;

        return _lastVal;
    }

    CacheMiss++;

    // Else fall through to the database lookup
    using (new TimeTracker(DatabaseStopwatch))
    using (DbConnection con = _server.GetConnection())
    {
        con.Open();

        string sql = string.Format("SELECT {0} FROM {1} WHERE {2}=@val",
            _options.ReplacementColumnName,
            _swapTable.GetFullyQualifiedName(),
            _options.SwapColumnName);

        DbCommand cmd = _server.GetCommand(sql, con);
        _server.AddParameterWithValueToCommand("@val", cmd, toSwap);

        object result = cmd.ExecuteScalar();

        if (result == DBNull.Value || result == null)
        {
            reason = "No match found for '" + toSwap + "'";
            Fail++;
            return null;
        }

        _lastKey = toSwap;
        _lastVal = result.ToString();

        ++Success;

        return _lastVal;
    }
}
public void CreateDataset()
{
    _server = DiscoveredDatabaseICanCreateRandomTablesIn.Server;

    using (var con = _server.GetConnection())
    {
        con.Open();
        _server.GetCommand("CREATE TABLE " + TABLE_NAME + "(Name varchar(10), Address varchar(500))", con).ExecuteNonQuery();
    }

    var tbl = DiscoveredDatabaseICanCreateRandomTablesIn.ExpectTable("TableInfoSynchronizerTests");

    TableInfoImporter importer = new TableInfoImporter(CatalogueRepository, tbl);
    importer.DoImport(out tableInfoCreated, out columnInfosCreated);
}
private IEnumerable<string> GetIndexes(DiscoveredServer server)
{
    using (var con = server.GetConnection())
    {
        con.Open();

        var r = server.GetCommand(@"select si.name from sys.indexes si
JOIN sys.objects so ON si.[object_id] = so.[object_id]
WHERE so.type = 'U' AND is_primary_key = 0
and si.name is not null and so.name <> 'sysdiagrams'", con).ExecuteReader();

        while (r.Read())
        {
            yield return (string)r["name"];
        }
    }
}
protected override void SetUp()
{
    base.SetUp();

    _database = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer);
    _server = _database.Server;

    using (var con = _server.GetConnection())
    {
        con.Open();
        _server.GetCommand("CREATE TABLE " + TABLE_NAME + "(Name varchar(10), Address varchar(500))", con).ExecuteNonQuery();
    }

    var tbl = _database.ExpectTable("TableInfoSynchronizerTests");

    TableInfoImporter importer = new TableInfoImporter(CatalogueRepository, tbl);
    importer.DoImport(out tableInfoCreated, out columnInfosCreated);
}
public override Version GetVersion(DiscoveredServer server)
{
    using (var con = server.GetConnection())
    {
        con.Open();

        using (var cmd = server.GetCommand("SHOW server_version", con))
        using (var r = cmd.ExecuteReader())
        {
            if (r.Read())
            {
                return r[0] == DBNull.Value ? null : CreateVersionFromString((string)r[0]);
            }
            else
            {
                return null;
            }
        }
    }
}
public override Version GetVersion(DiscoveredServer server)
{
    using (var con = server.GetConnection())
    {
        con.Open();

        using (var cmd = server.GetCommand("SELECT * FROM v$version WHERE BANNER like 'Oracle Database%'", con))
        using (var r = cmd.ExecuteReader())
        {
            if (r.Read())
            {
                return r[0] == DBNull.Value ? null : CreateVersionFromString((string)r[0]);
            }
            else
            {
                return null;
            }
        }
    }
}
void ThrowIfDatabaseLock()
{
    var serverCopy = new DiscoveredServer(new SqlConnectionStringBuilder(DiscoveredServerICanCreateRandomDatabasesAndTablesOn.Builder.ConnectionString));
    serverCopy.ChangeDatabase("master");

    using (var con = serverCopy.GetConnection())
    {
        con.Open();

        var r = serverCopy.GetCommand("exec sp_who2", con).ExecuteReader();

        while (r.Read())
        {
            if (r["DBName"].Equals(testDbName))
            {
                object[] vals = new object[r.VisibleFieldCount];
                r.GetValues(vals);
                throw new Exception("Someone is locking " + testDbName + ":" + Environment.NewLine + string.Join(",", vals));
            }
        }
    }
}
private int ExtractSQL(string sql, string tableName, DbConnection con)
{
    int linesWritten;

    using (DbCommand cmdExtract = _server.GetCommand(sql, con))
    {
        if (!Directory.Exists(_outputDirectory.FullName))
        {
            Directory.CreateDirectory(_outputDirectory.FullName);
        }

        string filename = tableName.Replace("[", "").Replace("]", "").ToLower().Trim();

        if (!filename.EndsWith(".csv"))
        {
            filename += ".csv";
        }

        OutputFilename = Path.Combine(_outputDirectory.FullName, filename);

        StreamWriter sw = new StreamWriter(OutputFilename);

        cmdExtract.CommandTimeout = 500000;

        using (DbDataReader r = cmdExtract.ExecuteReader())
        {
            WriteHeader(sw, r, _separator, _dateTimeFormat);
            linesWritten = WriteBody(sw, r, _separator, _dateTimeFormat);

            r.Close();
        }

        sw.Flush();
        sw.Close();
    }

    return linesWritten;
}
private void FillTableWithQueryIfUserConsents(DataTable dt, string sql, ICheckNotifier checkNotifier, DiscoveredServer server)
{
    bool execute = checkNotifier.OnCheckPerformed(new CheckEventArgs("About to fetch data, confirming user is happy with SQL", CheckResult.Warning, null, sql));

    if (execute)
    {
        using (var con = server.GetConnection())
        {
            con.Open();

            using (var cmd = server.GetCommand(sql, con))
            {
                cmd.CommandTimeout = _timeout;

                var da = server.GetDataAdapter(cmd);
                da.Fill(dt);
            }
        }
    }
    else
    {
        checkNotifier.OnCheckPerformed(new CheckEventArgs("User decided not to execute the SQL", CheckResult.Fail));
    }
}
public virtual string CreateTrigger(ICheckNotifier notifier, int timeout = 30)
{
    if (!_primaryKeys.Any())
    {
        throw new TriggerException("There must be at least 1 primary key");
    }

    //if _Archive exists skip creating it
    bool skipCreatingArchive = _archiveTable.Exists();

    //check the main table does not already contain the reserved archive columns
    foreach (string forbiddenColumnName in new[] { "hic_validTo", "hic_userID", "hic_status" })
    {
        if (_columns.Any(c => c.GetRuntimeName().Equals(forbiddenColumnName, StringComparison.CurrentCultureIgnoreCase)))
        {
            throw new TriggerException("Table " + _table + " already contains a column called " + forbiddenColumnName + " this column is reserved for Archiving");
        }
    }

    bool b_mustCreate_validFrom = !_columns.Any(c => c.GetRuntimeName().Equals(SpecialFieldNames.ValidFrom, StringComparison.CurrentCultureIgnoreCase));
    bool b_mustCreate_dataloadRunId = !_columns.Any(c => c.GetRuntimeName().Equals(SpecialFieldNames.DataLoadRunID, StringComparison.CurrentCultureIgnoreCase)) && _createDataLoadRunIdAlso;

    //forces column order dataLoadRunID then validFrom (doesn't prevent these being in the wrong place in the record but hey ho - possibly not an issue anyway since probably the 3 values in the archive are what matters for order - see the Trigger which populates *,X,Y,Z where * is all columns in the main table)
    if (b_mustCreate_dataloadRunId && !b_mustCreate_validFrom)
    {
        throw new TriggerException("Cannot create trigger because table contains " + SpecialFieldNames.ValidFrom + " but not " + SpecialFieldNames.DataLoadRunID + " (ID must be placed before valid from in column order)");
    }

    //must add dataLoadRunID outside of transaction if we want SMO to pick it up
    if (b_mustCreate_dataloadRunId)
    {
        _table.AddColumn(SpecialFieldNames.DataLoadRunID, new DatabaseTypeRequest(typeof(int)), true, timeout);
    }

    var syntaxHelper = _server.GetQuerySyntaxHelper();
    var dateTimeDatatype = syntaxHelper.TypeTranslater.GetSQLDBTypeForCSharpType(new DatabaseTypeRequest(typeof(DateTime)));
    var nowFunction = syntaxHelper.GetScalarFunctionSql(MandatoryScalarFunctions.GetTodaysDate);

    //must add validFrom outside of transaction if we want SMO to pick it up
    if (b_mustCreate_validFrom)
    {
        _table.AddColumn(SpecialFieldNames.ValidFrom, string.Format(" {0} DEFAULT {1}", dateTimeDatatype, nowFunction), true, timeout);
    }

    //if we created columns we need to update _columns
    if (b_mustCreate_dataloadRunId || b_mustCreate_validFrom)
    {
        _columns = _table.DiscoverColumns();
    }

    string sql = WorkOutArchiveTableCreationSQL();

    if (!skipCreatingArchive)
    {
        using (var con = _server.GetConnection())
        {
            con.Open();

            var cmdCreateArchive = _server.GetCommand(sql, con);

            cmdCreateArchive.ExecuteNonQuery();

            _archiveTable.AddColumn("hic_validTo", new DatabaseTypeRequest(typeof(DateTime)), true, timeout);
            _archiveTable.AddColumn("hic_userID", new DatabaseTypeRequest(typeof(string), 128), true, timeout);
            _archiveTable.AddColumn("hic_status", new DatabaseTypeRequest(typeof(string), 1), true, timeout);
        }
    }

    return sql;
}
private int ExtractSQL(string sql, string tableName, DbConnection con)
{
    int linesWritten;

    using (DbCommand cmdExtract = _server.GetCommand(sql, con))
    {
        string filename = null;

        if (_outputDirectory != null)
        {
            if (!Directory.Exists(_outputDirectory.FullName))
            {
                Directory.CreateDirectory(_outputDirectory.FullName);
            }

            filename = tableName.Replace("[", "").Replace("]", "").ToLower().Trim();

            if (!filename.EndsWith(".csv"))
            {
                filename += ".csv";
            }
        }

        StreamWriter sw;

        if (_stream != null)
        {
            sw = new StreamWriter(_stream);
        }
        else
        {
            if (_outputDirectory == null)
            {
                throw new Exception($"{nameof(_outputDirectory)} cannot be null when using file output mode (only with an explicit stream out).");
            }

            if (filename == null)
            {
                throw new Exception($"{nameof(filename)} cannot be null when using file output mode (only with an explicit stream out).");
            }

            OutputFilename = Path.Combine(_outputDirectory.FullName, filename);
            sw = new StreamWriter(OutputFilename);
        }

        cmdExtract.CommandTimeout = 500000;

        using (DbDataReader r = cmdExtract.ExecuteReader())
        {
            WriteHeader(sw, r, _separator, _dateTimeFormat);
            linesWritten = WriteBody(sw, r, _separator, _dateTimeFormat, RoundFloatsTo);

            r.Close();
        }

        sw.Flush();
        sw.Close();
    }

    return linesWritten;
}
public bool CreateDatabase(string createTablesAndFunctionsSql, string initialVersionNumber, ICheckNotifier notifier)
{
    try
    {
        // The _builder has InitialCatalog set which will cause the pre-database creation connection to fail, so create one which doesn't contain InitialCatalog
        var serverBuilder = new SqlConnectionStringBuilder(_builder.ConnectionString) { InitialCatalog = "" };

        DiscoveredServer server = new DiscoveredServer(serverBuilder);
        server.TestConnection();

        var db = server.ExpectDatabase(_database);

        if (db.Exists()) //make sure database does not already exist
        {
            bool createAnyway = notifier.OnCheckPerformed(new CheckEventArgs("Database already exists", CheckResult.Warning, null, "Attempt to create database inside existing database (will cause problems if the database is not empty)?"));

            if (!createAnyway)
            {
                throw new Exception("User chose not to continue");
            }
        }
        else
        {
            using (var con = server.GetConnection()) //do it manually
            {
                con.Open();
                server.GetCommand("CREATE DATABASE " + _database + (BinaryCollation ? " COLLATE Latin1_General_BIN2" : ""), con).ExecuteNonQuery();
                notifier.OnCheckPerformed(new CheckEventArgs("Database " + _database + " created", CheckResult.Success, null));
            }
        }

        SqlConnection.ClearAllPools();

        using (var con = db.Server.GetConnection())
        {
            con.Open();

            var cmd = db.Server.GetCommand("CREATE SCHEMA " + RoundhouseSchemaName, con);
            cmd.ExecuteNonQuery();

            var sql = @"CREATE TABLE [RoundhousE].[ScriptsRun](
    [id] [bigint] IDENTITY(1,1) NOT NULL,
    [version_id] [bigint] NULL,
    [script_name] [nvarchar](255) NULL,
    [text_of_script] [text] NULL,
    [text_hash] [nvarchar](512) NULL,
    [one_time_script] [bit] NULL,
    [entry_date] [datetime] NULL,
    [modified_date] [datetime] NULL,
    [entered_by] [nvarchar](50) NULL,
    PRIMARY KEY CLUSTERED ([id] ASC)
)

CREATE TABLE [RoundhousE].[Version](
    [id] [bigint] IDENTITY(1,1) NOT NULL,
    [repository_path] [nvarchar](255) NULL,
    [version] [nvarchar](50) NULL,
    [entry_date] [datetime] NULL,
    [modified_date] [datetime] NULL,
    [entered_by] [nvarchar](50) NULL,
    PRIMARY KEY CLUSTERED ([id] ASC)
)
";

            var cmd2 = db.Server.GetCommand(sql, con);
            cmd2.ExecuteNonQuery();
        }

        RunSQL(db, createTablesAndFunctionsSql, InitialDatabaseScriptName);

        SetVersion(db, "Initial Setup", initialVersionNumber);

        notifier.OnCheckPerformed(new CheckEventArgs("Tables created", CheckResult.Success, null));
        notifier.OnCheckPerformed(new CheckEventArgs("Setup Completed successfully", CheckResult.Success, null));

        return true;
    }
    catch (Exception e)
    {
        notifier.OnCheckPerformed(new CheckEventArgs("Create failed", CheckResult.Fail, e));
        return false;
    }
}
private void GetCohortAvailability(ExternalCohortTable source)
{
    DiscoveredServer server = null;
    Exception ex = null;

    //it obviously hasn't been initialised properly yet
    if (string.IsNullOrWhiteSpace(source.Server) || string.IsNullOrWhiteSpace(source.Database))
    {
        return;
    }

    try
    {
        server = DataAccessPortal.GetInstance().ExpectDatabase(source, DataAccessContext.DataExport).Server;
    }
    catch (Exception exception)
    {
        ex = exception;
    }

    if (server == null || !server.RespondsWithinTime(3, out ex) || !source.IsFullyPopulated())
    {
        Blacklist(source, ex);
        return;
    }

    try
    {
        using (var con = server.GetConnection())
        {
            con.Open();

            //Get all of the project numbers and remote origin ids etc from the source in one query
            using (var cmd = server.GetCommand(source.GetExternalDataSql(), con))
            {
                cmd.CommandTimeout = 120;

                using (var r = cmd.ExecuteReader())
                {
                    while (r.Read())
                    {
                        //really should be only one here but still they might for some reason have 2 references to the same external cohort
                        if (_cohortsByOriginId.TryGetValue(Convert.ToInt32(r["OriginID"]), out HashSet<ExtractableCohort> result))
                        {
                            //Tell the cohorts what their external data values are so they don't have to fetch them themselves individually
                            foreach (ExtractableCohort c in result.Where(c => c.ExternalCohortTable_ID == source.ID))
                            {
                                //load external data from the result set
                                var externalData = new ExternalCohortDefinitionData(r, source.Name);

                                //tell the cohort about the data
                                c.InjectKnown(externalData);

                                lock (_oProjectNumberToCohortsDictionary)
                                {
                                    //for performance also keep a dictionary of project number => compatible cohorts
                                    if (!ProjectNumberToCohortsDictionary.ContainsKey(externalData.ExternalProjectNumber))
                                    {
                                        ProjectNumberToCohortsDictionary.Add(externalData.ExternalProjectNumber, new List<ExtractableCohort>());
                                    }

                                    ProjectNumberToCohortsDictionary[externalData.ExternalProjectNumber].Add(c);
                                }
                            }
                        }
                    }
                }
            }
        }
    }
    catch (Exception e)
    {
        Blacklist(source, e);
    }
}
public override int UploadImpl(DataTable dt)
{
    //don't run an insert if there are 0 rows
    if (dt.Rows.Count == 0)
    {
        return 0;
    }

    var syntaxHelper = _server.GetQuerySyntaxHelper();
    var tt = syntaxHelper.TypeTranslater;

    //if the column name is a reserved keyword e.g. "Comment" we need to give it a new name
    Dictionary<DataColumn, string> parameterNames = syntaxHelper.GetParameterNamesFor(dt.Columns.Cast<DataColumn>().ToArray(), c => c.ColumnName);

    int affectedRows = 0;

    var mapping = GetMapping(dt.Columns.Cast<DataColumn>());

    var dateColumns = new HashSet<DataColumn>();

    var sql = string.Format("INSERT INTO " + TargetTable.GetFullyQualifiedName() + "({0}) VALUES ({1})",
        string.Join(",", mapping.Values.Select(c => '"' + c.GetRuntimeName() + '"')),
        string.Join(",", mapping.Keys.Select(c => parameterNames[c]))
    );

    using (OracleCommand cmd = (OracleCommand)_server.GetCommand(sql, Connection))
    {
        //send all the data at once
        cmd.ArrayBindCount = dt.Rows.Count;

        foreach (var kvp in mapping)
        {
            var p = _server.AddParameterWithValueToCommand(parameterNames[kvp.Key], cmd, DBNull.Value);
            p.DbType = tt.GetDbTypeForSQLDBType(kvp.Value.DataType.SQLType);

            if (p.DbType == DbType.DateTime)
            {
                dateColumns.Add(kvp.Key);
            }
        }

        var values = new Dictionary<DataColumn, List<object>>();

        foreach (DataColumn c in mapping.Keys)
        {
            values.Add(c, new List<object>());
        }

        foreach (DataRow dataRow in dt.Rows)
        {
            //populate parameters for current row
            foreach (var col in mapping.Keys)
            {
                var val = dataRow[col];

                if (val is string && string.IsNullOrWhiteSpace((string)val))
                {
                    val = null;
                }
                else if (val == null || val == DBNull.Value)
                {
                    val = null;
                }
                else if (dateColumns.Contains(col))
                {
                    if (val is string s)
                    {
                        val = (DateTime)DateTimeDecider.Parse(s);
                    }
                    else
                    {
                        val = Convert.ToDateTime(dataRow[col]);
                    }
                }

                values[col].Add(val);
            }
        }

        foreach (DataColumn col in mapping.Keys)
        {
            var param = cmd.Parameters[parameterNames[col]];
            param.Value = values[col].ToArray();
        }

        //send query
        affectedRows += cmd.ExecuteNonQuery();
    }

    return affectedRows;
}
public override void GenerateReport(ICatalogue c, IDataLoadEventListener listener, CancellationToken cancellationToken)
{
    SetupLogging(c.CatalogueRepository);

    var toDatabaseLogger = new ToLoggingDatabaseDataLoadEventListener(this, _logManager, _loggingTask, "DQE evaluation of " + c);

    var forker = new ForkDataLoadEventListener(listener, toDatabaseLogger);

    try
    {
        _catalogue = c;
        var dqeRepository = new DQERepository(c.CatalogueRepository);

        byPivotCategoryCubesOverTime.Add("ALL", new PeriodicityCubesOverTime("ALL"));
        byPivotRowStatesOverDataLoadRunId.Add("ALL", new DQEStateOverDataLoadRunId("ALL"));

        Check(new FromDataLoadEventListenerToCheckNotifier(forker));

        var sw = Stopwatch.StartNew();

        using (var con = _server.GetConnection())
        {
            con.Open();

            var cmd = _server.GetCommand(_queryBuilder.SQL, con);
            cmd.CommandTimeout = 500000;

            var t = cmd.ExecuteReaderAsync(cancellationToken);
            t.Wait(cancellationToken);

            if (cancellationToken.IsCancellationRequested)
            {
                throw new OperationCanceledException("User cancelled DQE while fetching data");
            }

            var r = t.Result;

            int progress = 0;

            while (r.Read())
            {
                cancellationToken.ThrowIfCancellationRequested();

                progress++;

                //to start with assume we will pass the results for the 'unknown batch' (where data load run ID is null or not available)
                int dataLoadRunIDOfCurrentRecord = 0;

                //if the DataReader is likely to have a data load run ID column
                if (_containsDataLoadID)
                {
                    //get data load run id
                    int? runID = dqeRepository.ObjectToNullableInt(r[_dataLoadRunFieldName]);

                    //if it has a value use it (otherwise it is null so use 0 - ugh I know, it's a primary key constraint issue)
                    if (runID != null)
                    {
                        dataLoadRunIDOfCurrentRecord = (int)runID;
                    }
                }

                string pivotValue = null;

                //if the user has a pivot category configured
                if (_pivotCategory != null)
                {
                    pivotValue = GetStringValueForPivotField(r[_pivotCategory], forker);

                    if (!haveComplainedAboutNullCategories && string.IsNullOrWhiteSpace(pivotValue))
                    {
                        forker.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning,
                            "Found a null/empty value for pivot category '" + _pivotCategory +
                            "', this record will ONLY be recorded under ALL and not its specific category, you will not be warned of further nulls because there are likely to be many if there are any"));
                        haveComplainedAboutNullCategories = true;
                        pivotValue = null;
                    }
                }

                //always increase the "ALL" category
                ProcessRecord(dqeRepository, dataLoadRunIDOfCurrentRecord, r, byPivotCategoryCubesOverTime["ALL"], byPivotRowStatesOverDataLoadRunId["ALL"]);

                //if there is a value in the current record for the pivot column
                if (pivotValue != null)
                {
                    //if it is a novel value
                    if (!byPivotCategoryCubesOverTime.ContainsKey(pivotValue))
                    {
                        //we will need to expand the dictionaries
                        if (byPivotCategoryCubesOverTime.Keys.Count > MaximumPivotValues)
                        {
                            throw new OverflowException(
                                "Encountered more than " + MaximumPivotValues + " values for the pivot column " + _pivotCategory +
                                " this will result in crazy space usage since it is a multiplicative scale of DQE tesseracts");
                        }

                        //expand both the time periodicity and the state results
                        byPivotRowStatesOverDataLoadRunId.Add(pivotValue, new DQEStateOverDataLoadRunId(pivotValue));
                        byPivotCategoryCubesOverTime.Add(pivotValue, new PeriodicityCubesOverTime(pivotValue));
                    }

                    //now we are sure that the dictionaries have the category field we can increment it
                    ProcessRecord(dqeRepository, dataLoadRunIDOfCurrentRecord, r, byPivotCategoryCubesOverTime[pivotValue], byPivotRowStatesOverDataLoadRunId[pivotValue]);
                }

                if (progress % 5000 == 0)
                {
                    forker.OnProgress(this, new ProgressEventArgs("Processing " + _catalogue, new ProgressMeasurement(progress, ProgressType.Records), sw.Elapsed));
                }
            }

            //final value
            forker.OnProgress(this, new ProgressEventArgs("Processing " + _catalogue, new ProgressMeasurement(progress, ProgressType.Records), sw.Elapsed));

            con.Close();
        }

        sw.Stop();

        foreach (var state in byPivotRowStatesOverDataLoadRunId.Values)
        {
            state.CalculateFinalValues();
        }

        //now commit results
        using (var con = dqeRepository.BeginNewTransactedConnection())
        {
            try
            {
                //mark down that we are beginning an evaluation on this the day of our lord etc...
                Evaluation evaluation = new Evaluation(dqeRepository, _catalogue);

                foreach (var state in byPivotRowStatesOverDataLoadRunId.Values)
                {
                    state.CommitToDatabase(evaluation, _catalogue, con.Connection, con.Transaction);
                }

                if (_timePeriodicityField != null)
                {
                    foreach (PeriodicityCubesOverTime periodicity in byPivotCategoryCubesOverTime.Values)
                    {
                        periodicity.CommitToDatabase(evaluation);
                    }
                }

                con.ManagedTransaction.CommitAndCloseConnection();
            }
            catch (Exception)
            {
                con.ManagedTransaction.AbandonAndCloseConnection();
                throw;
            }
        }

        forker.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "CatalogueConstraintReport completed successfully and committed results to DQE server"));
    }
    catch (Exception e)
    {
        if (!(e is OperationCanceledException))
        {
            forker.OnNotify(this, new NotifyEventArgs(ProgressEventType.Error, "Fatal Crash", e));
        }
        else
        {
            forker.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "DQE Execution Cancelled", e));
        }
    }
    finally
    {
        toDatabaseLogger.FinalizeTableLoadInfos();
    }
}