/// <summary>
/// Looks up the cached results table for the given <paramref name="configuration"/>/<paramref name="operation"/>
/// pair in the results-manager table.  Returns the table (and the SQL that was executed to populate it via
/// <paramref name="sql"/>) or null/null when there is no cache entry.
/// "Unsafe" because the WHERE clause is built by string interpolation; configuration.ID is numeric and
/// operation is an enum value so this is not attacker-controllable, but it is not parameterised either.
/// </summary>
public IHasFullyQualifiedNameToo GetLatestResultsTableUnsafe(AggregateConfiguration configuration, AggregateOperation operation, out string sql)
{
    var syntax = _database.Server.GetQuerySyntaxHelper();
    var mgrTable = _database.ExpectTable(ResultsManagerTable);

    using (var con = _server.GetConnection())
    {
        con.Open();
        using (var cmd = DatabaseCommandHelper.GetCommand(
            $@"Select {syntax.EnsureWrapped("TableName")}, {syntax.EnsureWrapped("SqlExecuted")} from {mgrTable.GetFullyQualifiedName()} WHERE {syntax.EnsureWrapped("AggregateConfiguration_ID")} = {configuration.ID} AND {syntax.EnsureWrapped("Operation")} = '{operation}'", con))
        {
            using (var r = cmd.ExecuteReader())
                if (r.Read())
                {
                    string tableName = r["TableName"].ToString();
                    sql = r["SqlExecuted"] as string;
                    return(_database.ExpectTable(tableName));
                }
        }
    }

    // no cache entry found
    sql = null;
    return(null);
}
/// <summary>
/// Checks that a DataSet row with ID <paramref name="dataSetID"/> exists in the logging database,
/// creating it if the <paramref name="notifier"/> accepts the proposed fix.
/// </summary>
/// <param name="notifier">Receives success/fail events; its return from the Fail event decides whether to create the row</param>
/// <param name="dataSetID">The dataset identifier to look for / create</param>
private void CheckDatasetExists(ICheckNotifier notifier, string dataSetID)
{
    using (var conn = _server.GetConnection())
    {
        conn.Open();

        // Parameterised lookup; also dispose the command (previously leaked)
        using (var cmd = _server.GetCommand("SELECT 1 FROM DataSet WHERE dataSetID=@dsID", conn))
        {
            _server.AddParameterWithValueToCommand("@dsID", cmd, dataSetID);

            var found = cmd.ExecuteScalar();
            if (found != null)
            {
                notifier.OnCheckPerformed(new CheckEventArgs("Found default dataset: " + dataSetID, CheckResult.Success, null));
                return;
            }
        }

        // Not found - offer to create it (OnCheckPerformed returns true when the fix is accepted)
        if (notifier.OnCheckPerformed(new CheckEventArgs("Did not find default dataset '" + dataSetID + "'.", CheckResult.Fail, null, "Create the dataset '" + dataSetID + "'")))
        {
            using (var cmd = _server.GetCommand("INSERT INTO DataSet (dataSetID, name) VALUES (@dsID, @dsID)", conn))
            {
                _server.AddParameterWithValueToCommand("@dsID", cmd, dataSetID);
                cmd.ExecuteNonQuery();
            }
        }
    }
}
/// <summary>
/// Executes <paramref name="sql"/> on <paramref name="server"/> on a background task and binds the
/// resulting DataTable to dataGridView1.  No-ops if a previous load task is still running.
/// Progress indicators (pbLoading / llCancel) are shown for the duration of the fetch.
/// </summary>
private void LoadDataTableAsync(DiscoveredServer server, string sql)
{
    //it is already running and not completed
    if (_task != null && !_task.IsCompleted)
    {
        return;
    }

    HideFatal();
    pbLoading.Visible = true;
    llCancel.Visible = true;

    _task = Task.Factory.StartNew(() =>
    {
        // Wait up to ~1 second for the window handle to exist so the Invoke calls below are safe
        int timeout = 1000;
        while (!IsHandleCreated && timeout > 0)
        {
            timeout -= 10;
            Thread.Sleep(10);
        }

        try
        {
            //then execute the command
            using (DbConnection con = server.GetConnection())
            {
                con.Open();

                // _cmd is stored in a field, presumably so a cancel handler can call Cancel() on it - TODO confirm
                _cmd = server.GetCommand(sql, con);
                _cmd.CommandTimeout = _timeoutControls.Timeout;

                DbDataAdapter a = server.GetDataAdapter(_cmd);
                DataTable dt = new DataTable();
                a.Fill(dt);

                MorphBinaryColumns(dt);

                // Marshal the data bind back onto the UI thread
                Invoke(new MethodInvoker(() =>
                {
                    dataGridView1.DataSource = dt;
                }));
                con.Close();
            }
        }
        catch (Exception e)
        {
            ShowFatal(e);
        }
        finally
        {
            // Hide the progress indicators again (UI thread) whether we succeeded or failed
            if (IsHandleCreated)
            {
                Invoke(new MethodInvoker(() =>
                {
                    pbLoading.Visible = false;
                    llCancel.Visible = false;
                }));
            }
        }
    });
}
/// <summary>
/// Drops <paramref name="database"/>, first kicking out all other users (SINGLE_USER WITH ROLLBACK IMMEDIATE).
/// If the connection's current catalog is the database being dropped we reconnect to master first.
/// Clears all connection pools afterwards so pooled connections to the dead database are not reused.
/// </summary>
public override void DropDatabase(DiscoveredDatabase database)
{
    bool userIsCurrentlyInDatabase = database.Server.GetCurrentDatabase().GetRuntimeName().Equals(database.GetRuntimeName());

    var serverConnectionBuilder = new SqlConnectionStringBuilder(database.Server.Builder.ConnectionString);
    if (userIsCurrentlyInDatabase)
    {
        serverConnectionBuilder.InitialCatalog = "master";
    }

    // Create a new server so we don't mutate database.Server and cause a whole lot of side-effects in other code, e.g. attachers
    var server = new DiscoveredServer(serverConnectionBuilder);
    var databaseToDrop = database.GetRuntimeName();

    // Escape any ']' in the name so the bracket-quoted identifier cannot be broken out of
    // (previously the raw name was concatenated straight into the DDL)
    var escapedName = databaseToDrop.Replace("]", "]]");

    string sql = "ALTER DATABASE [" + escapedName + "] SET SINGLE_USER WITH ROLLBACK IMMEDIATE" + Environment.NewLine;
    sql += "DROP DATABASE [" + escapedName + "]";

    using (var con = (SqlConnection)server.GetConnection())
    {
        con.Open();
        using (var cmd = new SqlCommand(sql, con))
            cmd.ExecuteNonQuery();
    }

    SqlConnection.ClearAllPools();
}
/// <summary>
/// Executes ExecuteScalarSQL against <paramref name="server"/> and converts the scalar result to a DateTime.
/// </summary>
/// <param name="server">The server to run the query on</param>
/// <param name="listener">Informed before the SQL is executed</param>
/// <returns>The maximum date of data loaded, as returned by ExecuteScalarSQL</returns>
/// <exception cref="DataLoadProgressUpdateException">If the query returns null/DBNull or a non-date value</exception>
private DateTime GetMaxDate(DiscoveredServer server, IDataLoadEventListener listener)
{
    using (var con = server.GetConnection())
    {
        con.Open();

        listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information,
            "About to execute SQL to determine the maximum date for data loaded:" + ExecuteScalarSQL));

        using (var cmd = server.GetCommand(ExecuteScalarSQL, con))
        {
            var scalarValue = cmd.ExecuteScalar();

            // Guard: no rows / NULL result
            if (scalarValue == null || scalarValue == DBNull.Value)
                throw new DataLoadProgressUpdateException("ExecuteScalarSQL specified for determining the maximum date of data loaded returned null when executed");

            try
            {
                return Convert.ToDateTime(scalarValue);
            }
            catch (Exception e)
            {
                throw new DataLoadProgressUpdateException("ExecuteScalarSQL specified for determining the maximum date of data loaded returned a value that was not a Date:" + scalarValue, e);
            }
        }
    }
}
/// <summary>
/// Runs the configured extractions (the specific SQL query if set, then one "select *" per configured
/// table name) and returns the total number of lines written.
/// </summary>
public int DoExtraction()
{
    var linesWritten = 0;

    using (var con = _server.GetConnection())
    {
        con.Open();

        if (_specificSQL != null)
            linesWritten += ExtractSQL(_specificSQL, _specificSQLTableName, con);

        if (_tableNames != null)
            foreach (var tableName in _tableNames)
                linesWritten += ExtractSQL("select * from " + tableName, tableName, con);

        con.Close();
    }

    return linesWritten;
}
/// <summary>
/// Looks up data stored in the Catalogue with a query matching on any of the provided uids. All values must be supplied if the Catalogue has a column of the corresponding name (i.e. if Catalogue has SeriesInstanceUID you must supply <paramref name="seriesuid"/>)
/// </summary>
/// <param name="studyuid"></param>
/// <param name="seriesuid"></param>
/// <param name="imageuid"></param>
/// <returns>True if at least one matching record exists (i.e. the uid combination is in the blacklist)</returns>
public bool DoLookup(string studyuid, string seriesuid, string imageuid)
{
    // NOTE(review): _queryBuilder.SQL is assumed to contain named parameters matching the
    // Default*IdColumnName constants used below - confirm against the query builder
    string sql = _queryBuilder.SQL;

    using (var con = _server.GetConnection())
    {
        con.Open();

        using (var cmd = _server.GetCommand(sql, con))
        {
            //Add the current row UIDs to the parameters of the command
            if (_studyFilter != null)
            {
                _server.AddParameterWithValueToCommand(QueryToExecuteColumnSet.DefaultStudyIdColumnName, cmd, studyuid);
            }

            if (_seriesFilter != null)
            {
                _server.AddParameterWithValueToCommand(QueryToExecuteColumnSet.DefaultSeriesIdColumnName, cmd, seriesuid);
            }

            if (_instanceFilter != null)
            {
                _server.AddParameterWithValueToCommand(QueryToExecuteColumnSet.DefaultInstanceIdColumnName, cmd, imageuid);
            }

            using (var r = cmd.ExecuteReader())
            {
                //if we can read a record then we have an entry in the blacklist
                return(r.Read());
            }
        }
    }
}
/// <summary>
/// Returns the cached results table (if any) for the given <paramref name="configuration"/> and
/// <paramref name="operation"/>, or null when no cache entry exists.  "Unsafe" because the WHERE
/// clause is string-built; configuration.ID is numeric and operation is an enum so the values
/// are not attacker-controllable.
/// </summary>
public IHasFullyQualifiedNameToo GetLatestResultsTableUnsafe(AggregateConfiguration configuration, AggregateOperation operation)
{
    using (var con = _server.GetConnection())
    {
        con.Open();

        // Dispose the command and reader (previously both were leaked)
        using (var cmd = DatabaseCommandHelper.GetCommand(
                   "Select TableName from CachedAggregateConfigurationResults WHERE AggregateConfiguration_ID = " +
                   configuration.ID + " AND Operation = '" + operation + "'", con))
        using (var r = cmd.ExecuteReader())
        {
            if (r.Read())
            {
                string tableName = r["TableName"].ToString();
                return _database.ExpectTable(tableName);
            }
        }
    }

    return null;
}
/// <summary>
/// Creates a test DataSet and DataLoadTask in the logging database and registers cleanup SQL
/// for both on the _sqlToCleanUp stack.
/// </summary>
/// <param name="taskName">The name for the DataLoadTask (dataset name becomes "Test_" + taskName)</param>
public void CreateDataLoadTask(string taskName)
{
    using (var con = _loggingServer.GetConnection())
    {
        con.Open();

        var datasetName = "Test_" + taskName;

        // Parameterise the inserts rather than concatenating taskName/datasetName into the SQL,
        // and dispose the commands (previously leaked)
        using (var datasetCmd = _loggingServer.GetCommand("INSERT INTO DataSet (dataSetID) VALUES (@dsName)", con))
        {
            _loggingServer.AddParameterWithValueToCommand("@dsName", datasetCmd, datasetName);
            datasetCmd.ExecuteNonQuery();
        }
        // Cleanup SQL is stored as raw text for later execution so the value is inlined there
        _sqlToCleanUp.Push("DELETE FROM DataSet WHERE dataSetID = '" + datasetName + "'");

        using (var taskCmd = _loggingServer.GetCommand(
                   "INSERT INTO DataLoadTask VALUES (100, @taskName, @taskName, GETDATE(), @dsName, 1, 1, @dsName)", con))
        {
            _loggingServer.AddParameterWithValueToCommand("@taskName", taskCmd, taskName);
            _loggingServer.AddParameterWithValueToCommand("@dsName", taskCmd, datasetName);
            taskCmd.ExecuteNonQuery();
        }
        _sqlToCleanUp.Push("DELETE FROM DataLoadTask WHERE dataSetID = '" + datasetName + "'");
    }
}
/// <summary>
/// Deletes all DataLoadTask and DataSet rows from the unit-test logging database whose
/// dataSetID contains "truncate" (case-insensitive).
/// </summary>
private void CleanupTruncateCommand()
{
    var lds = new DiscoveredServer(UnitTestLoggingConnectionString);
    using (var con = lds.GetConnection())
    {
        con.Open();

        // Dispose each command (previously leaked); tasks first because they reference datasets
        using (var cmd = lds.GetCommand("DELETE FROM DataLoadTask where LOWER(dataSetID) like '%truncate%'", con))
            cmd.ExecuteNonQuery();

        using (var cmd = lds.GetCommand("DELETE FROM DataSet where LOWER(dataSetID) like '%truncate%'", con))
            cmd.ExecuteNonQuery();
    }
}
/// <summary>
/// Update the database <paramref name="server"/> to redact the <paramref name="failure"/>.
/// </summary>
/// <param name="server">Where to connect to get the data, can be null if <see cref="RulesOnly"/> is true</param>
/// <param name="failure">The failure to redact/create a rule for</param>
/// <param name="usingRule">Pass null to create a new rule or give value to reuse an existing rule</param>
public void Update(DiscoveredServer server, Failure failure, IsIdentifiableRule usingRule)
{
    //theres no rule yet so create one (and add to RedList.yaml)
    if (usingRule == null)
    {
        usingRule = Add(failure, RuleAction.Report);
    }

    //if we are running in rules only mode we don't need to also update the database
    if (RulesOnly)
    {
        return;
    }

    var syntax = server.GetQuerySyntaxHelper();

    //the fully specified name e.g. [mydb]..[mytbl]
    string tableName = failure.Resource;

    var tokens = tableName.Split('.', StringSplitOptions.RemoveEmptyEntries);

    // First token is the database, last is the table; anything between (e.g. schema) is ignored
    var db = tokens.First();
    tableName = tokens.Last();

    if (string.IsNullOrWhiteSpace(db) || string.IsNullOrWhiteSpace(tableName) || string.Equals(db, tableName))
    {
        throw new NotSupportedException($"Could not understand table name {failure.Resource}, maybe it is not full specified with a valid database and table name?");
    }

    // Strip any wrapping/qualification so we have plain runtime names
    db = syntax.GetRuntimeName(db);
    tableName = syntax.GetRuntimeName(tableName);

    DiscoveredTable table = server.ExpectDatabase(db).ExpectTable(tableName);

    //if we've never seen this table before, cache its primary key column (null if it has none)
    if (!_primaryKeys.ContainsKey(table))
    {
        var pk = table.DiscoverColumns().SingleOrDefault(k => k.IsPrimaryKey);
        _primaryKeys.Add(table, pk);
    }

    using (var con = server.GetConnection())
    {
        con.Open();

        // Run every UPDATE the strategy produces for this failure/rule
        // NOTE(review): cmd is not disposed here
        foreach (var sql in UpdateStrategy.GetUpdateSql(table, _primaryKeys, failure, usingRule))
        {
            var cmd = server.GetCommand(sql, con);
            cmd.ExecuteNonQuery();
        }
    }
}
/// <summary>
/// Worked example: loads DBMS implementations, creates (if needed) the FAnsiTests database,
/// creates a table from an in-memory DataTable, reads the rows back and drops the table.
/// </summary>
public void Example_TableCreation()
{
    //Load implementation assemblies that are relevant to your application
    ImplementationManager.Load(
        typeof(FAnsi.Implementations.MicrosoftSQL.MicrosoftSQLImplementation).Assembly,
        typeof(FAnsi.Implementations.Oracle.OracleImplementation).Assembly,
        typeof(FAnsi.Implementations.MySql.MySqlImplementation).Assembly);

    //Create some test data
    DataTable dt = new DataTable();
    dt.Columns.Add("Name");
    dt.Columns.Add("DateOfBirth");

    dt.Rows.Add("Frank", "2001-01-01");
    dt.Rows.Add("Dave", "2001-01-01");

    //Create a server object (example credentials only - do not hardcode real passwords)
    //var server = new DiscoveredServer(@"server=localhost\sqlexpress;Trusted_Connection=True;", DatabaseType.MicrosoftSQLServer);
    var server = new DiscoveredServer(@"Server=localhost;Uid=root;Pwd=zombie;SSL-Mode=None", DatabaseType.MySql);

    //Find the database
    var database = server.ExpectDatabase("FAnsiTests");

    //Or create it
    if (!database.Exists())
        database.Create();

    //Create a table that can store the data in dt
    var table = database.CreateTable("MyTable", dt);

    //Table has 2 rows in it
    Console.WriteLine("Table {0} has {1} rows", table.GetFullyQualifiedName(), table.GetRowCount());
    Console.WriteLine("Column Name is of type {0}", table.DiscoverColumn("Name").DataType.SQLType);
    Console.WriteLine("Column DateOfBirth is of type {0}", table.DiscoverColumn("DateOfBirth").DataType.SQLType);

    using (DbConnection con = server.GetConnection())
    {
        con.Open();

        // Dispose the command and reader (previously both were leaked)
        using (DbCommand cmd = server.GetCommand("Select * from " + table.GetFullyQualifiedName(), con))
        using (DbDataReader r = cmd.ExecuteReader())
        {
            while (r.Read())
                Console.WriteLine(string.Join(",", r["Name"], r["DateOfBirth"]));
        }
    }

    //Drop the table afterwards
    table.Drop();
}
/// <summary>
/// Yields the names of all foreign keys on <paramref name="server"/> that have no delete rule
/// (sys.foreign_keys.delete_referential_action = 0, i.e. NO ACTION).
/// </summary>
private IEnumerable<string> GetForeignKeys(DiscoveredServer server)
{
    using (var con = server.GetConnection())
    {
        con.Open();

        // Dispose the command and reader - previously they leaked, and in an iterator the leak
        // persisted whenever the caller abandoned enumeration early
        using (var cmd = server.GetCommand(@"select name from sys.foreign_keys where delete_referential_action = 0", con))
        using (var r = cmd.ExecuteReader())
        {
            while (r.Read())
                yield return (string)r["name"];
        }
    }
}
/// <summary>
/// Returns the replacement value for <paramref name="toSwap"/>, using a single-entry cache of the
/// last successful lookup before falling back to a parameterised database query on the swap table.
/// </summary>
/// <param name="toSwap">The value to look up in the swap column</param>
/// <param name="reason">Null on success; otherwise explains why no substitution was found</param>
/// <returns>The replacement value, or null when no match exists</returns>
public override string GetSubstitutionFor(string toSwap, out string reason)
{
    reason = null;

    // If the cached key matches, return the last value
    if (string.Equals(toSwap, _lastKey) && _lastVal != null)
    {
        _logger.Debug("Using cached swap value");

        CacheHit++;
        Success++;
        return _lastVal;
    }

    CacheMiss++;

    // Else fall through to the database lookup
    using (new TimeTracker(DatabaseStopwatch))
    using (DbConnection con = _server.GetConnection())
    {
        con.Open();

        // Only column/table names are formatted in; the looked-up value itself is parameterised
        string sql = string.Format("SELECT {0} FROM {1} WHERE {2}=@val",
            _options.ReplacementColumnName,
            _swapTable.GetFullyQualifiedName(),
            _options.SwapColumnName);

        // Dispose the command (previously leaked)
        using (DbCommand cmd = _server.GetCommand(sql, con))
        {
            _server.AddParameterWithValueToCommand("@val", cmd, toSwap);

            object result = cmd.ExecuteScalar();

            if (result == DBNull.Value || result == null)
            {
                reason = "No match found for '" + toSwap + "'";
                Fail++;
                return null;
            }

            // Cache the successful lookup for the next call
            _lastKey = toSwap;
            _lastVal = result.ToString();

            ++Success;
            return _lastVal;
        }
    }
}
/// <summary>
/// Creates a new DataLoadRun row in the logging database under the DataLoadTask named
/// <paramref name="dataLoadTaskName"/>, recording the new run's identity value into _id.
/// </summary>
/// <exception cref="Exception">If no DataLoadTask with that name exists</exception>
private void RecordNewDataLoadInDatabase(string dataLoadTaskName)
{
    int parentTaskID = -1;

    using (var con = (SqlConnection)_server.GetConnection())
    {
        con.Open();

        // Look up the ID of the parent DataLoadTask by name (parameterised)
        SqlCommand cmd = new SqlCommand("SELECT ID FROM DataLoadTask WHERE name=@name", con);
        cmd.Parameters.Add("@name", SqlDbType.VarChar, 255);
        cmd.Parameters["@name"].Value = dataLoadTaskName;

        var result = cmd.ExecuteScalar();

        if (result == null || result == DBNull.Value)
        {
            throw new Exception("Could not find data load task named:" + dataLoadTaskName);
        }

        //ID can come back as a decimal or an Int32 or an Int64 so whatever, just turn it into a string and then parse it
        parentTaskID = int.Parse(result.ToString());

        // Insert the new DataLoadRun and fetch its identity value in the same round trip
        cmd = new SqlCommand(
            @"INSERT INTO DataLoadRun (description,startTime,dataLoadTaskID,isTest,packageName,userAccount,suggestedRollbackCommand) VALUES (@description,@startTime,@dataLoadTaskID,@isTest,@packageName,@userAccount,@suggestedRollbackCommand); SELECT SCOPE_IDENTITY();", con);

        cmd.Parameters.Add("@description", SqlDbType.VarChar, -1);
        cmd.Parameters.Add("@startTime", SqlDbType.DateTime);
        cmd.Parameters.Add("@dataLoadTaskID", SqlDbType.Int);
        cmd.Parameters.Add("@isTest", SqlDbType.Bit);
        cmd.Parameters.Add("@packageName", SqlDbType.VarChar, 100);
        cmd.Parameters.Add("@userAccount", SqlDbType.VarChar, 50);
        cmd.Parameters.Add("@suggestedRollbackCommand", SqlDbType.VarChar, -1);

        cmd.Parameters["@description"].Value = _description;
        cmd.Parameters["@startTime"].Value = _startTime;
        cmd.Parameters["@dataLoadTaskID"].Value = parentTaskID;
        cmd.Parameters["@isTest"].Value = _isTest;
        cmd.Parameters["@packageName"].Value = _packageName;
        cmd.Parameters["@userAccount"].Value = _userAccount;
        cmd.Parameters["@suggestedRollbackCommand"].Value = _suggestedRollbackCommand ?? string.Empty;

        //ID can come back as a decimal or an Int32 or an Int64 so whatever, just turn it into a string and then parse it
        _id = int.Parse(cmd.ExecuteScalar().ToString());
    }
}
/// <summary>
/// Test setup: creates the test table on the scratch database and imports it as a TableInfo
/// (populating tableInfoCreated / columnInfosCreated).
/// </summary>
public void CreateDataset()
{
    _server = DiscoveredDatabaseICanCreateRandomTablesIn.Server;

    using (var con = _server.GetConnection())
    {
        con.Open();

        // Dispose the command (previously leaked)
        using (var cmd = _server.GetCommand("CREATE TABLE " + TABLE_NAME + "(Name varchar(10), Address varchar(500))", con))
            cmd.ExecuteNonQuery();
    }

    // NOTE(review): assumes TABLE_NAME == "TableInfoSynchronizerTests" - confirm and prefer
    // using the constant here too
    var tbl = DiscoveredDatabaseICanCreateRandomTablesIn.ExpectTable("TableInfoSynchronizerTests");

    TableInfoImporter importer = new TableInfoImporter(CatalogueRepository, tbl);
    importer.DoImport(out tableInfoCreated, out columnInfosCreated);
}
/// <summary>
/// Tests TableInfoSynchronizer behaviour when a new column (Birthday) is added to the underlying
/// table after the Catalogue was forward-engineered: accepting changes should propagate the new
/// column everywhere; rejecting should throw describing the missing column.
/// </summary>
/// <param name="acceptChanges">True to accept the detected difference, false to expect a thrown Exception</param>
public void SynchronizationTests_ColumnAddedWithCatalogue(bool acceptChanges)
{
    ForwardEngineerCatalogue cataEngineer = new ForwardEngineerCatalogue(tableInfoCreated, columnInfosCreated, true);
    Catalogue cata;
    CatalogueItem[] cataItems;
    ExtractionInformation[] extractionInformations;

    cataEngineer.ExecuteForwardEngineering(out cata, out cataItems, out extractionInformations);

    try
    {
        // Baseline: 2 columns forward-engineered into 2 items / 2 extraction informations
        Assert.AreEqual(TABLE_NAME, cata.Name);
        Assert.AreEqual(2, cataItems.Length);
        Assert.AreEqual(2, extractionInformations.Length);

        // Mutate the underlying table behind the Catalogue's back
        using (var con = _server.GetConnection())
        {
            con.Open();
            _server.GetCommand("ALTER TABLE " + TABLE_NAME + " ADD Birthday datetime not null", con).ExecuteNonQuery();
        }

        TableInfoSynchronizer synchronizer = new TableInfoSynchronizer(tableInfoCreated);

        if (acceptChanges)
        {
            //accept changes should result in a synchronized table
            Assert.AreEqual(true, synchronizer.Synchronize(new AcceptAllCheckNotifier()));
            Assert.AreEqual(3, tableInfoCreated.ColumnInfos.Length); //should be 3 now
            Assert.AreEqual(3, cata.CatalogueItems.Length); //should be 3 now
            Assert.AreEqual(3, cata.GetAllExtractionInformation(ExtractionCategory.Any).Length); //should be 3 now

            // The new column appears exactly once in each place
            Assert.AreEqual(1, cata.GetAllExtractionInformation(ExtractionCategory.Any).Count(e => e.SelectSQL.Contains("Birthday")));
            Assert.AreEqual(1, cata.CatalogueItems.Count(ci => ci.Name.Contains("Birthday")));
        }
        else
        {
            var ex = Assert.Throws<Exception>(() => synchronizer.Synchronize(new ThrowImmediatelyCheckNotifier()));
            Assert.AreEqual("The following columns are missing from the TableInfo:Birthday", ex.Message);
        }
    }
    finally
    {
        cata.DeleteInDatabase();
    }
}
/// <summary>
/// Wraps a connection to <paramref name="discoveredServer"/>.  When a
/// <paramref name="managedTransaction"/> is supplied its existing connection/transaction are
/// reused; otherwise a brand new connection is opened and flagged for closure on dispose.
/// </summary>
internal ManagedConnection(DiscoveredServer discoveredServer, IManagedTransaction managedTransaction)
{
    //get a new connection or use the existing one within the transaction
    Connection = discoveredServer.GetConnection(managedTransaction);

    //if there is a transaction, also store the transaction
    ManagedTransaction = managedTransaction;
    Transaction = managedTransaction?.Transaction;

    // Reusing a transaction's connection: nothing further to do, the caller owns its lifetime
    if (managedTransaction != null)
        return;

    //if there isn't a transaction then we opened a new connection so we had better remember to close it again
    CloseOnDispose = true;
    Debug.Assert(Connection.State == ConnectionState.Closed);
    Connection.Open();
}
/// <summary>
/// Yields the names of all non-primary-key indexes on user tables of <paramref name="server"/>
/// (excluding the 'sysdiagrams' table and unnamed indexes).
/// </summary>
private IEnumerable<string> GetIndexes(DiscoveredServer server)
{
    using (var con = server.GetConnection())
    {
        con.Open();

        // Dispose the command and reader - previously they leaked, and in an iterator the leak
        // persisted whenever the caller abandoned enumeration early
        using (var cmd = server.GetCommand(@"select si.name from sys.indexes si JOIN sys.objects so ON si.[object_id] = so.[object_id] WHERE so.type = 'U' AND is_primary_key = 0 and si.name is not null and so.name <> 'sysdiagrams'", con))
        using (var r = cmd.ExecuteReader())
        {
            while (r.Read())
                yield return (string)r["name"];
        }
    }
}
/// <summary>
/// Test fixture setup: creates the test table on a cleaned SQL Server scratch database and
/// imports it as a TableInfo (populating tableInfoCreated / columnInfosCreated).
/// </summary>
protected override void SetUp()
{
    base.SetUp();

    _database = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer);
    _server = _database.Server;

    using (var con = _server.GetConnection())
    {
        con.Open();

        // Dispose the command (previously leaked)
        using (var cmd = _server.GetCommand("CREATE TABLE " + TABLE_NAME + "(Name varchar(10), Address varchar(500))", con))
            cmd.ExecuteNonQuery();
    }

    // NOTE(review): assumes TABLE_NAME == "TableInfoSynchronizerTests" - confirm and prefer
    // using the constant here too
    var tbl = _database.ExpectTable("TableInfoSynchronizerTests");

    TableInfoImporter importer = new TableInfoImporter(CatalogueRepository, tbl);
    importer.DoImport(out tableInfoCreated, out columnInfosCreated);
}
/// <summary>
/// Queries the PostgreSQL server for its version ("SHOW server_version") and parses it into a
/// Version, or returns null when the server yields no row or a NULL value.
/// </summary>
public override Version GetVersion(DiscoveredServer server)
{
    using (var con = server.GetConnection())
    {
        con.Open();
        using (var cmd = server.GetCommand("SHOW server_version", con))
        using (var r = cmd.ExecuteReader())
        {
            // No row at all -> unknown version
            if (!r.Read())
                return null;

            return r[0] == DBNull.Value ? null : CreateVersionFromString((string)r[0]);
        }
    }
}
/// <summary>
/// Queries v$version for the 'Oracle Database%' banner row and parses it into a Version, or
/// returns null when the server yields no row or a NULL value.
/// </summary>
public override Version GetVersion(DiscoveredServer server)
{
    using (var con = server.GetConnection())
    {
        con.Open();
        using (var cmd = server.GetCommand("SELECT * FROM v$version WHERE BANNER like 'Oracle Database%'", con))
        using (var r = cmd.ExecuteReader())
        {
            // No row at all -> unknown version
            if (!r.Read())
                return null;

            return r[0] == DBNull.Value ? null : CreateVersionFromString((string)r[0]);
        }
    }
}
/// <summary>
/// Runs sp_who2 (from master, on a copy of the connection so the original is not mutated) and
/// throws if any session is currently connected to the test database.
/// </summary>
/// <exception cref="Exception">Listing the sp_who2 row of the offending session</exception>
void ThrowIfDatabaseLock()
{
    var serverCopy = new DiscoveredServer(new SqlConnectionStringBuilder(DiscoveredServerICanCreateRandomDatabasesAndTablesOn.Builder.ConnectionString));
    serverCopy.ChangeDatabase("master");

    using (var con = serverCopy.GetConnection())
    {
        con.Open();

        // Dispose the command and reader (previously both were leaked)
        using (var cmd = serverCopy.GetCommand("exec sp_who2", con))
        using (var r = cmd.ExecuteReader())
        {
            while (r.Read())
            {
                if (r["DBName"].Equals(testDbName))
                {
                    object[] vals = new object[r.VisibleFieldCount];
                    r.GetValues(vals);
                    throw new Exception("Someone is locking " + testDbName + ":" + Environment.NewLine + string.Join(",", vals));
                }
            }
        }
    }
}
/// <summary>
/// Asks <paramref name="checkNotifier"/> whether it is OK to run <paramref name="sql"/> and, if
/// consent is given, fills <paramref name="dt"/> with the query results; otherwise records a
/// Fail check event.
/// </summary>
private void FillTableWithQueryIfUserConsents(DataTable dt, string sql, ICheckNotifier checkNotifier, DiscoveredServer server)
{
    var execute = checkNotifier.OnCheckPerformed(new CheckEventArgs("About to fetch data, confirming user is happy with SQL", CheckResult.Warning, null, sql));

    // Guard clause: no consent, record the refusal and bail out
    if (!execute)
    {
        checkNotifier.OnCheckPerformed(new CheckEventArgs("User decided not to execute the SQL", CheckResult.Fail));
        return;
    }

    using (var con = server.GetConnection())
    {
        con.Open();
        using (var cmd = server.GetCommand(sql, con))
        {
            cmd.CommandTimeout = _timeout;

            var adapter = server.GetDataAdapter(cmd);
            adapter.Fill(dt);
        }
    }
}
/// <summary>
/// Prepares archiving for the table: validates there is a primary key and no reserved hic_*
/// columns, adds the special DataLoadRunID / ValidFrom columns where required, creates the
/// _Archive table (unless it already exists) and returns the archive-creation SQL.
/// </summary>
/// <param name="notifier">Check notifier (not used directly in this base implementation)</param>
/// <param name="timeout">Timeout in seconds applied to each column-add operation</param>
/// <returns>The SQL used (or that would be used) to create the archive table</returns>
/// <exception cref="TriggerException">If preconditions on keys/columns are violated</exception>
public virtual string CreateTrigger(ICheckNotifier notifier, int timeout = 30)
{
    if (!_primaryKeys.Any())
    {
        throw new TriggerException("There must be at least 1 primary key");
    }

    //if _Archive exists skip creating it
    bool skipCreatingArchive = _archiveTable.Exists();

    //check _Archive does not already exist
    foreach (string forbiddenColumnName in new[] { "hic_validTo", "hic_userID", "hic_status" })
    {
        if (_columns.Any(c => c.GetRuntimeName().Equals(forbiddenColumnName, StringComparison.CurrentCultureIgnoreCase)))
        {
            throw new TriggerException("Table " + _table + " already contains a column called " + forbiddenColumnName + " this column is reserved for Archiving");
        }
    }

    // Which of the two special columns are missing from the live table?
    bool b_mustCreate_validFrom = !_columns.Any(c => c.GetRuntimeName().Equals(SpecialFieldNames.ValidFrom, StringComparison.CurrentCultureIgnoreCase));
    bool b_mustCreate_dataloadRunId = !_columns.Any(c => c.GetRuntimeName().Equals(SpecialFieldNames.DataLoadRunID, StringComparison.CurrentCultureIgnoreCase)) && _createDataLoadRunIdAlso;

    //forces column order dataloadrunID then valid from (doesn't prevent these being in the wrong place in the record but hey ho - possibly not an issue anyway since probably the 3 values in the archive are what matters for order - see the Trigger which populates *,X,Y,Z where * is all columns in main table)
    if (b_mustCreate_dataloadRunId && !b_mustCreate_validFrom)
    {
        throw new TriggerException("Cannot create trigger because table contains " + SpecialFieldNames.ValidFrom + " but not " + SpecialFieldNames.DataLoadRunID + " (ID must be placed before valid from in column order)");
    }

    //must add validFrom outside of transaction if we want SMO to pick it up
    if (b_mustCreate_dataloadRunId)
    {
        _table.AddColumn(SpecialFieldNames.DataLoadRunID, new DatabaseTypeRequest(typeof(int)), true, timeout);
    }

    var syntaxHelper = _server.GetQuerySyntaxHelper();
    var dateTimeDatatype = syntaxHelper.TypeTranslater.GetSQLDBTypeForCSharpType(new DatabaseTypeRequest(typeof(DateTime)));
    var nowFunction = syntaxHelper.GetScalarFunctionSql(MandatoryScalarFunctions.GetTodaysDate);

    //must add validFrom outside of transaction if we want SMO to pick it up
    if (b_mustCreate_validFrom)
    {
        // ValidFrom defaults to the DBMS's "today" function so new rows are stamped automatically
        _table.AddColumn(SpecialFieldNames.ValidFrom, string.Format(" {0} DEFAULT {1}", dateTimeDatatype, nowFunction), true, timeout);
    }

    //if we created columns we need to update _column
    if (b_mustCreate_dataloadRunId || b_mustCreate_validFrom)
    {
        _columns = _table.DiscoverColumns();
    }

    string sql = WorkOutArchiveTableCreationSQL();

    if (!skipCreatingArchive)
    {
        using (var con = _server.GetConnection())
        {
            con.Open();

            // NOTE(review): cmdCreateArchive is not disposed here
            var cmdCreateArchive = _server.GetCommand(sql, con);
            cmdCreateArchive.ExecuteNonQuery();

            // Archive-only audit columns (reserved names checked above)
            _archiveTable.AddColumn("hic_validTo", new DatabaseTypeRequest(typeof(DateTime)), true, timeout);
            _archiveTable.AddColumn("hic_userID", new DatabaseTypeRequest(typeof(string), 128), true, timeout);
            _archiveTable.AddColumn("hic_status", new DatabaseTypeRequest(typeof(string), 1), true, timeout);
        }
    }

    return(sql);
}
/// <summary>
/// Sends the identifiers in <paramref name="table"/> to the ANO server's substitution stored
/// procedure (as a table-valued parameter) and returns the resulting substitution table.  When
/// <paramref name="previewOnly"/> is true, all work - including pushing the ANO table to the
/// server if it does not exist yet - happens inside a transaction which is rolled back at the end.
/// </summary>
private DataTable GetSubstitutionsForANOEquivalents(DataTable table, bool previewOnly)
{
    using (var con = (SqlConnection)_server.GetConnection())
    {
        con.InfoMessage += _con_InfoMessage;

        // Nothing to substitute
        if (table.Rows.Count == 0)
        {
            return(table);
        }

        try
        {
            SqlTransaction transaction = null;

            if (previewOnly)
            {
                bool mustPush = !_anoTable.IsTablePushed();

                con.Open();
                transaction = con.BeginTransaction();//if it is preview only we will use a transaction which we will then rollback

                if (mustPush)
                {
                    // Request a type wide enough for the anonymous representation (integers + characters)
                    var cSharpType = new DatabaseTypeRequest(table.Columns[0].DataType, _anoTable.NumberOfIntegersToUseInAnonymousRepresentation + _anoTable.NumberOfCharactersToUseInAnonymousRepresentation);

                    //we want to use this syntax
                    var syntaxHelper = _server.Helper.GetQuerySyntaxHelper();

                    //push to the destination server
                    _anoTable.PushToANOServerAsNewTable(
                        //turn the csharp type into an SQL type e.g. string 30 becomes varchar(30)
                        syntaxHelper.TypeTranslater.GetSQLDBTypeForCSharpType(cSharpType), new ThrowImmediatelyCheckNotifier(), con, transaction);
                }
            }

            // NOTE(review): when previewOnly is false the connection is never explicitly opened
            // here - presumably SqlDataAdapter.Fill below opens/closes it automatically; confirm
            string substituteForANOIdentifiersProc = SubstitutionStoredprocedure;

            SqlCommand cmdSubstituteIdentifiers = new SqlCommand(substituteForANOIdentifiersProc, con);
            cmdSubstituteIdentifiers.CommandType = CommandType.StoredProcedure;
            cmdSubstituteIdentifiers.CommandTimeout = 500;
            cmdSubstituteIdentifiers.Transaction = transaction;

            cmdSubstituteIdentifiers.Parameters.Add("@batch", SqlDbType.Structured);
            cmdSubstituteIdentifiers.Parameters.Add("@tableName", SqlDbType.VarChar, 500);
            cmdSubstituteIdentifiers.Parameters.Add("@numberOfIntegersToUseInAnonymousRepresentation", SqlDbType.Int);
            cmdSubstituteIdentifiers.Parameters.Add("@numberOfCharactersToUseInAnonymousRepresentation", SqlDbType.Int);
            cmdSubstituteIdentifiers.Parameters.Add("@suffix", SqlDbType.VarChar, 10);

            //table valued parameter
            cmdSubstituteIdentifiers.Parameters["@batch"].TypeName = "dbo.Batch";
            cmdSubstituteIdentifiers.Parameters["@batch"].Value = table;

            cmdSubstituteIdentifiers.Parameters["@tableName"].Value = _anoTable.TableName;
            cmdSubstituteIdentifiers.Parameters["@numberOfIntegersToUseInAnonymousRepresentation"].Value = _anoTable.NumberOfIntegersToUseInAnonymousRepresentation;
            cmdSubstituteIdentifiers.Parameters["@numberOfCharactersToUseInAnonymousRepresentation"].Value = _anoTable.NumberOfCharactersToUseInAnonymousRepresentation;
            cmdSubstituteIdentifiers.Parameters["@suffix"].Value = _anoTable.Suffix;

            SqlDataAdapter da = new SqlDataAdapter(cmdSubstituteIdentifiers);
            DataTable dtToReturn = new DataTable();
            da.Fill(dtToReturn);

            // Preview mode: undo everything (including any pushed table)
            if (previewOnly)
            {
                transaction.Rollback();
            }

            return(dtToReturn);
        }
        catch (Exception e)
        {
            throw new Exception(SubstitutionStoredprocedure + " failed to complete correctly: " + e);
        }
    }
}
/// <summary>
/// Inserts a TableLoadRun row (recording its identity into _id), then one DataSource row per
/// source that contributed to the load (recording each identity into the source's ID).
/// </summary>
/// <param name="parent">The DataLoadRun this table load belongs to</param>
/// <param name="destinationTable">The table data was loaded into</param>
/// <param name="sources">The files/sources that contributed data (also stored in DataSources)</param>
/// <param name="expectedInserts">How many inserts the load is expected to perform</param>
private void RecordNewTableLoadInDatabase(DataLoadInfo parent, string destinationTable, DataSource[] sources, int expectedInserts)
{
    using (var con = (SqlConnection)_databaseSettings.GetConnection())
    using (var cmd = new SqlCommand("INSERT INTO TableLoadRun (startTime,dataLoadRunID,targetTable,expectedInserts,suggestedRollbackCommand) " + "VALUES (@startTime,@dataLoadRunID,@targetTable,@expectedInserts,@suggestedRollbackCommand); " + "SELECT SCOPE_IDENTITY();", con))
    {
        con.Open();

        cmd.Parameters.Add("@startTime", SqlDbType.DateTime);
        cmd.Parameters.Add("@dataLoadRunID", SqlDbType.Int);
        cmd.Parameters.Add("@targetTable", SqlDbType.VarChar, 200);
        cmd.Parameters.Add("@expectedInserts", SqlDbType.BigInt);
        cmd.Parameters.Add("@suggestedRollbackCommand", SqlDbType.VarChar, -1);

        cmd.Parameters["@startTime"].Value = DateTime.Now;
        cmd.Parameters["@dataLoadRunID"].Value = parent.ID;
        cmd.Parameters["@targetTable"].Value = destinationTable;
        cmd.Parameters["@expectedInserts"].Value = expectedInserts;
        cmd.Parameters["@suggestedRollbackCommand"].Value = _suggestedRollbackCommand;

        //get the ID, can come back as a decimal or an Int32 or an Int64 so whatever, just turn it into a string and then parse it
        _id = int.Parse(cmd.ExecuteScalar().ToString());

        //keep a record of all data sources
        DataSources = sources;

        //for each of the sources, create them in the DataSource table
        foreach (DataSource s in DataSources)
        {
            using (var cmdInsertDs = new SqlCommand("INSERT INTO DataSource (source,tableLoadRunID,originDate,MD5) " + "VALUES (@source,@tableLoadRunID,@originDate,@MD5); SELECT SCOPE_IDENTITY();", con))
            {
                cmdInsertDs.Parameters.Add("@source", SqlDbType.VarChar, -1);
                cmdInsertDs.Parameters.Add("@tableLoadRunID", SqlDbType.Int);
                cmdInsertDs.Parameters.Add("@originDate", SqlDbType.Date);
                cmdInsertDs.Parameters.Add("@MD5", SqlDbType.Binary, 128);

                cmdInsertDs.Parameters["@source"].Value = s.Source;
                cmdInsertDs.Parameters["@tableLoadRunID"].Value = _id;

                // originDate and MD5 are optional - store NULL when unknown
                if (s.UnknownOriginDate)
                {
                    cmdInsertDs.Parameters["@originDate"].Value = DBNull.Value;
                }
                else
                {
                    cmdInsertDs.Parameters["@originDate"].Value = s.OriginDate;
                }

                if (s.MD5 != null)
                {
                    cmdInsertDs.Parameters["@MD5"].Value = s.MD5;
                }
                else
                {
                    cmdInsertDs.Parameters["@MD5"].Value = DBNull.Value;
                }

                // record the identity of the new DataSource row back onto the source object
                s.ID = int.Parse(cmdInsertDs.ExecuteScalar().ToString());
            }
        }
    }
}
/// <summary>
/// Queries the external cohort <paramref name="source"/> database for the external data (project
/// number, origin id etc) of every cohort it holds, and injects that data into the matching
/// in-memory ExtractableCohorts.  Sources which are unconfigured are skipped; sources which are
/// unreachable or incomplete are blacklisted.
/// </summary>
private void GetCohortAvailability(ExternalCohortTable source)
{
    DiscoveredServer server = null;
    Exception ex = null;

    //it obviously hasn't been initialised properly yet
    if (string.IsNullOrWhiteSpace(source.Server) || string.IsNullOrWhiteSpace(source.Database))
    {
        return;
    }

    try
    {
        server = DataAccessPortal.GetInstance().ExpectDatabase(source, DataAccessContext.DataExport).Server;
    }
    catch (Exception exception)
    {
        ex = exception;
    }

    // Blacklist if we could not resolve a server, it doesn't answer within 3 seconds, or its
    // configuration is incomplete
    if (server == null || !server.RespondsWithinTime(3, out ex) || !source.IsFullyPopulated())
    {
        Blacklist(source, ex);
        return;
    }

    try
    {
        using (var con = server.GetConnection())
        {
            con.Open();

            //Get all of the project numbers and remote origin ids etc from the source in one query
            using (var cmd = server.GetCommand(source.GetExternalDataSql(), con))
            {
                cmd.CommandTimeout = 120;

                using (var r = cmd.ExecuteReader())
                {
                    while (r.Read())
                    {
                        //really should be only one here but still they might for some reason have 2 references to the same external cohort
                        if (_cohortsByOriginId.TryGetValue(Convert.ToInt32(r["OriginID"]), out HashSet<ExtractableCohort> result))
                        {
                            //Tell the cohorts what their external data values are so they don't have to fetch them themselves individually
                            foreach (ExtractableCohort c in result.Where(c => c.ExternalCohortTable_ID == source.ID))
                            {
                                //load external data from the result set
                                var externalData = new ExternalCohortDefinitionData(r, source.Name);

                                //tell the cohort about the data
                                c.InjectKnown(externalData);

                                lock (_oProjectNumberToCohortsDictionary)
                                {
                                    //for performance also keep a dictionary of project number => compatible cohorts
                                    if (!ProjectNumberToCohortsDictionary.ContainsKey(externalData.ExternalProjectNumber))
                                    {
                                        ProjectNumberToCohortsDictionary.Add(externalData.ExternalProjectNumber, new List<ExtractableCohort>());
                                    }

                                    ProjectNumberToCohortsDictionary[externalData.ExternalProjectNumber].Add(c);
                                }
                            }
                        }
                    }
                }
            }
        }
    }
    catch (Exception e)
    {
        // Any failure while reading the source means we cannot trust it - blacklist it
        Blacklist(source, e);
    }
}
/// <summary>
/// Runs the DQE evaluation of catalogue <paramref name="c"/>: streams every row of the
/// catalogue's validation query, aggregating row states and time-periodicity cubes both for the
/// "ALL" category and per pivot-category value, then commits all results to the DQE server in a
/// single transaction.  Progress/errors are forwarded to both <paramref name="listener"/> and
/// the logging database.
/// </summary>
public override void GenerateReport(ICatalogue c, IDataLoadEventListener listener, CancellationToken cancellationToken)
{
    SetupLogging(c.CatalogueRepository);

    var toDatabaseLogger = new ToLoggingDatabaseDataLoadEventListener(this, _logManager, _loggingTask, "DQE evaluation of " + c);

    // Events go both to the caller's listener and to the logging database
    var forker = new ForkDataLoadEventListener(listener, toDatabaseLogger);

    try
    {
        _catalogue = c;
        var dqeRepository = new DQERepository(c.CatalogueRepository);

        // "ALL" accumulates every record regardless of pivot category
        byPivotCategoryCubesOverTime.Add("ALL", new PeriodicityCubesOverTime("ALL"));
        byPivotRowStatesOverDataLoadRunId.Add("ALL", new DQEStateOverDataLoadRunId("ALL"));

        Check(new FromDataLoadEventListenerToCheckNotifier(forker));

        var sw = Stopwatch.StartNew();

        using (var con = _server.GetConnection())
        {
            con.Open();

            var cmd = _server.GetCommand(_queryBuilder.SQL, con);
            cmd.CommandTimeout = 500000;

            var t = cmd.ExecuteReaderAsync(cancellationToken);
            t.Wait(cancellationToken);

            if (cancellationToken.IsCancellationRequested)
            {
                throw new OperationCanceledException("User cancelled DQE while fetching data");
            }

            var r = t.Result;

            int progress = 0;

            while (r.Read())
            {
                cancellationToken.ThrowIfCancellationRequested();

                progress++;
                //to start with assume we will pass the results for the 'unknown batch' (where data load run ID is null or not available)
                int dataLoadRunIDOfCurrentRecord = 0;

                //if the DataReader is likely to have a data load run ID column
                if (_containsDataLoadID)
                {
                    //get data load run id
                    int? runID = dqeRepository.ObjectToNullableInt(r[_dataLoadRunFieldName]);

                    //if it has a value use it (otherwise it is null so use 0 - ugh I know, it's a primary key constraint issue)
                    if (runID != null)
                    {
                        dataLoadRunIDOfCurrentRecord = (int)runID;
                    }
                }

                string pivotValue = null;

                //if the user has a pivot category configured
                if (_pivotCategory != null)
                {
                    pivotValue = GetStringValueForPivotField(r[_pivotCategory], forker);

                    // Warn exactly once about null/empty pivot values; such records only count under ALL
                    if (!haveComplainedAboutNullCategories && string.IsNullOrWhiteSpace(pivotValue))
                    {
                        forker.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "Found a null/empty value for pivot category '" + _pivotCategory + "', this record will ONLY be recorded under ALL and not it's specific category, you will not be warned of further nulls because there are likely to be many if there are any"));
                        haveComplainedAboutNullCategories = true;
                        pivotValue = null;
                    }
                }

                //always increase the "ALL" category
                ProcessRecord(dqeRepository, dataLoadRunIDOfCurrentRecord, r, byPivotCategoryCubesOverTime["ALL"], byPivotRowStatesOverDataLoadRunId["ALL"]);

                //if there is a value in the current record for the pivot column
                if (pivotValue != null)
                {
                    //if it is a novel
                    if (!byPivotCategoryCubesOverTime.ContainsKey(pivotValue))
                    {
                        //we will need to expand the dictionaries
                        if (byPivotCategoryCubesOverTime.Keys.Count > MaximumPivotValues)
                        {
                            throw new OverflowException("Encountered more than " + MaximumPivotValues + " values for the pivot column " + _pivotCategory + " this will result in crazy space usage since it is a multiplicative scale of DQE tesseracts");
                        }

                        //expand both the time periodicity and the state results
                        byPivotRowStatesOverDataLoadRunId.Add(pivotValue, new DQEStateOverDataLoadRunId(pivotValue));
                        byPivotCategoryCubesOverTime.Add(pivotValue, new PeriodicityCubesOverTime(pivotValue));
                    }

                    //now we are sure that the dictionaries have the category field we can increment it
                    ProcessRecord(dqeRepository, dataLoadRunIDOfCurrentRecord, r, byPivotCategoryCubesOverTime[pivotValue], byPivotRowStatesOverDataLoadRunId[pivotValue]);
                }

                // Progress update every 5000 records
                if (progress % 5000 == 0)
                {
                    forker.OnProgress(this, new ProgressEventArgs("Processing " + _catalogue, new ProgressMeasurement(progress, ProgressType.Records), sw.Elapsed));
                }
            }

            //final value
            forker.OnProgress(this, new ProgressEventArgs("Processing " + _catalogue, new ProgressMeasurement(progress, ProgressType.Records), sw.Elapsed));
            con.Close();
        }

        sw.Stop();

        foreach (var state in byPivotRowStatesOverDataLoadRunId.Values)
        {
            state.CalculateFinalValues();
        }

        //now commit results (all or nothing, inside one transaction)
        using (var con = dqeRepository.BeginNewTransactedConnection())
        {
            try
            {
                //mark down that we are beginning an evaluation on this the day of our lord etc...
                Evaluation evaluation = new Evaluation(dqeRepository, _catalogue);

                foreach (var state in byPivotRowStatesOverDataLoadRunId.Values)
                {
                    state.CommitToDatabase(evaluation, _catalogue, con.Connection, con.Transaction);
                }

                if (_timePeriodicityField != null)
                {
                    foreach (PeriodicityCubesOverTime periodicity in byPivotCategoryCubesOverTime.Values)
                    {
                        periodicity.CommitToDatabase(evaluation);
                    }
                }

                con.ManagedTransaction.CommitAndCloseConnection();
            }
            catch (Exception)
            {
                con.ManagedTransaction.AbandonAndCloseConnection();
                throw;
            }
        }

        forker.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "CatalogueConstraintReport completed successfully  and committed results to DQE server"));
    }
    catch (Exception e)
    {
        // Cancellation is reported as a warning; everything else as a fatal error
        if (!(e is OperationCanceledException))
        {
            forker.OnNotify(this, new NotifyEventArgs(ProgressEventType.Error, "Fatal Crash", e));
        }
        else
        {
            forker.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "DQE Execution Cancelled", e));
        }
    }
    finally
    {
        toDatabaseLogger.FinalizeTableLoadInfos();
    }
}
/// <summary>
/// Creates a new transaction and does one line at a time bulk insertions of the <paramref name="insert"/> to determine which line (and value)
/// is causing the problem. Transaction is always rolled back.
///
/// </summary>
/// <param name="e">The exception thrown by the original (whole table) bulk insert attempt; reported if the line-by-line repeat succeeds.</param>
/// <param name="insert">The original bulk copy whose destination table and column mappings are replicated for the investigation.</param>
/// <param name="dt">The rows that were being inserted when <paramref name="e"/> was thrown.</param>
/// <param name="serverForLineByLineInvestigation">Server on which to open a fresh connection (the current one could have a broken transaction).</param>
/// <returns>A new Exception describing the first failing row (and, when determinable, the offending column/value), or wrapping
/// <paramref name="e"/> if the line-by-line repeat unexpectedly worked.</returns>
private Exception AttemptLineByLineInsert(Exception e, SqlBulkCopy insert, DataTable dt, DiscoveredServer serverForLineByLineInvestigation)
{
    int line = 1;
    string firstPass = ExceptionToListOfInnerMessages(e, true);
    firstPass = firstPass.Replace(Environment.NewLine, Environment.NewLine + "\t");
    firstPass = Environment.NewLine + SR.MicrosoftSQLBulkCopy_AttemptLineByLineInsert_First_Pass_Exception_ + Environment.NewLine + firstPass;

    //have to use a new object because current one could have a broken transaction associated with it
    using (var con = (SqlConnection)serverForLineByLineInvestigation.GetConnection())
    {
        con.Open();

        //BUGFIX: transaction is IDisposable and was previously never disposed; Dispose rolls back if still
        //active, so the early returns below no longer abandon it (honours the "always rolled back" contract)
        using (SqlTransaction investigationTransaction = con.BeginTransaction("Investigate BulkCopyFailure"))
        {
            using (SqlBulkCopy investigationOneLineAtATime = new SqlBulkCopy(con, SqlBulkCopyOptions.KeepIdentity, investigationTransaction)
            {
                DestinationTableName = insert.DestinationTableName
            })
            {
                //replicate the column mappings of the original (failed) bulk copy
                foreach (SqlBulkCopyColumnMapping m in insert.ColumnMappings)
                    investigationOneLineAtATime.ColumnMappings.Add(m);

                //try a line at a time
                foreach (DataRow dr in dt.Rows)
                    try
                    {
                        investigationOneLineAtATime.WriteToServer(new[] { dr }); //try one line
                        line++;
                    }
                    catch (Exception exception)
                    {
                        //can we pin the failure down to a specific column mapping?
                        if (BcpColIdToString(investigationOneLineAtATime, exception as SqlException, out string result, out SqlBulkCopyColumnMapping badMapping))
                        {
                            if (dt.Columns.Contains(badMapping.SourceColumn))
                            {
                                var sourceValue = dr[badMapping.SourceColumn];
                                var destColumn = base.TargetTableColumns.SingleOrDefault(c => c.GetRuntimeName().Equals(badMapping.DestinationColumn));

                                if (destColumn != null)
                                    return new FileLoadException(
                                        string.Format(SR.MicrosoftSQLBulkCopy_AttemptLineByLineInsert_BulkInsert_failed_on_data_row__0__the_complaint_was_about_source_column____1____which_had_value____2____destination_data_type_was____3____4__5_,
                                            line, badMapping.SourceColumn, sourceValue, destColumn.DataType, Environment.NewLine, result),
                                        exception);
                            }

                            //knew the column but couldn't resolve it against the source DataTable / destination columns
                            return new Exception(string.Format(SR.MicrosoftSQLBulkCopy_AttemptLineByLineInsert_BulkInsert_failed_on_data_row__0___1_, line, result), e);
                        }

                        //failure could not be attributed to a specific column - report the whole rejected row
                        return new FileLoadException(
                            string.Format(SR.MicrosoftSQLBulkCopy_AttemptLineByLineInsert_Second_Pass_Exception__Failed_to_load_data_row__0__the_following_values_were_rejected_by_the_database___1__2__3_,
                                line, Environment.NewLine, string.Join(Environment.NewLine, dr.ItemArray), firstPass),
                            exception);
                    }

                //it worked... how!?
                investigationTransaction.Rollback();
                con.Close();
            }
        }

        return new Exception(SR.MicrosoftSQLBulkCopy_AttemptLineByLineInsert_Second_Pass_Exception__Bulk_insert_failed_but_when_we_tried_to_repeat_it_a_line_at_a_time_it_worked + firstPass, e);
    }
}