/// <summary>
/// Rebuilds the SQL from the current collection, displays it in the Scintilla editor and
/// starts an asynchronous fetch of the resulting data.  Any failure (query building or
/// server connection) is routed to the RAG smiley instead of being thrown.
/// </summary>
private void RefreshUIFromDatabase()
{
    try
    {
        _server = DataAccessPortal.GetInstance()
            .ExpectServer(_collection.GetDataAccessPoint(), DataAccessContext.InternalDataProcessing);

        string sql = _collection.GetSql();
        _originalSql = sql;

        //update the editor to show the user the SQL
        _scintilla.Text = sql;

        _server.TestConnection();
        LoadDataTableAsync(_server, sql);
    }
    catch (Exception ex)
    {
        //covers QueryBuildingException and connection failures alike - the two previous
        //catch blocks were byte-identical so they have been merged into one
        ragSmiley1.SetVisible(true);
        ragSmiley1.Fatal(ex);
    }
}
/// <summary>
/// Gets an empty database on the test server of the appropriate DBMS
/// </summary>
/// <param name="type">The DBMS you want a server of (a valid connection string must exist in TestDatabases.txt)</param>
/// <param name="dbnName">null for default test database name (recommended unless you are testing moving data from one database to another on the same test server)</param>
/// <param name="server">The server the cleaned database lives on</param>
/// <param name="database">The cleaned (empty) database</param>
/// <param name="justDropTablesIfPossible">Determines behaviour when the test database already exists. False to drop and recreate it. True to just drop tables (faster)</param>
/// <returns>The cleaned database (same reference as <paramref name="database"/>)</returns>
protected DiscoveredDatabase GetCleanedServer(DatabaseType type, string dbnName, out DiscoveredServer server, out DiscoveredDatabase database, bool justDropTablesIfPossible = false)
{
    //pick the configured test server for the requested DBMS (MySql/Oracle are optional and may not be configured)
    switch (type)
    {
        case DatabaseType.MicrosoftSQLServer:
            server = new DiscoveredServer(DiscoveredServerICanCreateRandomDatabasesAndTablesOn.Builder);
            break;
        case DatabaseType.MySql:
            server = _discoveredMySqlServer == null ? null : new DiscoveredServer(_discoveredMySqlServer.Builder);
            break;
        case DatabaseType.Oracle:
            server = _discoveredOracleServer == null ? null : new DiscoveredServer(_discoveredOracleServer.Builder);
            break;
        default:
            throw new ArgumentOutOfRangeException(nameof(type));
    }

    //no connection string was configured for this DBMS - skip the test rather than fail it
    if (server == null)
    {
        Assert.Inconclusive();
    }

    //the microsoft one should exist! others are optional
    if (!server.Exists() && type != DatabaseType.MicrosoftSQLServer)
    {
        Assert.Inconclusive();
    }

    server.TestConnection();

    database = server.ExpectDatabase(dbnName);

    if (justDropTablesIfPossible && database.Exists())
    {
        //faster than dropping and recreating the whole database
        foreach (var t in database.DiscoverTables(true))
        {
            t.Drop();
        }
        foreach (var t in database.DiscoverTableValuedFunctions())
        {
            t.Drop();
        }
    }
    else
    {
        database.Create(true);
    }

    server.ChangeDatabase(dbnName);

    Assert.IsTrue(database.Exists());

    return database;
}
/// <summary>
/// Compares the ColumnInfos recorded in the Catalogue for the TableInfo being synchronized against
/// the live table structure on the server and, with the user's confirmation via <paramref name="notifier"/>,
/// creates missing ColumnInfos, deletes orphaned ones and synchronizes per-column flags
/// (IsPrimaryKey, IsAutoIncrement, Collation) and table valued function parameters.
/// </summary>
/// <exception cref="SynchronizationFailedException">Could not figure out how to resolve a synchronization problem between the TableInfo and the underlying table structure</exception>
/// <param name="notifier">Called every time a fixable problem is detected, method must return true or false. True = apply fix, False = don't - but carry on checking</param>
public bool Synchronize(ICheckNotifier notifier)
{
    bool IsSynched = true;

    //server exists and is accessible?
    try
    {
        _toSyncTo.TestConnection();
    }
    catch (Exception e)
    {
        throw new SynchronizationFailedException("Could not connect to " + _toSyncTo, e);
    }

    //database exists?
    var expectedDatabase = _toSyncTo.ExpectDatabase(_tableToSync.GetDatabaseRuntimeName());
    if (!expectedDatabase.Exists())
    {
        throw new SynchronizationFailedException("Server did not contain a database called " + _tableToSync.GetDatabaseRuntimeName());
    }

    //identify new columns
    DiscoveredColumn[] liveColumns;
    DiscoveredTable expectedTable;

    //table valued functions and tables/views are discovered via different expectations
    if (_tableToSync.IsTableValuedFunction)
    {
        expectedTable = expectedDatabase.ExpectTableValuedFunction(_tableToSync.GetRuntimeName(), _tableToSync.Schema);
        if (!expectedTable.Exists())
        {
            throw new SynchronizationFailedException("Database " + expectedDatabase + " did not contain a TABLE VALUED FUNCTION called " + _tableToSync.GetRuntimeName());
        }
    }
    else
    {
        //table exists?
        expectedTable = expectedDatabase.ExpectTable(_tableToSync.GetRuntimeName(), _tableToSync.Schema, _tableToSync.IsView ? TableType.View:TableType.Table);
        if (!expectedTable.Exists())
        {
            throw new SynchronizationFailedException("Database " + expectedDatabase + " did not contain a table called " + _tableToSync.GetRuntimeName());
        }
    }

    try
    {
        liveColumns = expectedTable.DiscoverColumns();
    }
    catch (SqlException e)
    {
        throw new Exception("Failed to enumerate columns in " + _toSyncTo + " (we were attempting to synchronize the TableInfo " + _tableToSync + " (ID=" + _tableToSync.ID + "). Check the inner exception for specifics", e);
    }

    ColumnInfo[] catalogueColumns = _tableToSync.ColumnInfos.ToArray();

    //decrypt any stored credentials so the importer can connect when creating new ColumnInfos
    IDataAccessCredentials credentialsIfExists = _tableToSync.GetCredentialsIfExists(DataAccessContext.InternalDataProcessing);
    string pwd = null;
    string usr = null;
    if (credentialsIfExists != null)
    {
        usr = credentialsIfExists.Username;
        pwd = credentialsIfExists.GetDecryptedPassword();
    }

    ITableInfoImporter importer;

    //for importing new stuff
    if (_tableToSync.IsTableValuedFunction)
    {
        importer = new TableValuedFunctionImporter(_repository, (DiscoveredTableValuedFunction)expectedTable);
    }
    else
    {
        importer = new TableInfoImporter(_repository, _toSyncTo.Name, _toSyncTo.GetCurrentDatabase().GetRuntimeName(), _tableToSync.GetRuntimeName(), _tableToSync.DatabaseType, username: usr, password: pwd, importFromSchema: _tableToSync.Schema, importTableType: _tableToSync.IsView ? TableType.View:TableType.Table);
    }

    //live columns that have no corresponding ColumnInfo in the Catalogue (matched by runtime name)
    DiscoveredColumn[] newColumnsInLive = liveColumns.Where(
        live => !catalogueColumns.Any(columnInfo => columnInfo.GetRuntimeName()
            .Equals(live.GetRuntimeName()))).ToArray();

    //there are new columns in the live database that are not in the Catalogue
    if (newColumnsInLive.Any())
    {
        //see if user wants to add missing columns
        bool addMissingColumns = notifier.OnCheckPerformed(new CheckEventArgs("The following columns are missing from the TableInfo:" + string.Join(",", newColumnsInLive.Select(c => c.GetRuntimeName())), CheckResult.Fail, null, "The ColumnInfos will be created and added to the TableInfo"));

        List <ColumnInfo> added = new List <ColumnInfo>();

        if (addMissingColumns)
        {
            foreach (DiscoveredColumn missingColumn in newColumnsInLive)
            {
                added.Add(importer.CreateNewColumnInfo(_tableToSync, missingColumn));
            }
            ForwardEngineerExtractionInformationIfAppropriate(added, notifier);
        }
        else
        {
            IsSynched = false;
        }
    }

    //See if we need to delete any ColumnInfos
    ColumnInfo[] columnsInCatalogueButSinceDisapeared = catalogueColumns
        .Where(columnInfo => !liveColumns.Any( //there are not any
            c => columnInfo.GetRuntimeName().Equals(c.GetRuntimeName())) //columns with the same name between discovery/columninfo
        ).ToArray();

    if (columnsInCatalogueButSinceDisapeared.Any())
    {
        foreach (var columnInfo in columnsInCatalogueButSinceDisapeared)
        {
            bool deleteExtraColumnInfos = notifier.OnCheckPerformed(new CheckEventArgs("The ColumnInfo " + columnInfo.GetRuntimeName() + " no longer appears in the live table.", CheckResult.Fail, null, "Delete ColumnInfo " + columnInfo.GetRuntimeName()));
            if (deleteExtraColumnInfos)
            {
                columnInfo.DeleteInDatabase();
            }
            else
            {
                IsSynched = false;
            }
        }
    }

    _tableToSync.ClearAllInjections();

    //the remaining synchronizations are skipped as soon as one reports a mismatch (IsSynched goes false)
    if (IsSynched)
    {
        IsSynched = SynchronizeTypes(notifier, liveColumns);
    }

    if (IsSynched && !_tableToSync.IsTableValuedFunction)//table valued functions don't have primary keys!
    {
        IsSynched = SynchronizeField(liveColumns, _tableToSync.ColumnInfos, notifier, "IsPrimaryKey");
    }

    if (IsSynched && !_tableToSync.IsTableValuedFunction)//table valued functions don't have autonum
    {
        IsSynched = SynchronizeField(liveColumns, _tableToSync.ColumnInfos, notifier, "IsAutoIncrement");
    }

    if (IsSynched)
    {
        IsSynched = SynchronizeField(liveColumns, _tableToSync.ColumnInfos, notifier, "Collation");
    }

    if (IsSynched && _tableToSync.IsTableValuedFunction)
    {
        IsSynched = SynchronizeParameters((TableValuedFunctionImporter)importer, notifier);
    }

    _tableToSync.ClearAllInjections();

    //get list of primary keys from underlying table
    return(IsSynched);
}
/// <inheritdoc/>
public void DoImport(out ITableInfo tableInfoCreated, out ColumnInfo[] columnInfosCreated)
{
    //fail fast if the server cannot be reached
    try
    {
        _server.TestConnection();
    }
    catch (Exception e)
    {
        throw new Exception($"Could not reach server {_server.Name}", e);
    }

    var syntax = _server.GetQuerySyntaxHelper();

    //build the fully qualified name; whether a schema segment appears depends on the DBMS
    var fullTableName = syntax.EnsureWrapped(_importDatabaseName);

    switch (_type)
    {
        case DatabaseType.MicrosoftSQLServer:
        case DatabaseType.PostgreSql:
            //these DBMS qualify tables as db.schema.table
            fullTableName += "." + (syntax.EnsureWrapped(_importFromSchema ?? syntax.GetDefaultSchemaIfAny())) + ".";
            break;
        case DatabaseType.MySql:
        case DatabaseType.Oracle:
            //these DBMS qualify tables as db.table
            fullTableName += ".";
            break;
        default:
            throw new NotSupportedException("Unknown Type:" + _type);
    }

    fullTableName += syntax.EnsureWrapped(_importTableName);

    var wrappedDatabaseName = syntax.EnsureWrapped(_importDatabaseName);

    //interrogate the live table for its column list
    var discoveredColumns = _server.ExpectDatabase(_importDatabaseName)
        .ExpectTable(_importTableName, _importFromSchema, _importTableType)
        .DiscoverColumns();

    //persist the TableInfo first so the ColumnInfos have a parent to reference
    var parent = new TableInfo(_repository, fullTableName)
    {
        DatabaseType = _type,
        Database = wrappedDatabaseName,
        Server = _importFromServer,
        Schema = _importFromSchema,
        IsView = _importTableType == TableType.View
    };
    parent.SaveToDatabase();

    var createdColumns = new List<ColumnInfo>();
    foreach (var liveColumn in discoveredColumns)
    {
        createdColumns.Add(CreateNewColumnInfo(parent, liveColumn));
    }

    tableInfoCreated = parent;
    columnInfosCreated = createdColumns.ToArray();

    //if there is a username then we need to associate it with the TableInfo we just created
    if (!string.IsNullOrWhiteSpace(_username))
    {
        var credentialsFactory = new DataAccessCredentialsFactory(_repository);
        credentialsFactory.Create(tableInfoCreated, _username, _password, _usageContext);
    }
}
/// <summary>
/// Performs pre-execution checks for running the DQE against the Catalogue: that a DQE reporting
/// server and logging are configured, that the Catalogue has deserializable ValidatorXML, a reachable
/// live database and buildable extraction SQL, and that data load run / pivot category / time coverage
/// columns are present.  All outcomes are reported through <paramref name="notifier"/>.
/// </summary>
public override void Check(ICheckNotifier notifier)
{
    //there is a catalogue
    if (_catalogue == null)
    {
        notifier.OnCheckPerformed(new CheckEventArgs("Catalogue has not been set, either use the constructor with Catalogue parameter or use the blank constructor and call CatalogueSupportsReport instead", CheckResult.Fail));
        return;
    }

    try
    {
        var dqeRepository = new DQERepository(_catalogue.CatalogueRepository);
        notifier.OnCheckPerformed(new CheckEventArgs("Found DQE reporting server " + dqeRepository.DiscoveredServer.Name, CheckResult.Success));
    }
    catch (Exception e)
    {
        //NOTE(review): checking continues after this failure (no early return, unlike the logging check below) - confirm that is intentional
        notifier.OnCheckPerformed(
            new CheckEventArgs(
                "Failed to create DQE Repository, possibly there is no DataQualityEngine Reporting Server (ExternalDatabaseServer). You will need to create/set one in CatalogueManager by using 'Locations=>Manage External Servers...'",
                CheckResult.Fail, e));
    }

    try
    {
        SetupLogging(_catalogue.CatalogueRepository);
    }
    catch (Exception e)
    {
        notifier.OnCheckPerformed(new CheckEventArgs("Failed to setup logging of DQE runs", CheckResult.Fail, e));
        return;
    }

    //there is XML
    if (string.IsNullOrWhiteSpace(_catalogue.ValidatorXML))
    {
        notifier.OnCheckPerformed(new CheckEventArgs("There is no ValidatorXML specified for the Catalogue " + _catalogue + ", you must configure validation rules", CheckResult.Fail));
        return;
    }

    notifier.OnCheckPerformed(new CheckEventArgs("Found ValidatorXML specified for the Catalogue " + _catalogue + ":" + Environment.NewLine + _catalogue.ValidatorXML, CheckResult.Success));

    //the XML is legit
    try
    {
        _validator = Validator.LoadFromXml(_catalogue.ValidatorXML);
    }
    catch (Exception e)
    {
        notifier.OnCheckPerformed(new CheckEventArgs("ValidatorXML for Catalogue " + _catalogue + " could not be deserialized into a Validator", CheckResult.Fail, e));
        return;
    }

    notifier.OnCheckPerformed(new CheckEventArgs("Deserialized validation XML successfully", CheckResult.Success));

    //there is a server
    try
    {
        _server = _catalogue.GetDistinctLiveDatabaseServer(DataAccessContext.InternalDataProcessing, true);
    }
    catch (Exception e)
    {
        notifier.OnCheckPerformed(new CheckEventArgs("Could not get connection to Catalogue " + _catalogue, CheckResult.Fail, e));
        return;
    }

    notifier.OnCheckPerformed(new CheckEventArgs("Found connection string for Catalogue " + _catalogue, CheckResult.Success));

    //we can connect to the server
    try
    {
        _server.TestConnection();
    }
    catch (Exception e)
    {
        //NOTE(review): no early return here - later checks still run even if the server is unreachable; confirm intended
        notifier.OnCheckPerformed(new CheckEventArgs("Could not connect to server for Catalogue " + _catalogue, CheckResult.Fail, e));
    }

    //there is extraction SQL
    try
    {
        _queryBuilder = new QueryBuilder("", "");
        _queryBuilder.AddColumnRange(_catalogue.GetAllExtractionInformation(ExtractionCategory.Any));

        //duplicate runtime column names would make the SELECT ambiguous - keep everything past the first of each name
        var duplicates = _queryBuilder.SelectColumns.GroupBy(c => c.IColumn.GetRuntimeName()).SelectMany(grp => grp.Skip(1)).ToArray();

        if (duplicates.Any())
        {
            foreach (QueryTimeColumn column in duplicates)
            {
                notifier.OnCheckPerformed(
                    new CheckEventArgs(
                        "The column name " + column.IColumn.GetRuntimeName() + " is duplicated in the SELECT command, column names must be unique! Most likely you have 2+ columns with the same name (from different tables) or duplicate named CatalogueItem/Aliases for the same underlying ColumnInfo", CheckResult.Fail));
            }
        }

        notifier.OnCheckPerformed(new CheckEventArgs("Query Builder decided the extraction SQL was:" + Environment.NewLine + _queryBuilder.SQL, CheckResult.Success));

        SetupAdditionalValidationRules(notifier);
    }
    catch (Exception e)
    {
        //NOTE(review): if the QueryBuilder constructor itself threw, _queryBuilder would still be null when used below - confirm this cannot happen
        notifier.OnCheckPerformed(new CheckEventArgs("Failed to generate extraction SQL", CheckResult.Fail, e));
    }

    //for each thing we are about to try and validate
    foreach (ItemValidator itemValidator in _validator.ItemValidators)
    {
        //is there a column in the query builder that matches it
        if ( //there isnt!
            !_queryBuilder.SelectColumns.Any(
                c => c.IColumn.GetRuntimeName().Equals(itemValidator.TargetProperty)))
        {
            notifier.OnCheckPerformed(
                new CheckEventArgs(
                    "Could not find a column in the extraction SQL that would match TargetProperty " + itemValidator.TargetProperty, CheckResult.Fail));
        }
        else
        {
            //there is that is good
            notifier.OnCheckPerformed(
                new CheckEventArgs("Found column in query builder columns which matches TargetProperty " + itemValidator.TargetProperty, CheckResult.Success));
        }
    }

    //needed for subdividing the report by data load batch
    _containsDataLoadID = _queryBuilder.SelectColumns.Any(
        c => c.IColumn.GetRuntimeName().Equals(_dataLoadRunFieldName));

    if (_containsDataLoadID)
    {
        notifier.OnCheckPerformed(
            new CheckEventArgs(
                "Found " + _dataLoadRunFieldName + " field in ExtractionInformation", CheckResult.Success));
    }
    else
    {
        notifier.OnCheckPerformed(
            new CheckEventArgs(
                "Did not find ExtractionInformation for a column called " + _dataLoadRunFieldName + ", this will prevent you from viewing the resulting report subdivided by data load batch (make sure you have this column and that it is marked as extractable)", CheckResult.Warning));
    }

    if (_catalogue.PivotCategory_ExtractionInformation_ID == null)
    {
        notifier.OnCheckPerformed(
            new CheckEventArgs(
                "Catalogue does not have a pivot category so all records will appear as PivotCategory 'ALL'", CheckResult.Warning));
    }
    else
    {
        _pivotCategory = _catalogue.PivotCategory_ExtractionInformation.GetRuntimeName();
        notifier.OnCheckPerformed(
            new CheckEventArgs(
                "Found time Pivot Category field " + _pivotCategory + " so we will be able to generate a categorised tesseract (evaluation, periodicity, consequence, pivot category)", CheckResult.Success));
    }

    //table valued function based Catalogues are not supported
    var tblValuedFunctions = _catalogue.GetTableInfoList(true).Where(t => t.IsTableValuedFunction).ToArray();
    if (tblValuedFunctions.Any())
    {
        notifier.OnCheckPerformed(
            new CheckEventArgs(
                "Catalogue contains 1+ table valued function in it's TableInfos (" + string.Join(",", tblValuedFunctions.Select(t => t.ToString())),
                CheckResult.Fail));
    }

    if (_catalogue.TimeCoverage_ExtractionInformation_ID == null)
    {
        notifier.OnCheckPerformed(
            new CheckEventArgs(
                "Catalogue does not have a time coverage field set", CheckResult.Fail));
    }
    else
    {
        var periodicityExtractionInformation = _catalogue.TimeCoverage_ExtractionInformation;
        _timePeriodicityField = periodicityExtractionInformation.GetRuntimeName();
        notifier.OnCheckPerformed(
            new CheckEventArgs(
                "Found time coverage field " + _timePeriodicityField, CheckResult.Success));

        //warn (not fail) on non date types because a transform on the ExtractionInformation may legitimately convert it
        if (!periodicityExtractionInformation.ColumnInfo.Data_type.ToLower().Contains("date"))
        {
            notifier.OnCheckPerformed(
                new CheckEventArgs(
                    "Time periodicity field " + _timePeriodicityField + " was of type " + periodicityExtractionInformation.ColumnInfo.Data_type + " (expected the type name to contain the word 'date' - ignoring caps). It is possible (but unlikely) that you have dealt with this by applying a transform to the underlying ColumnInfo as part of the ExtractionInformation, if so you can ignore this message.", CheckResult.Warning));
        }
        else
        {
            notifier.OnCheckPerformed(
                new CheckEventArgs(
                    "Time periodicity field " + _timePeriodicityField + " is a legit date!", CheckResult.Success));
        }
    }
}
/// <summary>
/// Generates synthetic data and bulk inserts it into the database described by
/// <paramref name="configDatabase"/>, creating the database and template tables if required.
/// Work is split across <c>configDatabase.Batches</c> parallel tasks, and optionally
/// deduplicated / primary-keyed afterwards (MakeDistinct).
/// </summary>
/// <param name="configDatabase">Target DBMS, connection string, table template and batching options</param>
/// <param name="opts">General generation options</param>
/// <returns>0 on success, -1 if the template file is missing, -2 on template parse or connection failure</returns>
private static int RunDatabaseTarget(TargetDatabase configDatabase, ProgramOptions opts)
{
    var batchSize = Math.Max(1, configDatabase.Batches);

    //if we are going into a database we definitely do not need pixels!
    if (opts.NoPixels == false)
    {
        opts.NoPixels = true;
    }

    Stopwatch swTotal = new Stopwatch();
    swTotal.Start();

    //never deduplicate on this column (image level uid)
    string neverDistinct = "SOPInstanceUID";

    if (!File.Exists(configDatabase.Template))
    {
        Console.WriteLine($"Listed template file '{configDatabase.Template}' does not exist");
        return(-1);
    }

    ImageTableTemplateCollection template;
    try
    {
        template = ImageTableTemplateCollection.LoadFrom(File.ReadAllText(configDatabase.Template));
    }
    catch (Exception e)
    {
        Console.WriteLine($"Error reading yaml from '{configDatabase.Template}'");
        Console.WriteLine(e.ToString());
        return(-2);
    }

    //register all supported DBMS implementations so any DatabaseType in config can be used
    ImplementationManager.Load <MySqlImplementation>();
    ImplementationManager.Load <PostgreSqlImplementation>();
    ImplementationManager.Load <OracleImplementation>();
    ImplementationManager.Load <MicrosoftSQLImplementation>();

    var server = new DiscoveredServer(configDatabase.ConnectionString, configDatabase.DatabaseType);
    try
    {
        server.TestConnection();
    }
    catch (Exception e)
    {
        Console.WriteLine($"Could not reach target server '{server.Name}'");
        Console.WriteLine(e);
        return(-2);
    }

    var db = server.ExpectDatabase(configDatabase.DatabaseName);
    if (!db.Exists())
    {
        Console.WriteLine($"Creating Database '{db.GetRuntimeName()}'");
        db.Create();
        Console.WriteLine("Database Created");
    }
    else
    {
        Console.WriteLine($"Found Database '{db.GetRuntimeName()}'");
    }

    var creator = new ImagingTableCreation(db.Server.GetQuerySyntaxHelper());

    Console.WriteLine($"Image template contained schemas for {template.Tables.Count} tables. Looking for existing tables..");

    //setting up bulk inserters: one DataTable and one IBulkCopy per (batch, table) pair
    DiscoveredTable[] tables = new DiscoveredTable[template.Tables.Count];
    DataTable[][] batches = new DataTable[batchSize][];
    for (var i = 0; i < batches.Length; i++)
    {
        batches[i] = new DataTable[template.Tables.Count];
    }

    IBulkCopy[][] uploaders = new IBulkCopy[batchSize][];
    for (int i = 0; i < uploaders.Length; i++)
    {
        uploaders[i] = new IBulkCopy[template.Tables.Count];
    }

    //single-column primary key name per table (null = do not deduplicate this table)
    string[] pks = new string[template.Tables.Count];

    for (var i = 0; i < template.Tables.Count; i++)
    {
        var tableSchema = template.Tables[i];
        var tbl = db.ExpectTable(tableSchema.TableName);
        tables[i] = tbl;

        if (configDatabase.MakeDistinct)
        {
            var col = tableSchema.Columns.Where(c => c.IsPrimaryKey).ToArray();
            if (col.Length > 1)
            {
                Console.WriteLine("MakeDistinct only works with single column primary keys e.g. StudyInstanceUID / SeriesInstanceUID");
            }

            pks[i] = col.SingleOrDefault()?.ColumnName;

            if (pks[i] != null)
            {
                //if it is sop instance uid then we shouldn't be trying to deduplicate
                if (string.Equals(pks[i], neverDistinct, StringComparison.CurrentCultureIgnoreCase))
                {
                    pks[i] = null;
                }
                else
                {
                    //we will make this a primary key later on
                    col.Single().IsPrimaryKey = false;
                    Console.WriteLine($"MakeDistinct will apply to '{pks[i]}' on '{tbl.GetFullyQualifiedName()}'");
                }
            }
        }

        bool create = true;

        if (tbl.Exists())
        {
            if (configDatabase.DropTables)
            {
                Console.WriteLine($"Dropping existing table '{tbl.GetFullyQualifiedName()}'");
                tbl.Drop();
            }
            else
            {
                Console.WriteLine($"Table '{tbl.GetFullyQualifiedName()}' already existed (so will not be created)");
                create = false;
            }
        }

        if (create)
        {
            Console.WriteLine($"About to create '{tbl.GetFullyQualifiedName()}'");
            creator.CreateTable(tbl, tableSchema);
            Console.WriteLine($"Successfully created create '{tbl.GetFullyQualifiedName()}'");
        }

        Console.WriteLine($"Creating uploader for '{tbl.GetRuntimeName()}''");

        for (int j = 0; j < batchSize; j++)
        {
            //fetch schema
            var dt = tbl.GetDataTable();
            dt.Rows.Clear();

            batches[j][i] = dt;
            uploaders[j][i] = tbl.BeginBulkInsert();
        }
    }

    var tasks = new Task[batchSize];
    IPersonCollection identifiers = GetPeople(opts, out Random r);

    //one worker task per batch, each writing to its own DataTables/uploaders (no sharing between tasks)
    for (int i = 0; i < batchSize; i++)
    {
        var batch = i;
        tasks[i] = new Task(() =>  // lgtm[cs/local-not-disposed]
        {
            RunBatch(identifiers, opts, r, batches[batch], uploaders[batch]);
        });
        tasks[i].Start();
    }

    Task.WaitAll(tasks);

    swTotal.Stop();

    //deduplicate and apply primary keys now that all data has been loaded
    for (var i = 0; i < tables.Length; i++)
    {
        if (pks[i] == null)
        {
            continue;
        }

        Console.WriteLine($"{DateTime.Now} Making table '{tables[i]}' distinct (this may take a long time)");
        var tbl = tables[i];
        tbl.MakeDistinct(500000000);

        Console.WriteLine($"{DateTime.Now} Creating primary key on '{tables[i]}' of '{pks[i]}'");
        tbl.CreatePrimaryKey(500000000, tbl.DiscoverColumn(pks[i]));
    }

    Console.WriteLine("Final Row Counts:");

    foreach (DiscoveredTable t in tables)
    {
        Console.WriteLine($"{t.GetFullyQualifiedName()}: {t.GetRowCount():0,0}");
    }

    Console.WriteLine("Total Running Time:" + swTotal.Elapsed);
    return(0);
}
/// <summary>
/// Creates the database (if it does not already exist) plus the RoundhousE version tracking
/// schema/tables, then runs <paramref name="createTablesAndFunctionsSql"/> and records
/// <paramref name="initialVersionNumber"/> as the initial version.
/// </summary>
/// <param name="createTablesAndFunctionsSql">Full schema creation script to run once the database exists</param>
/// <param name="initialVersionNumber">Version number to record in the RoundhousE Version table</param>
/// <param name="notifier">Receives progress/failure events; also asked whether to proceed when the database already exists</param>
/// <returns>true on success, false if any step failed (the exception goes to <paramref name="notifier"/>)</returns>
public bool CreateDatabase(string createTablesAndFunctionsSql, string initialVersionNumber, ICheckNotifier notifier)
{
    try
    {
        // The _builder has InitialCatalog set which will cause the pre-database creation connection to fail, so create one which doesn't contain InitialCatalog
        var serverBuilder = new SqlConnectionStringBuilder(_builder.ConnectionString) { InitialCatalog = "" };

        DiscoveredServer server = new DiscoveredServer(serverBuilder);
        server.TestConnection();

        var db = server.ExpectDatabase(_database);

        if (db.Exists())//make sure database does not already exist
        {
            bool createAnyway = notifier.OnCheckPerformed(new CheckEventArgs("Database already exists", CheckResult.Warning, null, "Attempt to create database inside existing database (will cause problems if the database is not empty)?"));

            if (!createAnyway)
            {
                throw new Exception("User chose not continue");
            }
        }
        else
        {
            using (var con = server.GetConnection())//do it manually
            {
                con.Open();
                //NOTE(review): _database is concatenated into the DDL unescaped - assumed to come from trusted configuration, not end-user input; confirm
                server.GetCommand("CREATE DATABASE " + _database + (BinaryCollation?" COLLATE Latin1_General_BIN2":""), con).ExecuteNonQuery();
                notifier.OnCheckPerformed(new CheckEventArgs("Database " + _database + " created", CheckResult.Success, null));
            }
        }

        //drop pooled connections created before the database existed so they are not reused
        SqlConnection.ClearAllPools();

        using (var con = db.Server.GetConnection())
        {
            con.Open();

            //RoundhousE bookkeeping: schema plus ScriptsRun/Version tables for tracking applied patches
            var cmd = db.Server.GetCommand("CREATE SCHEMA " + RoundhouseSchemaName, con);
            cmd.ExecuteNonQuery();

            var sql = @"CREATE TABLE [RoundhousE].[ScriptsRun]( [id] [bigint] IDENTITY(1,1) NOT NULL, [version_id] [bigint] NULL, [script_name] [nvarchar](255) NULL, [text_of_script] [text] NULL, [text_hash] [nvarchar](512) NULL, [one_time_script] [bit] NULL, [entry_date] [datetime] NULL, [modified_date] [datetime] NULL, [entered_by] [nvarchar](50) NULL, PRIMARY KEY CLUSTERED ( [id] ASC ) ) CREATE TABLE [RoundhousE].[Version]( [id] [bigint] IDENTITY(1,1) NOT NULL, [repository_path] [nvarchar](255) NULL, [version] [nvarchar](50) NULL, [entry_date] [datetime] NULL, [modified_date] [datetime] NULL, [entered_by] [nvarchar](50) NULL, PRIMARY KEY CLUSTERED ( [id] ASC ) ) ";

            var cmd2 = db.Server.GetCommand(sql, con);
            cmd2.ExecuteNonQuery();
        }

        RunSQL(db, createTablesAndFunctionsSql, InitialDatabaseScriptName);

        SetVersion(db, "Initial Setup", initialVersionNumber);

        notifier.OnCheckPerformed(new CheckEventArgs("Tables created", CheckResult.Success, null));
        notifier.OnCheckPerformed(new CheckEventArgs("Setup Completed successfully", CheckResult.Success, null));

        return(true);
    }
    catch (Exception e)
    {
        notifier.OnCheckPerformed(new CheckEventArgs("Create failed", CheckResult.Fail, e));
        return(false);
    }
}
/// <summary>
/// Builds the TopX 1 lookup query (SELECT uid columns WHERE StudyInstanceUID = @x OR
/// SeriesInstanceUID = @y OR SOPInstanceUID = @z) for the blacklist Catalogue, verifies
/// at least one UID column exists, tests the connection and runs a smoke-test lookup.
/// </summary>
/// <exception cref="NotSupportedException">Thrown if the Catalogue has no Core extractable UID columns</exception>
/// <exception cref="Exception">Thrown if the live server connection test fails</exception>
private void Initialize()
{
    //Figure out which UID columns exist in the Catalogue, do not require file path to be in Catalogue
    _columnSet = QueryToExecuteColumnSet.Create(_catalogue, false);

    //Tells us the DBMS type
    var syntax = _catalogue.GetQuerySyntaxHelper();

    //For storing the OR container and filter(s)
    var memory = new MemoryCatalogueRepository();

    //builds SQL we will run in lookup stage
    _queryBuilder = new QueryBuilder(null, null);

    //all we care about is if the uid appears if it does then we are rejecting it
    _queryBuilder.TopX = 1;

    //Filter is OR i.e. StudyInstanceUID = @StudyInstanceUID OR SeriesInstanceUID = @SeriesInstanceUID
    var container = _queryBuilder.RootFilterContainer = new SpontaneouslyInventedFilterContainer(memory, null, null, FilterContainerOperation.OR);

    //SELECTs the column and adds 'col = @parameterName' to the OR container (shared by all three UID columns)
    SpontaneouslyInventedFilter AddUidFilter(IColumn column, string parameterName, string filterName)
    {
        _queryBuilder.AddColumn(column);
        string whereSql = $"{column.SelectSQL} = {syntax.ParameterSymbol}{parameterName}";
        var filter = new SpontaneouslyInventedFilter(memory, container, whereSql, filterName, "", null);
        container.AddChild(filter);
        return filter;
    }

    //Build SELECT and WHERE bits of the query for whichever UID columns the Catalogue has
    if (_columnSet.StudyTagColumn != null)
        _studyFilter = AddUidFilter(_columnSet.StudyTagColumn, QueryToExecuteColumnSet.DefaultStudyIdColumnName, "Study UID Filter");

    if (_columnSet.SeriesTagColumn != null)
        _seriesFilter = AddUidFilter(_columnSet.SeriesTagColumn, QueryToExecuteColumnSet.DefaultSeriesIdColumnName, "Series UID Filter");

    if (_columnSet.InstanceTagColumn != null)
        _instanceFilter = AddUidFilter(_columnSet.InstanceTagColumn, QueryToExecuteColumnSet.DefaultInstanceIdColumnName, "Instance UID Filter");

    // Make sure the query builder looks valid
    if (!_queryBuilder.SelectColumns.Any())
        throw new NotSupportedException($"Blacklist Catalogue {_catalogue} (ID={_catalogue.ID}) did not have any Core ExtractionInformation columns corresponding to any of the image UID tags (e.g. StudyInstanceUID, SeriesInstanceUID, SOPInstanceUID).");

    try
    {
        // make sure we can connect to the server
        _server = _catalogue.GetDistinctLiveDatabaseServer(DataAccessContext.DataExport, true);
        _server.TestConnection();
    }
    catch (Exception e)
    {
        throw new Exception($"Failed to test connection for Catalogue {_catalogue}", e);
    }

    // run a test lookup query against the remote database
    DoLookup("test1", "test2", "test3");
}