public string GetSql()
{
    // Builds the preview SQL for this TableInfo.
    // BUGFIX: previously the NotSupportedException for ViewType.Aggregate was only
    // thrown AFTER the entire query had been assembled (and after a dead
    // "count(*)" custom line had been added) - fail fast instead.
    if (ViewType == ViewType.Aggregate)
        throw new NotSupportedException("ViewType.Aggregate can only be applied to ColumnInfos not TableInfos");

    var qb = new QueryBuilder(null, null);

    // TOP 100 preview mode limits the rows fetched
    if (ViewType == ViewType.TOP_100)
        qb.TopX = 100;

    // Wrap each ColumnInfo in a transient (in-memory, never persisted) IColumn adapter
    var memoryRepository = new MemoryCatalogueRepository();
    qb.AddColumnRange(TableInfo.ColumnInfos.Select(c => new ColumnInfoToIColumn(memoryRepository, c)).ToArray());

    // Apply the user's filter (if any) via a spontaneously invented container
    var filter = GetFilterIfAny();
    if (filter != null)
        qb.RootFilterContainer = new SpontaneouslyInventedFilterContainer(memoryRepository, null, new[] { filter }, FilterContainerOperation.AND);

    return qb.SQL;
}
private void TestWithParameterValueThatRowsAreReturned()
{
    // Give the table valued function's single parameter a concrete value so that
    // the generated query can actually execute
    var p = _tvfTableInfo.GetAllParameters().Single();
    p.Value = "5";
    p.SaveToDatabase();

    var qb = new QueryBuilder("", "");
    qb.AddColumnRange(_tvfCatalogue.GetAllExtractionInformation(ExtractionCategory.Any));

    var sql = qb.SQL;

    var db = DataAccessPortal.GetInstance().ExpectDatabase(_tvfTableInfo, DataAccessContext.InternalDataProcessing);
    using (var con = db.Server.GetConnection())
    {
        con.Open();

        int rowsReturned = 0;

        // BUGFIX: dispose the reader (it was previously left open on the connection)
        using (var r = db.Server.GetCommand(sql, con).ExecuteReader())
        {
            while (r.Read())
            {
                rowsReturned++;
                Assert.NotNull(r["chi"]);
                Assert.NotNull(r["definitionID"]);
            }
        }

        // BUGFIX: NUnit Assert.AreEqual signature is (expected, actual) - the
        // arguments were reversed, which produces a misleading failure message
        Assert.AreEqual(5, rowsReturned);
    }
}
private void GetInsertData(DiscoveredServer server, DiscoveredDatabase database, ICheckNotifier checkNotifier)
{
    // Transient repository backing the spontaneously invented (never persisted) filters below
    var memoryRepository = new MemoryCatalogueRepository();

    var syntaxHelper = server.GetQuerySyntaxHelper();

    string tableName = _tableInfo.Name;
    string archiveTableName = syntaxHelper.EnsureFullyQualified(database.GetRuntimeName(), _tableInfo.Schema, _tableInfo.GetRuntimeName() + "_Archive");

    // One "live.pk = archive.pk AND " clause per primary key.  The trailing AND is
    // deliberate: the NOT EXISTS subquery below appends the DataLoadRunID comparison after it.
    var whereStatement = string.Concat(_pks.Select(pk => string.Format("{0}.{1} = {2}.{1} AND ", tableName, pk.GetRuntimeName(), archiveTableName)));

    var builder = new QueryBuilder(null, null, new[] { _tableInfo });
    builder.TopX = _batchSize;

    builder.AddColumnRange(_tableInfo.ColumnInfos.Select(c => new ColumnInfoToIColumn(memoryRepository, c)).ToArray());

    //where
    // Filter 1: only rows belonging to the current data load run
    var runIdFilter = new SpontaneouslyInventedFilter(memoryRepository, null, SpecialFieldNames.DataLoadRunID + " = " + _dataLoadRunID, "DataLoadRunID matches", null, null);

    // Filter 2: only rows with no earlier-run counterpart in the _Archive table (i.e. true inserts)
    var notInArchiveFilter = new SpontaneouslyInventedFilter(memoryRepository, null, string.Format(@" not exists ( select 1 from {0} where {1} {2} < {3} )", archiveTableName, whereStatement, SpecialFieldNames.DataLoadRunID, _dataLoadRunID), "Record doesn't exist in archive", null, null);

    builder.RootFilterContainer = new SpontaneouslyInventedFilterContainer(memoryRepository, null, new[] { runIdFilter, notInArchiveFilter }, FilterContainerOperation.AND);

    Inserts = new DataTable();
    FillTableWithQueryIfUserConsents(Inserts, builder.SQL, checkNotifier, server);
}
public List<JoinInfo> GetJoinInfosRequiredCatalogue()
{
    // Assemble (but never execute) the full extraction query for the Catalogue,
    // then report which JoinInfos the query builder decided it needed.
    var builder = new QueryBuilder(null, null);
    builder.AddColumnRange(Catalogue.GetAllExtractionInformation(ExtractionCategory.Any));
    builder.RegenerateSQL();

    return builder.JoinsUsedInQuery;
}
public List<Lookup> GetLookupsRequiredCatalogue()
{
    // Assemble (but never execute) the full extraction query for the Catalogue,
    // then ask the builder which Lookup tables the resulting SQL depends on.
    var builder = new QueryBuilder(null, null);
    builder.AddColumnRange(Catalogue.GetAllExtractionInformation(ExtractionCategory.Any));
    builder.RegenerateSQL();

    return builder.GetDistinctRequiredLookups().ToList();
}
private string GetSql(ICatalogue mainCata)
{
    // Clear any injected/cached state on the catalogue before building so the
    // generated SQL reflects its current database contents
    mainCata.ClearAllInjections();

    var builder = new QueryBuilder(null, null);
    builder.AddColumnRange(mainCata.GetAllExtractionInformation(ExtractionCategory.Any));

    return builder.SQL;
}
private void TestThatQueryBuilderWithoutParametersBeingSetThrowsQueryBuildingException()
{
    //we should have problems reading from the table valued function
    var builder = new QueryBuilder("", "");

    //table valued function should have 2 fields (chi and definitionID)
    Assert.AreEqual(2, _tvfCatalogue.GetAllExtractionInformation(ExtractionCategory.Any).Count());

    builder.AddColumnRange(_tvfCatalogue.GetAllExtractionInformation(ExtractionCategory.Any));

    // Evaluating .SQL must throw because @numberOfRecords has no value assigned yet
    var ex = Assert.Throws<QueryBuildingException>(() => Console.WriteLine(builder.SQL));
    Assert.AreEqual("No Value defined for Parameter @numberOfRecords", ex.Message);
}
private string GenerateExtractionSQLForCatalogue(ExtractionInformation[] extractionInformations)
{
    // Build extraction SQL for the given columns, restricted by whichever
    // filters the user has ticked in the list view (ANDed together).
    var builder = new QueryBuilder(null, null);
    builder.AddColumnRange(extractionInformations);

    // Gather every ticked filter (CheckedObjects enumeration casts each entry)
    var checkedFilters = new List<ExtractionFilter>();
    foreach (ExtractionFilter filter in olvFilters.CheckedObjects)
        checkedFilters.Add(filter);

    // Transient (in-memory, never persisted) AND container for the ticked filters
    builder.RootFilterContainer = new SpontaneouslyInventedFilterContainer(new MemoryCatalogueRepository(), null, checkedFilters.ToArray(), FilterContainerOperation.AND);

    return builder.SQL;
}
public void TestMemoryRepository_QueryBuilder()
{
    // Build the minimal Catalogue -> CatalogueItem -> ColumnInfo -> ExtractionInformation
    // chain entirely in the repository under test
    var memCatalogue = new Catalogue(_repo, "My New Catalogue");
    var myCol = new CatalogueItem(_repo, memCatalogue, "MyCol1");
    var ti = new TableInfo(_repo, "My table");
    var col = new ColumnInfo(_repo, "Mycol", "varchar(10)", ti);
    var ei = new ExtractionInformation(_repo, myCol, col, col.Name);

    // The repository must round-trip the catalogue by ID
    Assert.AreEqual(memCatalogue, _repo.GetObjectByID<Catalogue>(memCatalogue.ID));

    var builder = new QueryBuilder(null, null);
    builder.AddColumnRange(memCatalogue.GetAllExtractionInformation(ExtractionCategory.Any));

    Assert.AreEqual(@" SELECT Mycol FROM My table", builder.SQL);
}
public override void Check(ICheckNotifier notifier)
{
    // Pre-flight validation for a DQE run.  Verifies, in order: a Catalogue is set,
    // a DQE reporting server exists, logging can be set up, ValidatorXML is present
    // and deserializable, the live server is reachable, extraction SQL can be built
    // with unique column names, every ItemValidator targets a real column, and the
    // data-load-run / pivot / time-coverage fields are sane.  Failures are reported
    // through 'notifier'; some are fatal (early return), others allow checking to continue.

    //there is a catalogue
    if (_catalogue == null)
    {
        notifier.OnCheckPerformed(new CheckEventArgs("Catalogue has not been set, either use the constructor with Catalogue parameter or use the blank constructor and call CatalogueSupportsReport instead", CheckResult.Fail));
        return;
    }

    // Can we reach the DQE reporting server? (non-fatal: checking continues on failure)
    try
    {
        var dqeRepository = new DQERepository(_catalogue.CatalogueRepository);
        notifier.OnCheckPerformed(new CheckEventArgs("Found DQE reporting server " + dqeRepository.DiscoveredServer.Name, CheckResult.Success));
    }
    catch (Exception e)
    {
        notifier.OnCheckPerformed(new CheckEventArgs("Failed to create DQE Repository, possibly there is no DataQualityEngine Reporting Server (ExternalDatabaseServer). You will need to create/set one in CatalogueManager by using 'Locations=>Manage External Servers...'", CheckResult.Fail, e));
    }

    // Logging is mandatory for recording DQE runs (fatal on failure)
    try
    {
        SetupLogging(_catalogue.CatalogueRepository);
    }
    catch (Exception e)
    {
        notifier.OnCheckPerformed(new CheckEventArgs("Failed to setup logging of DQE runs", CheckResult.Fail, e));
        return;
    }

    //there is XML
    if (string.IsNullOrWhiteSpace(_catalogue.ValidatorXML))
    {
        notifier.OnCheckPerformed(new CheckEventArgs("There is no ValidatorXML specified for the Catalogue " + _catalogue + ", you must configure validation rules", CheckResult.Fail));
        return;
    }

    notifier.OnCheckPerformed(new CheckEventArgs("Found ValidatorXML specified for the Catalogue " + _catalogue + ":" + Environment.NewLine + _catalogue.ValidatorXML, CheckResult.Success));

    //the XML is legit
    try
    {
        _validator = Validator.LoadFromXml(_catalogue.ValidatorXML);
    }
    catch (Exception e)
    {
        notifier.OnCheckPerformed(new CheckEventArgs("ValidatorXML for Catalogue " + _catalogue + " could not be deserialized into a Validator", CheckResult.Fail, e));
        return;
    }

    notifier.OnCheckPerformed(new CheckEventArgs("Deserialized validation XML successfully", CheckResult.Success));

    //there is a server
    try
    {
        _server = _catalogue.GetDistinctLiveDatabaseServer(DataAccessContext.InternalDataProcessing, true);
    }
    catch (Exception e)
    {
        notifier.OnCheckPerformed(new CheckEventArgs("Could not get connection to Catalogue " + _catalogue, CheckResult.Fail, e));
        return;
    }

    notifier.OnCheckPerformed(new CheckEventArgs("Found connection string for Catalogue " + _catalogue, CheckResult.Success));

    //we can connect to the server
    // NOTE(review): a failed connection reports Fail but does NOT return - later checks still run
    try
    {
        _server.TestConnection();
    }
    catch (Exception e)
    {
        notifier.OnCheckPerformed(new CheckEventArgs("Could not connect to server for Catalogue " + _catalogue, CheckResult.Fail, e));
    }

    //there is extraction SQL
    try
    {
        _queryBuilder = new QueryBuilder("", "");
        _queryBuilder.AddColumnRange(_catalogue.GetAllExtractionInformation(ExtractionCategory.Any));

        // Any runtime column name appearing more than once in the SELECT is an error;
        // grp.Skip(1) keeps every occurrence after the first of each name
        var duplicates = _queryBuilder.SelectColumns.GroupBy(c => c.IColumn.GetRuntimeName()).SelectMany(grp => grp.Skip(1)).ToArray();

        if (duplicates.Any())
        {
            foreach (QueryTimeColumn column in duplicates)
            {
                notifier.OnCheckPerformed(new CheckEventArgs("The column name " + column.IColumn.GetRuntimeName() + " is duplicated in the SELECT command, column names must be unique! Most likely you have 2+ columns with the same name (from different tables) or duplicate named CatalogueItem/Aliases for the same underlying ColumnInfo", CheckResult.Fail));
            }
        }

        notifier.OnCheckPerformed(new CheckEventArgs("Query Builder decided the extraction SQL was:" + Environment.NewLine + _queryBuilder.SQL, CheckResult.Success));

        SetupAdditionalValidationRules(notifier);
    }
    catch (Exception e)
    {
        notifier.OnCheckPerformed(new CheckEventArgs("Failed to generate extraction SQL", CheckResult.Fail, e));
    }

    //for each thing we are about to try and validate
    foreach (ItemValidator itemValidator in _validator.ItemValidators)
    {
        //is there a column in the query builder that matches it
        if (
            //there isnt!
            !_queryBuilder.SelectColumns.Any(c => c.IColumn.GetRuntimeName().Equals(itemValidator.TargetProperty)))
        {
            notifier.OnCheckPerformed(new CheckEventArgs("Could not find a column in the extraction SQL that would match TargetProperty " + itemValidator.TargetProperty, CheckResult.Fail));
        }
        else
        {
            //there is that is good
            notifier.OnCheckPerformed(new CheckEventArgs("Found column in query builder columns which matches TargetProperty " + itemValidator.TargetProperty, CheckResult.Success));
        }
    }

    // Record whether the dataset carries the data load run id column; without it
    // reports cannot be subdivided by load batch (warning only)
    _containsDataLoadID = _queryBuilder.SelectColumns.Any(c => c.IColumn.GetRuntimeName().Equals(_dataLoadRunFieldName));

    if (_containsDataLoadID)
    {
        notifier.OnCheckPerformed(new CheckEventArgs("Found " + _dataLoadRunFieldName + " field in ExtractionInformation", CheckResult.Success));
    }
    else
    {
        notifier.OnCheckPerformed(new CheckEventArgs("Did not find ExtractionInformation for a column called " + _dataLoadRunFieldName + ", this will prevent you from viewing the resulting report subdivided by data load batch (make sure you have this column and that it is marked as extractable)", CheckResult.Warning));
    }

    // Pivot category is optional (warning only)
    if (_catalogue.PivotCategory_ExtractionInformation_ID == null)
    {
        notifier.OnCheckPerformed(new CheckEventArgs("Catalogue does not have a pivot category so all records will appear as PivotCategory 'ALL'", CheckResult.Warning));
    }
    else
    {
        _pivotCategory = _catalogue.PivotCategory_ExtractionInformation.GetRuntimeName();
        notifier.OnCheckPerformed(new CheckEventArgs("Found time Pivot Category field " + _pivotCategory + " so we will be able to generate a categorised tesseract (evaluation, periodicity, consequence, pivot category)", CheckResult.Success));
    }

    // Table valued functions in the Catalogue cannot be evaluated (fatal check result)
    var tblValuedFunctions = _catalogue.GetTableInfoList(true).Where(t => t.IsTableValuedFunction).ToArray();
    if (tblValuedFunctions.Any())
    {
        notifier.OnCheckPerformed(new CheckEventArgs("Catalogue contains 1+ table valued function in it's TableInfos (" + string.Join(",", tblValuedFunctions.Select(t => t.ToString())), CheckResult.Fail));
    }

    // Time coverage field is mandatory; its data type should contain "date"
    if (_catalogue.TimeCoverage_ExtractionInformation_ID == null)
    {
        notifier.OnCheckPerformed(new CheckEventArgs("Catalogue does not have a time coverage field set", CheckResult.Fail));
    }
    else
    {
        var periodicityExtractionInformation = _catalogue.TimeCoverage_ExtractionInformation;

        _timePeriodicityField = periodicityExtractionInformation.GetRuntimeName();
        notifier.OnCheckPerformed(new CheckEventArgs("Found time coverage field " + _timePeriodicityField, CheckResult.Success));

        if (!periodicityExtractionInformation.ColumnInfo.Data_type.ToLower().Contains("date"))
        {
            notifier.OnCheckPerformed(new CheckEventArgs("Time periodicity field " + _timePeriodicityField + " was of type " + periodicityExtractionInformation.ColumnInfo.Data_type + " (expected the type name to contain the word 'date' - ignoring caps). It is possible (but unlikely) that you have dealt with this by applying a transform to the underlying ColumnInfo as part of the ExtractionInformation, if so you can ignore this message.", CheckResult.Warning));
        }
        else
        {
            notifier.OnCheckPerformed(new CheckEventArgs("Time periodicity field " + _timePeriodicityField + " is a legit date!", CheckResult.Success));
        }
    }
}
/// <summary>
/// Checks that the Catalogue has a sensible Name (See <see cref="IsAcceptableName(string)"/>). Then checks that there are no missing ColumnInfos,
/// builds (TOP 1) extraction SQL for the Core columns and attempts to read a single row from the live server, then checks supporting documents.
/// </summary>
/// <param name="notifier">Receives a Success/Warning/Fail event for every check performed</param>
public void Check(ICheckNotifier notifier)
{
    string reason;

    // Name must follow the naming conventions; IsAcceptableName supplies the reason when it does not
    if (!IsAcceptableName(Name, out reason))
    {
        notifier.OnCheckPerformed(new CheckEventArgs("Catalogue name " + Name + " (ID=" + ID + ") does not follow naming conventions reason:" + reason, CheckResult.Fail));
    }
    else
    {
        notifier.OnCheckPerformed(new CheckEventArgs("Catalogue name " + Name + " follows naming conventions ", CheckResult.Success));
    }

    // Delegate per-table checks to each TableInfo
    ITableInfo[] tables = GetTableInfoList(true);
    foreach (TableInfo t in tables)
    {
        t.Check(notifier);
    }

    ExtractionInformation[] extractionInformations = this.GetAllExtractionInformation(ExtractionCategory.Core);

    if (extractionInformations.Any())
    {
        bool missingColumnInfos = false;

        // Every ExtractionInformation must still have its underlying ColumnInfo
        foreach (ExtractionInformation missingColumnInfo in extractionInformations.Where(e => e.ColumnInfo == null))
        {
            notifier.OnCheckPerformed(new CheckEventArgs("ColumnInfo behind ExtractionInformation/CatalogueItem " + missingColumnInfo.GetRuntimeName() + " is MISSING, it must have been deleted", CheckResult.Fail));
            missingColumnInfos = true;
        }

        // Missing ColumnInfos make SQL generation pointless - stop here
        if (missingColumnInfos)
        {
            return;
        }

        notifier.OnCheckPerformed(new CheckEventArgs("Found " + extractionInformations.Length + " ExtractionInformation(s), preparing to validate SQL with QueryBuilder", CheckResult.Success));

        var accessContext = DataAccessContext.InternalDataProcessing;

        try
        {
            var server = DataAccessPortal.GetInstance().ExpectDistinctServer(tables, accessContext, false);

            using (var con = server.GetConnection())
            {
                con.Open();

                string sql;
                try
                {
                    // TOP 1 keeps the test query cheap - we only care that it runs at all
                    QueryBuilder qb = new QueryBuilder(null, null);
                    qb.TopX = 1;
                    qb.AddColumnRange(extractionInformations);

                    sql = qb.SQL;
                    notifier.OnCheckPerformed(new CheckEventArgs("Query Builder assembled the following SQL:" + Environment.NewLine + sql, CheckResult.Success));
                }
                catch (Exception e)
                {
                    notifier.OnCheckPerformed(new CheckEventArgs("Could not generate extraction SQL for Catalogue " + this, CheckResult.Fail, e));
                    return;
                }

                // Execute the SQL with a short timeout; an empty result set is only a warning
                using (var cmd = DatabaseCommandHelper.GetCommand(sql, con))
                {
                    cmd.CommandTimeout = 10;
                    using (DbDataReader r = cmd.ExecuteReader())
                    {
                        if (r.Read())
                        {
                            notifier.OnCheckPerformed(new CheckEventArgs("successfully read a row of data from the extraction SQL of Catalogue " + this, CheckResult.Success));
                        }
                        else
                        {
                            notifier.OnCheckPerformed(new CheckEventArgs("The query produced an empty result set for Catalogue" + this, CheckResult.Warning));
                        }
                    }
                }

                con.Close();
            }
        }
        catch (Exception e)
        {
            notifier.OnCheckPerformed(new CheckEventArgs("Extraction SQL Checking failed for Catalogue " + this + " make sure that you can access the underlying server under DataAccessContext." + accessContext + " and that the SQL generated runs correctly (see internal exception for details)", CheckResult.Fail, e));
        }
    }

    //supporting documents
    var f = new SupportingDocumentsFetcher(this);
    f.Check(notifier);
}
public void CreateANOVersionTest_LookupsAndExtractionInformations()
{
    // End-to-end test of ForwardEngineerANOCatalogue over a Catalogue configured with
    // extraction transforms, a composite-key Lookup and an inserted description column.
    // Asserts that the migrated ("ANO") Catalogue preserves column order, extraction
    // categories, transforms (refactored to the new table names) and the Lookup.

    var dbName = TestDatabaseNames.GetConsistentName("CreateANOVersionTest");

    var db = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(dbName);
    db.Create(true);

    BulkTestsData bulk = new BulkTestsData(CatalogueRepository, DiscoveredDatabaseICanCreateRandomTablesIn, 100);
    bulk.SetupTestData();
    bulk.ImportAsCatalogue();

    //Create a lookup table on the server
    var lookupTbl = DiscoveredDatabaseICanCreateRandomTablesIn.CreateTable("z_sexLookup", new[]
    {
        new DatabaseColumnRequest("Code", "varchar(1)") { IsPrimaryKey = true },
        new DatabaseColumnRequest("hb_Code", "varchar(1)") { IsPrimaryKey = true },
        new DatabaseColumnRequest("Description", "varchar(100)")
    });

    //import a reference to the table
    TableInfoImporter importer = new TableInfoImporter(CatalogueRepository, lookupTbl);

    ColumnInfo[] lookupColumnInfos;
    TableInfo lookupTableInfo;
    importer.DoImport(out lookupTableInfo, out lookupColumnInfos);

    //Create a Lookup reference
    var ciSex = bulk.catalogue.CatalogueItems.Single(c => c.Name == "sex");
    var ciHb = bulk.catalogue.CatalogueItems.Single(c => c.Name == "hb_extract");

    // chi becomes the extraction identifier
    var eiChi = bulk.extractionInformations.Single(ei => ei.GetRuntimeName() == "chi");
    eiChi.IsExtractionIdentifier = true;
    eiChi.SaveToDatabase();

    // century is hashed on release and marked Internal
    var eiCentury = bulk.extractionInformations.Single(ei => ei.GetRuntimeName() == "century");
    eiCentury.HashOnDataRelease = true;
    eiCentury.ExtractionCategory = ExtractionCategory.Internal;
    eiCentury.SaveToDatabase();

    //add a transform
    var eiPostcode = bulk.extractionInformations.Single(ei => ei.GetRuntimeName() == "current_postcode");
    eiPostcode.SelectSQL = string.Format("LEFT(10,{0}.[current_postcode])", eiPostcode.ColumnInfo.TableInfo.Name);
    eiPostcode.Alias = "MyMutilatedColumn";
    eiPostcode.SaveToDatabase();

    //add a combo transform (concatenates forename and surname into a single column)
    var ciComboCol = new CatalogueItem(CatalogueRepository, bulk.catalogue, "ComboColumn");

    var colForename = bulk.columnInfos.Single(c => c.GetRuntimeName() == "forename");
    var colSurname = bulk.columnInfos.Single(c => c.GetRuntimeName() == "surname");

    var eiComboCol = new ExtractionInformation(CatalogueRepository, ciComboCol, colForename, colForename + " + ' ' + " + colSurname);
    eiComboCol.Alias = "ComboColumn";
    eiComboCol.SaveToDatabase();

    // Remove the data load run id column from the extractable set
    var eiDataLoadRunId = bulk.extractionInformations.Single(ei => ei.GetRuntimeName().Equals(SpecialFieldNames.DataLoadRunID));
    eiDataLoadRunId.DeleteInDatabase();

    var lookup = new Lookup(CatalogueRepository, lookupColumnInfos[2], ciSex.ColumnInfo, lookupColumnInfos[0], ExtractionJoinType.Left, null);

    //now lets make it worse, lets assume the sex code changes per healthboard therefore the join to the lookup requires both fields sex and hb_extract
    var compositeLookup = new LookupCompositeJoinInfo(CatalogueRepository, lookup, ciHb.ColumnInfo, lookupColumnInfos[1]);

    //now lets make the _Desc field in the original Catalogue
    int orderToInsertDescriptionFieldAt = ciSex.ExtractionInformation.Order;

    //bump everyone down 1 so the description can slot in directly after sex
    foreach (var toBumpDown in bulk.catalogue.CatalogueItems.Select(ci => ci.ExtractionInformation).Where(e => e != null && e.Order > orderToInsertDescriptionFieldAt))
    {
        toBumpDown.Order++;
        toBumpDown.SaveToDatabase();
    }

    var ciDescription = new CatalogueItem(CatalogueRepository, bulk.catalogue, "Sex_Desc");
    var eiDescription = new ExtractionInformation(CatalogueRepository, ciDescription, lookupColumnInfos[2], lookupColumnInfos[2].Name);
    eiDescription.Alias = "Sex_Desc";
    eiDescription.Order = orderToInsertDescriptionFieldAt + 1;
    eiDescription.ExtractionCategory = ExtractionCategory.Supplemental;
    eiDescription.SaveToDatabase();

    bulk.catalogue.ClearAllInjections();

    //check it worked
    QueryBuilder qb = new QueryBuilder(null, null);
    qb.AddColumnRange(bulk.catalogue.GetAllExtractionInformation(ExtractionCategory.Any));

    //The query builder should be able to succesfully create SQL
    Console.WriteLine(qb.SQL);

    //there should be 2 tables involved in the query [z_sexLookup] and [BulkData]
    Assert.AreEqual(2, qb.TablesUsedInQuery.Count);

    //the query builder should have identified the lookup
    Assert.AreEqual(lookup, qb.GetDistinctRequiredLookups().Single());

    //////////////////////////////////////////////////////////////////////////////////////The Actual Bit Being Tested////////////////////////////////////////////////////
    var planManager = new ForwardEngineerANOCataloguePlanManager(RepositoryLocator, bulk.catalogue);
    planManager.TargetDatabase = db;

    //setup test rules for migrator
    CreateMigrationRules(planManager, bulk);

    //rules should pass checks
    Assert.DoesNotThrow(() => planManager.Check(new ThrowImmediatelyCheckNotifier()));

    var engine = new ForwardEngineerANOCatalogueEngine(RepositoryLocator, planManager);
    engine.Execute();
    //////////////////////////////////////////////////////////////////////////////////////End The Actual Bit Being Tested////////////////////////////////////////////////////

    // The migrated catalogue lives in the \ano folder
    var anoCatalogue = CatalogueRepository.GetAllObjects<Catalogue>().Single(c => c.Folder.Path.StartsWith("\\ano"));
    Assert.IsTrue(anoCatalogue.Exists());

    //The new Catalogue should have the same number of ExtractionInformations
    var eiSource = bulk.catalogue.GetAllExtractionInformation(ExtractionCategory.Any).OrderBy(ei => ei.Order).ToArray();
    var eiDestination = anoCatalogue.GetAllExtractionInformation(ExtractionCategory.Any).OrderBy(ei => ei.Order).ToArray();

    Assert.AreEqual(eiSource.Length, eiDestination.Length, "Both the new and the ANO catalogue should have the same number of ExtractionInformations (extractable columns)");

    // Column-by-column comparison: order, name (modulo ANO prefix) and flags must match
    for (int i = 0; i < eiSource.Length; i++)
    {
        Assert.AreEqual(eiSource[i].Order, eiDestination[i].Order, "ExtractionInformations in the source and destination Catalogue should have the same order");
        Assert.AreEqual(eiSource[i].GetRuntimeName(), eiDestination[i].GetRuntimeName().Replace("ANO", ""), "ExtractionInformations in the source and destination Catalogue should have the same names (excluding ANO prefix)");
        Assert.AreEqual(eiSource[i].ExtractionCategory, eiDestination[i].ExtractionCategory, "Old / New ANO ExtractionInformations did not match on ExtractionCategory");
        Assert.AreEqual(eiSource[i].IsExtractionIdentifier, eiDestination[i].IsExtractionIdentifier, "Old / New ANO ExtractionInformations did not match on IsExtractionIdentifier");
        Assert.AreEqual(eiSource[i].HashOnDataRelease, eiDestination[i].HashOnDataRelease, "Old / New ANO ExtractionInformations did not match on HashOnDataRelease");
        Assert.AreEqual(eiSource[i].IsPrimaryKey, eiDestination[i].IsPrimaryKey, "Old / New ANO ExtractionInformations did not match on IsPrimaryKey");
    }

    //check it worked
    QueryBuilder qbdestination = new QueryBuilder(null, null);
    qbdestination.AddColumnRange(anoCatalogue.GetAllExtractionInformation(ExtractionCategory.Any));

    //The query builder should be able to succesfully create SQL
    Console.WriteLine(qbdestination.SQL);

    var anoEiPostcode = anoCatalogue.GetAllExtractionInformation(ExtractionCategory.Any).Single(ei => ei.GetRuntimeName().Equals("MyMutilatedColumn"));

    //The transform on postcode should have been refactored to the new table name and preserve the scalar function LEFT...
    Assert.AreEqual(string.Format("LEFT(10,{0}.[current_postcode])", anoEiPostcode.ColumnInfo.TableInfo.GetFullyQualifiedName()), anoEiPostcode.SelectSQL);

    var anoEiComboCol = anoCatalogue.GetAllExtractionInformation(ExtractionCategory.Any).Single(ei => ei.GetRuntimeName().Equals("ComboColumn"));

    //The combo (forename + surname) transform should likewise be refactored to the new table name
    Assert.AreEqual(string.Format("{0}.[forename] + ' ' + {0}.[surname]", anoEiPostcode.ColumnInfo.TableInfo.GetFullyQualifiedName()), anoEiComboCol.SelectSQL);

    //there should be 2 tables involved in the query [z_sexLookup] and [BulkData]
    Assert.AreEqual(2, qbdestination.TablesUsedInQuery.Count);

    //the query builder should have identified the lookup but it should be the new one not the old one
    Assert.AreEqual(1, qbdestination.GetDistinctRequiredLookups().Count(), "New query builder for ano catalogue did not correctly identify that there was a Lookup");
    Assert.AreNotEqual(lookup, qbdestination.GetDistinctRequiredLookups().Single(), "New query builder for ano catalogue identified the OLD Lookup!");

    Assert.AreEqual(1, qbdestination.GetDistinctRequiredLookups().Single().GetSupplementalJoins().Count(), "The new Lookup did not have the composite join key (sex/hb_extract)");
    Assert.AreNotEqual(compositeLookup, qbdestination.GetDistinctRequiredLookups().Single().GetSupplementalJoins(), "New query builder for ano catalogue identified the OLD LookupCompositeJoinInfo!");

    db.Drop();

    // Every exported object should have a matching import record (shared object bookkeeping)
    var exports = CatalogueRepository.GetAllObjects<ObjectExport>().Count();
    var imports = CatalogueRepository.GetAllObjects<ObjectImport>().Count();

    Assert.AreEqual(exports, imports);
    Assert.IsTrue(exports > 0);
}
public void TestAnonymisingJoinKey()
{
    // Verifies that when a table (Tests) has already been anonymised by one plan,
    // a second plan over a combo Catalogue referencing that table can skip it and
    // still produce a runnable query using the anonymised (ANOTestId) join key.

    //Create a plan for the first Catalogue (Tests) - single Table dataset
    var plan1 = new ForwardEngineerANOCataloguePlanManager(RepositoryLocator, cata1);
    var testIdHeadPlan = plan1.GetPlanForColumnInfo(c1.Single(c => c.GetRuntimeName().Equals("TestId")));
    plan1.TargetDatabase = _destinationDatabase;

    //the plan is that the column TestId should be anonymised - where it's name will become ANOTestId
    testIdHeadPlan.Plan = Plan.ANO;
    testIdHeadPlan.ANOTable = _anoTable;

    plan1.Check(new ThrowImmediatelyCheckNotifier());

    var engine1 = new ForwardEngineerANOCatalogueEngine(RepositoryLocator, plan1);
    engine1.Execute();

    // The migrated catalogue must expose the anonymised column ANOTestId
    var plan1ExtractionInformationsAtDestination = engine1.NewCatalogue.GetAllExtractionInformation(ExtractionCategory.Any);

    var ei1 = plan1ExtractionInformationsAtDestination.Single(e => e.GetRuntimeName().Equals("ANOTestId"));
    Assert.IsTrue(ei1.Exists());

    //Now create a plan for the combo Catalogue which contains references to both tables (Tests and Results). Remember Tests has already been migrated as part of plan1
    var plan2 = new ForwardEngineerANOCataloguePlanManager(RepositoryLocator, _comboCata);

    //tell it to skip table 1 (Tests) and only anonymise Results
    plan2.SkippedTables.Add(t1);
    plan2.TargetDatabase = _destinationDatabase;
    plan2.Check(new ThrowImmediatelyCheckNotifier());

    //Run the anonymisation
    var engine2 = new ForwardEngineerANOCatalogueEngine(RepositoryLocator, plan2);
    engine2.Execute();

    //Did it successfully pick SetUp the correct ANO column
    var plan2ExtractionInformationsAtDestination = engine2.NewCatalogue.GetAllExtractionInformation(ExtractionCategory.Any);

    var ei2 = plan2ExtractionInformationsAtDestination.Single(e => e.GetRuntimeName().Equals("ANOTestId"));
    Assert.IsTrue(ei2.Exists());

    //and can the query be executed successfully
    var qb = new QueryBuilder(null, null);
    qb.AddColumnRange(plan2ExtractionInformationsAtDestination);

    using (var con = _destinationDatabase.Server.GetConnection())
    {
        con.Open();
        var cmd = _destinationDatabase.Server.GetCommand(qb.SQL, con);
        Assert.DoesNotThrow(() => cmd.ExecuteNonQuery());
    }

    Console.WriteLine("Final migrated combo dataset SQL was:" + qb.SQL);

    // CatalogueItem names (including the deliberate 'Measuree' misspelling) must survive migration
    Assert.IsTrue(_comboCata.CatalogueItems.Any(ci => ci.Name.Equals("Measuree")));
    Assert.IsTrue(engine2.NewCatalogue.CatalogueItems.Any(ci => ci.Name.Equals("Measuree")), "ANO Catalogue did not respect the original CatalogueItem Name");
}