/// <summary>
/// Serialises the object(s) in <c>_toExport</c> to ShareDefinition JSON.  In single object mode
/// the output goes to <c>TargetFileInfo</c>; otherwise one ".sd" file per object is written
/// into <c>TargetDirectoryInfo</c>.
/// </summary>
/// <exception cref="InvalidOperationException">Thrown in multi-object mode when no output directory has been set.</exception>
public override void Execute()
{
    base.Execute();

    // Single object mode: serialise the one object (plus its gathered children) to the chosen file
    if (TargetFileInfo != null && IsSingleObject)
    {
        var d = _gatherer.GatherDependencies(_toExport[0]);
        var shareDefinitions = d.ToShareDefinitionWithChildren(_shareManager);
        string serial = JsonConvertExtensions.SerializeObject(shareDefinitions, _repositoryLocator);
        File.WriteAllText(TargetFileInfo.FullName, serial);
        return;
    }

    // Multi object mode needs somewhere to put one file per object.
    // Was 'throw new Exception(...)'; a specific exception type is preferred and remains
    // compatible with any caller catching Exception.
    if (TargetDirectoryInfo == null)
        throw new InvalidOperationException("No output directory set");

    foreach (var o in _toExport)
    {
        var d = _gatherer.GatherDependencies(o);

        // Make the object's name filesystem-safe before using it as a filename
        var filename = QuerySyntaxHelper.MakeHeaderNameSensible(o.ToString()) + ".sd";

        var shareDefinitions = d.ToShareDefinitionWithChildren(_shareManager);
        string serial = JsonConvertExtensions.SerializeObject(shareDefinitions, _repositoryLocator);

        File.WriteAllText(Path.Combine(TargetDirectoryInfo.FullName, filename), serial);
    }
}
/// <summary>
/// Verifies that re-importing a plugin's ShareDefinitions restores objects that were deleted
/// locally (orphan import): deleting one LoadModuleAssembly and importing again brings the
/// object count back to 3.
/// </summary>
public void TestPlugin_OrphanImport_Sharing()
{
    //Setup the load module we want to test (with plugin parent)
    // NOTE(review): fi and fi2 both resolve to "Blah2.<suffix>" so the second write targets
    // the same file as the first - confirm whether fi was intended to be "Blah.<suffix>".
    var fi = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, "Blah2." + PackPluginRunner.PluginPackageSuffix));
    File.WriteAllBytes(fi.FullName, new byte[] { 0x1, 0x2 });

    var fi2 = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, "Blah2." + PackPluginRunner.PluginPackageSuffix));
    File.WriteAllBytes(fi2.FullName, new byte[] { 0x1, 0x2 });

    var fi3 = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, "Blah3." + PackPluginRunner.PluginPackageSuffix));
    File.WriteAllBytes(fi3.FullName, new byte[] { 0x3, 0x4 });

    Core.Curation.Data.Plugin p = new Core.Curation.Data.Plugin(Repository, fi, new Version(1, 1, 1), new Version(1, 1, 1, 1));
    var lma = new LoadModuleAssembly(Repository, fi2, p);
    var lma2 = new LoadModuleAssembly(Repository, fi3, p);

    //gather dependencies of the plugin (plugin[0] + lma[1])
    Gatherer g = new Gatherer(RepositoryLocator);
    ShareManager sm = new ShareManager(RepositoryLocator);
    var list = g.GatherDependencies(p).ToShareDefinitionWithChildren(sm);

    //Delete export definitions
    foreach (var e in Repository.GetAllObjects<ObjectExport>())
    {
        e.DeleteInDatabase();
    }

    //and delete plugin (CASCADE deletes lma too)
    p.DeleteInDatabase();

    //import them
    var created = sm.ImportSharedObject(list).ToArray();

    //There should be 3 (created is an array, so use Length rather than LINQ Count() - CA1829)
    Assert.AreEqual(3, created.Length);
    Assert.AreEqual(3, Repository.GetAllObjects<ObjectImport>().Count());

    lma2 = (LoadModuleAssembly)created[2];

    //now delete lma2 only
    lma2.DeleteInDatabase();
    Assert.AreEqual(2, Repository.GetAllObjects<ObjectImport>().Count());

    //import them again
    var created2 = sm.ImportSharedObject(list);

    //There should still be 3
    Assert.AreEqual(3, created2.Count());
}
/// <summary>
/// Gathering an ExtractionFilter should return the filter with its parameter(s) as children,
/// while ignoring any ExtractionFilterParameterSet 'known good value' sets.
/// </summary>
public void GatherAndShare_ExtractionFilter_Test()
{
    // Build a Catalogue with a single extractable column
    var catalogue = new Catalogue(CatalogueRepository, "Cata");
    catalogue.Periodicity = Catalogue.CataloguePeriodicity.BiMonthly;
    catalogue.SaveToDatabase();

    var item = new CatalogueItem(CatalogueRepository, catalogue, "Ci1");
    var table = new TableInfo(CatalogueRepository, "Myt");
    var column = new ColumnInfo(CatalogueRepository, "[Mt].[C1]", "varchar(10)", table);
    item.ColumnInfo_ID = column.ID;
    item.SaveToDatabase();

    // The filter under this extractable column is the object we will share
    var extractionInformation = new ExtractionInformation(CatalogueRepository, item, column, "UPPER(C1) as Fish");
    var filter = new ExtractionFilter(CatalogueRepository, "My Filter", extractionInformation);
    filter.Description = "amagad";
    filter.WhereSQL = "UPPER(C1) = @a";

    // Declare the @a parameter referenced by the WHERE SQL
    var declarationSql = filter.GetQuerySyntaxHelper().GetParameterDeclaration("@a", new DatabaseTypeRequest(typeof(string), 1));
    var parameter = filter.GetFilterFactory().CreateNewParameter(filter, declarationSql);

    // A recommended-value set for the parameter; the gatherer must NOT pick this up
    var parameterSet = new ExtractionFilterParameterSet(CatalogueRepository, filter, "Fife");
    var parameterSetValue = new ExtractionFilterParameterSetValue(CatalogueRepository, parameterSet, (ExtractionFilterParameter)parameter);
    parameterSetValue.Value = "'FISH'";

    // Act: gather dependencies of the filter
    var gatherer = new Gatherer(RepositoryLocator);
    Assert.IsTrue(gatherer.CanGatherDependencies(filter));
    var gathered = gatherer.GatherDependencies(filter);

    // Only the parameter should appear as a child (no parameter sets)
    Assert.AreEqual(1, gathered.Children.Count);
    Assert.AreEqual(parameter, gathered.Children[0].Object);

    // Tidy up
    parameterSetValue.DeleteInDatabase();
    parameterSet.DeleteInDatabase();
    catalogue.DeleteInDatabase();
}
/// <summary>
/// Exports the given <paramref name="lmd"/> (with all gathered dependencies) as ShareDefinitions
/// and imports them into a fresh in-memory repository, returning the imported clone.
/// </summary>
private LoadMetadata ShareToNewRepository(LoadMetadata lmd)
{
    // Gather the load and everything it depends on
    var gatherer = new Gatherer(RepositoryLocator);
    Assert.IsTrue(gatherer.CanGatherDependencies(lmd));
    var tree = gatherer.GatherDependencies(lmd);

    // Export to ShareDefinitions from the source repository
    var exportManager = new ShareManager(RepositoryLocator, null);
    var definitions = tree.ToShareDefinitionWithChildren(exportManager);

    // Import into a brand new in-memory repository and hand back the cloned LoadMetadata
    var destination = new MemoryDataExportRepository();
    var importManager = new ShareManager(new RepositoryProvider(destination));
    return importManager.ImportSharedObject(definitions).OfType<LoadMetadata>().Single();
}
/// <summary>
/// Gathering a Plugin should yield the plugin as the root with its LoadModuleAssembly
/// among the children.
/// </summary>
public void GatherAndShare_Plugin_Test()
{
    // Write a fake plugin package file to disk for the LoadModuleAssembly to read
    var packageFile = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, "Imaginary1" + PackPluginRunner.PluginPackageSuffix));
    File.WriteAllBytes(packageFile.FullName, new byte[] { 0x1, 0x2 });

    var plugin = new Core.Curation.Data.Plugin(CatalogueRepository, new FileInfo("Imaginary" + PackPluginRunner.PluginPackageSuffix), new System.Version(1, 1, 1), new System.Version(1, 1, 1));
    var assembly = new LoadModuleAssembly(CatalogueRepository, packageFile, plugin);
    Assert.AreEqual(assembly.Plugin_ID, plugin.ID);

    // Act: gather dependencies of the plugin
    var gatherer = new Gatherer(RepositoryLocator);
    Assert.IsTrue(gatherer.CanGatherDependencies(plugin));
    var tree = gatherer.GatherDependencies(plugin);

    // The plugin itself is the root; the assembly appears among its children
    Assert.AreEqual(tree.Object, plugin);
    Assert.IsTrue(tree.Children.Any(node => node.Object.Equals(assembly)));
}
/// <summary>
/// End-to-end share/import of an ANOTable and its parent server: exports both as
/// ShareDefinitions (optionally round-tripped through JSON), deletes the originals,
/// imports them as new objects, then re-imports to confirm sharing is an UPSERT.
/// </summary>
/// <param name="goViaJson">When true the ShareDefinitions are serialised to JSON and back before import.</param>
public void GatherAndShare_ANOTable_Test(bool goViaJson)
{
    // An ANO server plus a table stored on it; ANOTable records its host via Server_ID
    var anoserver = new ExternalDatabaseServer(CatalogueRepository, "MyGatherAndShareTestANOServer", new ANOStorePatcher());
    var anoTable = new ANOTable(CatalogueRepository, anoserver, "ANOMagad", "N");
    Assert.AreEqual(anoTable.Server_ID, anoserver.ID);

    Gatherer g = new Gatherer(RepositoryLocator);
    Assert.IsTrue(g.CanGatherDependencies(anoTable));
    var gObj = g.GatherDependencies(anoTable);

    //root should be the server
    Assert.AreEqual(gObj.Object, anoserver);
    Assert.AreEqual(gObj.Children.Single().Object, anoTable);

    //get the sharing definitions
    var shareManager = new ShareManager(RepositoryLocator);
    ShareDefinition defParent = gObj.ToShareDefinition(shareManager, new List <ShareDefinition>());
    // The child's definition is built with the parent already in the shared list so its
    // foreign key reference can be resolved on import
    ShareDefinition defChild = gObj.Children.Single().ToShareDefinition(shareManager, new List <ShareDefinition>(new [] { defParent }));

    //make it look like we never had it in the first place
    shareManager.GetNewOrExistingExportFor(anoserver).DeleteInDatabase();
    shareManager.GetNewOrExistingExportFor(anoTable).DeleteInDatabase();
    anoTable.DeleteInDatabase();
    anoserver.DeleteInDatabase();

    // Optionally prove the definitions survive a JSON serialise/deserialise round trip
    if (goViaJson)
    {
        var sParent = JsonConvertExtensions.SerializeObject(defParent, RepositoryLocator);
        var sChild = JsonConvertExtensions.SerializeObject(defChild, RepositoryLocator);
        defParent = (ShareDefinition)JsonConvertExtensions.DeserializeObject(sParent, typeof(ShareDefinition), RepositoryLocator);
        defChild = (ShareDefinition)JsonConvertExtensions.DeserializeObject(sChild, typeof(ShareDefinition), RepositoryLocator);
    }

    // Import: constructing from a ShareDefinition creates the object in the database
    var anoserverAfter = new ExternalDatabaseServer(shareManager, defParent);
    Assert.IsTrue(anoserverAfter.Exists());

    //new instance
    Assert.AreNotEqual(anoserverAfter.ID, anoserver.ID);

    //same properties
    Assert.AreEqual(anoserverAfter.Name, anoserver.Name);
    Assert.AreEqual(anoserverAfter.CreatedByAssembly, anoserver.CreatedByAssembly);
    Assert.AreEqual(anoserverAfter.Database, anoserver.Database);
    Assert.AreEqual(anoserverAfter.DatabaseType, anoserver.DatabaseType);
    Assert.AreEqual(anoserverAfter.Username, anoserver.Username);
    Assert.AreEqual(anoserverAfter.Password, anoserver.Password);

    var anoTableAfter = new ANOTable(shareManager, defChild);

    //new instance
    Assert.AreNotEqual(anoTableAfter.ID, anoTable.ID);
    Assert.AreNotEqual(anoTableAfter.Server_ID, anoTable.Server_ID);

    //same properties
    Assert.AreEqual(anoTableAfter.NumberOfCharactersToUseInAnonymousRepresentation, anoTable.NumberOfCharactersToUseInAnonymousRepresentation);
    Assert.AreEqual(anoTableAfter.Suffix, anoTable.Suffix);

    //change a property and save it
    anoTableAfter.Suffix = "CAMMELS!";
    CatalogueRepository.SaveToDatabase(anoTableAfter);
    // anoTableAfter.SaveToDatabase() is avoided because it would go and check that the
    // ANOTable exists on the referenced server - which is imaginary - hence the line above

    // Re-import: this time it should be an update - the share definition overrides our
    // database copy (sharing is UPSERT), reverting Suffix back to "N"
    var anoTableAfter2 = new ANOTable(shareManager, defChild);
    Assert.AreEqual(anoTableAfter.ID, anoTableAfter2.ID);
    Assert.AreEqual("N", anoTableAfter2.Suffix);

    // The in-memory anoTableAfter still holds "CAMMELS!" so it now differs from the database copy
    Assert.AreEqual(ChangeDescription.DatabaseCopyDifferent, anoTableAfter.HasLocalChanges().Evaluation);

    // Tidy up the imported objects and any ObjectImport audit records
    anoTableAfter.DeleteInDatabase();
    anoserverAfter.DeleteInDatabase();

    foreach (ObjectImport o in RepositoryLocator.CatalogueRepository.GetAllObjects <ObjectImport>())
    {
        o.DeleteInDatabase();
    }
}
/// <summary>
/// End-to-end share/import of a Catalogue with two CatalogueItems: exports as ShareDefinitions
/// (optionally via JSON), verifies import overwrites local changes (UPSERT), exercises
/// ImportPropertiesOnly (which must not change Name), and finally re-imports after a full delete.
/// </summary>
/// <param name="goViaJson">When true the ShareDefinitions are serialised to JSON and back before import.</param>
public void GatherAndShare_Catalogue_Test(bool goViaJson)
{
    //Setup some objects under Catalogue that we can share
    var cata = new Catalogue(CatalogueRepository, "Cata");
    cata.Periodicity = Catalogue.CataloguePeriodicity.BiMonthly;
    cata.SaveToDatabase();

    var catalogueItem1 = new CatalogueItem(CatalogueRepository, cata, "Ci1");
    var catalogueItem2 = new CatalogueItem(CatalogueRepository, cata, "Ci2");

    var tableInfo = new TableInfo(CatalogueRepository, "Myt");
    var colInfo = new ColumnInfo(CatalogueRepository, "[Mt].[C1]", "varchar(10)", tableInfo);
    catalogueItem1.ColumnInfo_ID = colInfo.ID;
    catalogueItem1.SaveToDatabase();

    // Creating the ExtractionInformation registers the column as extractable as a database
    // side effect; the local variable itself is not referenced again
    var ei = new ExtractionInformation(CatalogueRepository, catalogueItem1, colInfo, "UPPER(C1) as Fish");

    //the logging server has a system default so should have been populated
    Assert.IsNotNull(cata.LiveLoggingServer_ID);

    //Catalogue sharing should be allowed
    Gatherer g = new Gatherer(RepositoryLocator);
    Assert.IsTrue(g.CanGatherDependencies(cata));

    //gather the objects depending on Catalogue as a tree
    var gObj = g.GatherDependencies(cata);
    Assert.AreEqual(2, gObj.Children.Count); //both cata items

    var lmd = new LoadMetadata(CatalogueRepository);
    cata.LoadMetadata_ID = lmd.ID;
    cata.SaveToDatabase();

    //get the share definition
    var shareManager = new ShareManager(RepositoryLocator);
    var shareDefinition = gObj.ToShareDefinitionWithChildren(shareManager);

    // Optionally round-trip every definition through JSON before importing
    if (goViaJson)
    {
        var json = shareDefinition.Select(s => JsonConvertExtensions.SerializeObject(s, RepositoryLocator)).ToList();
        shareDefinition = json.Select(
            j => JsonConvertExtensions.DeserializeObject(j, typeof(ShareDefinition), RepositoryLocator))
            .Cast <ShareDefinition>()
            .ToList();
    }

    //make a local change
    cata.Name = "fishfish";
    cata.SubjectNumbers = "123";
    cata.LoadMetadata_ID = null;
    cata.Periodicity = Catalogue.CataloguePeriodicity.Unknown;
    cata.SaveToDatabase();
    lmd.DeleteInDatabase();

    //import the saved copy
    shareManager.ImportSharedObject(shareDefinition);

    //revert the memory copy and check it got overwritten with the original saved values
    cata.RevertToDatabaseState();
    Assert.AreEqual("Cata", cata.Name);

    // Exporting created ObjectExport audit records
    var exports = CatalogueRepository.GetAllObjects <ObjectExport>();
    Assert.IsTrue(exports.Any());

    //now delete and report
    foreach (var d in exports)
    {
        d.DeleteInDatabase();
    }

    //make a local change including Name
    cata.Name = "fishfish";
    cata.SaveToDatabase();

    //test importing the Catalogue properties only
    shareManager.ImportPropertiesOnly(cata, shareDefinition[0]);

    //import the defined properties but not name
    Assert.AreEqual("fishfish", cata.Name);
    Assert.AreEqual(Catalogue.CataloguePeriodicity.BiMonthly, cata.Periodicity); //reset this though
    Assert.IsNull(cata.LoadMetadata_ID);
    cata.SaveToDatabase();

    cata.DeleteInDatabase();

    //none of these should now exist thanks to cascade deletes
    Assert.IsFalse(cata.Exists());
    Assert.IsFalse(catalogueItem1.Exists());
    Assert.IsFalse(catalogueItem2.Exists());

    //import the saved copy - recreates the Catalogue and both items from scratch
    var newObjects = shareManager.ImportSharedObject(shareDefinition).ToArray();
    Assert.AreEqual("Cata", ((Catalogue)newObjects[0]).Name);
    Assert.AreEqual("Ci1", ((CatalogueItem)newObjects[1]).Name);
    Assert.AreEqual("Ci2", ((CatalogueItem)newObjects[2]).Name);
}
/// <summary>
/// Serialises the object(s) in <c>_toExport</c> to ShareDefinition JSON, prompting the user
/// for an output file (single object) or directory (multiple objects) when running
/// interactively.  Optionally shows the output directory in Explorer afterwards.
/// </summary>
/// <exception cref="InvalidOperationException">Thrown in multi-object mode when no output directory has been set.</exception>
public override void Execute()
{
    base.Execute();

    if (IsSingleObject)
    {
        //Extract a single object (to file) - prompt for the destination if we can
        if (TargetFileInfo == null && BasicActivator.IsInteractive)
        {
            TargetFileInfo = BasicActivator.SelectFile("Path to output share definition to", "Share Definition", "*.sd");

            // User cancelled the picker
            if (TargetFileInfo == null)
                return;
        }
    }
    else
    {
        if (TargetDirectoryInfo == null && BasicActivator.IsInteractive)
        {
            TargetDirectoryInfo = BasicActivator.SelectDirectory("Output Directory");

            // User cancelled the picker
            if (TargetDirectoryInfo == null)
                return;
        }
    }

    // Single object mode: serialise the one object (plus its gathered children) to the chosen file
    if (TargetFileInfo != null && IsSingleObject)
    {
        var d = _gatherer.GatherDependencies(_toExport[0]);
        var shareDefinitions = d.ToShareDefinitionWithChildren(_shareManager);
        string serial = JsonConvertExtensions.SerializeObject(shareDefinitions, _repositoryLocator);
        File.WriteAllText(TargetFileInfo.FullName, serial);
        return;
    }

    // Multi object mode needs somewhere to put one file per object.
    // Was 'throw new Exception(...)'; a specific exception type is preferred and remains
    // compatible with any caller catching Exception.
    if (TargetDirectoryInfo == null)
        throw new InvalidOperationException("No output directory set");

    foreach (var o in _toExport)
    {
        var d = _gatherer.GatherDependencies(o);

        // Make the object's name filesystem-safe before using it as a filename
        var filename = QuerySyntaxHelper.MakeHeaderNameSensible(o.ToString()) + ".sd";

        var shareDefinitions = d.ToShareDefinitionWithChildren(_shareManager);
        string serial = JsonConvertExtensions.SerializeObject(shareDefinitions, _repositoryLocator);

        File.WriteAllText(Path.Combine(TargetDirectoryInfo.FullName, filename), serial);
    }

    if (ShowInExplorer && TargetDirectoryInfo != null)
    {
        UsefulStuff.GetInstance().ShowFolderInWindowsExplorer(TargetDirectoryInfo);
    }
}
/// <summary>
/// Serialises each object in <paramref name="toSendAll"/> (with its gathered dependencies) to
/// JSON and HTTP POSTs it to every configured remote RDMP, reporting progress per remote via
/// <c>listener</c>.  Invokes <paramref name="callback"/> after all sends complete.
/// </summary>
/// <typeparam name="T">Database object type being shared; must be supported by the Gatherer.</typeparam>
/// <param name="toSendAll">Objects to send to every remote.</param>
/// <param name="callback">Optional action invoked once all sends have finished.</param>
// NOTE(review): 'async void' means callers cannot await completion or observe exceptions
// thrown here; 'async Task' is the recommended signature outside event handlers - confirm
// whether any caller relies on fire-and-forget semantics before changing.
public async void SendToAllRemotes <T>(T[] toSendAll, Action callback = null) where T : IMapsDirectlyToDatabaseTable
{
    listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Ready to send " + toSendAll.Length + " " + typeof(T).Name + " items to all remotes."));

    // Per-remote count of completed sends; shared with the worker tasks and guarded by lock(done)
    var done = new Dictionary <string, int>();

    // Announce a zero-progress baseline for every remote up front
    foreach (var remoteRDMP in remotes)
    {
        listener.OnProgress(this, new ProgressEventArgs(remoteRDMP.Name, new ProgressMeasurement(0, ProgressType.Records, toSendAll.Length), new TimeSpan()));
    }

    var tasks = new List <Task>();

    foreach (var remote in remotes)
    {
        done.Add(remote.Name, 0);

        foreach (var toSend in toSendAll)
        {
            if (!_gatherer.CanGatherDependencies(toSend))
            {
                throw new Exception("Type " + typeof(T) + " is not supported yet by Gatherer and therefore cannot be shared");
            }

            // Serialise the object plus all its dependencies for transmission
            var share = _gatherer.GatherDependencies(toSend).ToShareDefinitionWithChildren(_shareManager);
            var json = JsonConvertExtensions.SerializeObject(share, _repositoryLocator);

            var handler = new HttpClientHandler()
            {
                Credentials = new NetworkCredential(remote.Username, remote.GetDecryptedPassword())
            };

            HttpResponseMessage result;
            var apiUrl = remote.GetUrlFor <T>();

            // Copy the loop variables so the closure below captures per-iteration values
            RemoteRDMP remote1 = remote;
            T toSend1 = toSend;

            // NOTE(review): a new HttpClient is constructed per object/remote pair; reusing a
            // single client (or IHttpClientFactory) is the recommended pattern.  Also
            // 'new Task(...)' + Start() is normally written Task.Run(...) - verify before changing.
            var sender = new Task(() =>
            {
                using (var client = new HttpClient(handler))
                {
                    try
                    {
                        // Blocking .Result here runs on the dedicated worker task, not the caller's context
                        result = client.PostAsync(new Uri(apiUrl), new StringContent(json, Encoding.UTF8, "text/plain")).Result;

                        if (result.IsSuccessStatusCode)
                        {
                            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Sending " + toSend1 + " to " + remote1.Name + " completed."));
                        }
                        else
                        {
                            listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Error, "Error sending " + toSend1 + " to " + remote1.Name + ": " + result.ReasonPhrase + " - " + result.Content.ReadAsStringAsync().Result));
                        }

                        // 'done' is mutated by multiple worker tasks - serialise the increment + report
                        lock (done)
                        {
                            listener.OnProgress(this, new ProgressEventArgs(remote1.Name, new ProgressMeasurement(++done[remote1.Name], ProgressType.Records, toSendAll.Length), new TimeSpan()));
                        }
                    }
                    catch (Exception ex)
                    {
                        listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Error, "Error sending " + toSend1 + " to " + remote1.Name, ex));
                        listener.OnProgress(this, new ProgressEventArgs(remote1.Name, new ProgressMeasurement(1, ProgressType.Records, 1), new TimeSpan()));
                    }
                }
            });

            sender.Start();
            tasks.Add(sender);
        }
    }

    // Wait for every send to finish, then fire the optional completion callback
    await Task.WhenAll(tasks);

    if (callback != null)
    {
        callback();
    }
}