/// <summary>
/// Tests that re-importing a ShareDefinition list restores objects whose ObjectImport
/// records were deleted (orphaned imports), and that deleting one imported child and
/// importing again recreates it.
/// </summary>
public void TestPlugin_OrphanImport_Sharing()
{
    //Setup the load modules we want to test (with plugin parent)
    //BUGFIX: fi previously used "Blah2." (identical to fi2) so both WriteAllBytes calls
    //targeted the same file; each fixture file must be distinct.
    var fi = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, "Blah." + PackPluginRunner.PluginPackageSuffix));
    File.WriteAllBytes(fi.FullName, new byte[] { 0x1, 0x2 });

    var fi2 = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, "Blah2." + PackPluginRunner.PluginPackageSuffix));
    File.WriteAllBytes(fi2.FullName, new byte[] { 0x1, 0x2 });

    var fi3 = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, "Blah3." + PackPluginRunner.PluginPackageSuffix));
    File.WriteAllBytes(fi3.FullName, new byte[] { 0x3, 0x4 });

    Core.Curation.Data.Plugin p = new Core.Curation.Data.Plugin(Repository, fi, new Version(1, 1, 1), new Version(1, 1, 1, 1));
    var lma = new LoadModuleAssembly(Repository, fi2, p);
    var lma2 = new LoadModuleAssembly(Repository, fi3, p);

    //gather dependencies of the plugin (plugin[0] + lma[1])
    Gatherer g = new Gatherer(RepositoryLocator);
    ShareManager sm = new ShareManager(RepositoryLocator);
    var list = g.GatherDependencies(p).ToShareDefinitionWithChildren(sm);

    //Delete export definitions
    foreach (var e in Repository.GetAllObjects<ObjectExport>())
        e.DeleteInDatabase();

    //and delete plugin (CASCADE deletes lma too)
    p.DeleteInDatabase();

    //import them
    var created = sm.ImportSharedObject(list).ToArray();

    //There should be 3 (use Length rather than LINQ Count() - created is an array)
    Assert.AreEqual(3, created.Length);
    Assert.AreEqual(3, Repository.GetAllObjects<ObjectImport>().Count());

    lma2 = (LoadModuleAssembly)created[2];

    //now delete lma2 only
    lma2.DeleteInDatabase();
    Assert.AreEqual(2, Repository.GetAllObjects<ObjectImport>().Count());

    //import them again
    var created2 = sm.ImportSharedObject(list);

    //There should still be 3
    Assert.AreEqual(3, created2.Count());
}
/// <summary>
/// Prompts the user to pick one or more Sharing Definition (*.sd) files and imports
/// each into the repository via <see cref="ShareManager"/>.  Any error during import
/// is shown in an <see cref="ExceptionViewer"/> rather than propagated.
/// </summary>
public override void Execute()
{
    base.Execute();

    //BUGFIX: OpenFileDialog is IDisposable (Component) and was never disposed
    using (OpenFileDialog ofd = new OpenFileDialog())
    {
        ofd.Filter = "Sharing Definition File (*.sd)|*.sd";
        ofd.Multiselect = true;

        if (ofd.ShowDialog() != DialogResult.OK)
            return;

        try
        {
            ShareManager shareManager = new ShareManager(Activator.RepositoryLocator);
            shareManager.LocalReferenceGetter = LocalReferenceGetter;

            foreach (var f in ofd.FileNames)
            {
                using (var stream = File.Open(f, FileMode.Open))
                    shareManager.ImportSharedObject(stream);
            }
        }
        catch (Exception e)
        {
            ExceptionViewer.Show("Error importing file(s)", e);
        }
    }
}
/// <summary>
/// Asks the user to select one or more Sharing Definition (*.sd) files and imports
/// each one into the repository; any exception is surfaced via the activator rather
/// than thrown.
/// </summary>
public override void Execute()
{
    base.Execute();

    var files = BasicActivator.SelectFiles("Select ShareDefinitions to import", "Sharing Definition File", "*.sd");

    //nothing chosen (or dialog cancelled) - nothing to do
    if (files == null || !files.Any())
        return;

    try
    {
        var shareManager = new ShareManager(BasicActivator.RepositoryLocator)
        {
            LocalReferenceGetter = LocalReferenceGetter
        };

        foreach (var file in files)
        {
            using (var stream = File.Open(file.FullName, FileMode.Open))
                shareManager.ImportSharedObject(stream);
        }
    }
    catch (Exception ex)
    {
        BasicActivator.ShowException("Error importing file(s)", ex);
    }
}
/// <summary>
/// Imports any ShareDefinition (*.sd) files found in the job's ForLoading directory
/// into the repository, notifying the job of each file found/imported.  Sharing errors
/// are reported as warnings rather than failing the load.
/// </summary>
/// <param name="job">The data load job whose ForLoading directory is scanned.</param>
/// <param name="cancellationToken">Unused; imports are quick and run to completion.</param>
/// <returns>Always <see cref="ExitCodeType.Success"/> (errors surface as warnings).</returns>
public ExitCodeType Fetch(IDataLoadJob job, GracefulCancellationToken cancellationToken)
{
    int imported = 0;

    try
    {
        var shareManager = new ShareManager(job.RepositoryLocator);

        foreach (var shareDefinitionFile in job.LoadDirectory.ForLoading.EnumerateFiles("*.sd"))
        {
            job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Found '" + shareDefinitionFile.Name + "'"));

            using (var stream = File.Open(shareDefinitionFile.FullName, FileMode.Open))
                shareManager.ImportSharedObject(stream);

            imported++;

            //BUGFIX: message previously misspelled "Succesfully"
            job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Imported '" + shareDefinitionFile.Name + "' Successfully"));
        }
    }
    catch (SharingException ex)
    {
        //BUGFIX: message previously misspelled "occured"
        job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "Error occurred importing ShareDefinitions", ex));
    }

    //summary message: warn if the directory contained no *.sd files at all
    job.OnNotify(this, new NotifyEventArgs(imported == 0 ? ProgressEventType.Warning : ProgressEventType.Information, "Imported " + imported + " ShareDefinition files"));

    return ExitCodeType.Success;
}
/// <summary>
/// Round-trips <paramref name="lmd"/> through the sharing system: gathers it (and its
/// children) into ShareDefinitions, then imports them into a brand new in-memory
/// repository and returns the cloned <see cref="LoadMetadata"/>.
/// </summary>
private LoadMetadata ShareToNewRepository(LoadMetadata lmd)
{
    var sourceGatherer = new Gatherer(RepositoryLocator);
    Assert.IsTrue(sourceGatherer.CanGatherDependencies(lmd));

    var dependencyTree = sourceGatherer.GatherDependencies(lmd);
    var definitions = dependencyTree.ToShareDefinitionWithChildren(new ShareManager(RepositoryLocator, null));

    var destinationRepository = new MemoryDataExportRepository();
    var destinationShareManager = new ShareManager(new RepositoryProvider(destinationRepository));

    return destinationShareManager
        .ImportSharedObject(definitions)
        .OfType<LoadMetadata>()
        .Single();
}
/// <summary>
/// Imports every table in a user-chosen database as TableInfos, optionally importing
/// ShareDefinition metadata files first so that new tables can be married up to
/// Catalogues/CatalogueItems of the same name (or empty Catalogues generated instead).
/// Finally offers to make all married columns extractable.
/// </summary>
public override void Execute()
{
    base.Execute();

    //BUGFIX: prompt previously read "form Database" (typo); the newer variant of this
    //command already says "from Database"
    var db = SelectDatabase(false, "Import all Tables from Database...");

    if (db == null)
        return;

    ShareManager shareManager = new ShareManager(Activator.RepositoryLocator, LocalReferenceGetter);

    List<ICatalogue> catalogues = new List<ICatalogue>();

    //don't do any double importing!
    var existing = Activator.RepositoryLocator.CatalogueRepository.GetAllObjects<TableInfo>();
    var ignoredTables = new List<TableInfo>();

    if (YesNo("Would you also like to import ShareDefinitions (metadata)?", "Import Metadata From File(s)"))
    {
        //BUGFIX: dialog is IDisposable and was never disposed
        using (OpenFileDialog ofd = new OpenFileDialog() { Multiselect = true })
        {
            ofd.Filter = "Share Definitions|*.sd";

            if (ofd.ShowDialog() == DialogResult.OK)
            {
                foreach (var f in ofd.FileNames)
                {
                    using (var stream = File.Open(f, FileMode.Open))
                    {
                        var newObjects = shareManager.ImportSharedObject(stream);

                        //remember any Catalogues the share contained so we can match tables to them below
                        if (newObjects != null)
                            catalogues.AddRange(newObjects.OfType<ICatalogue>());
                    }
                }
            }
        }
    }

    bool generateCatalogues = false;

    if (YesNo("Would you like to try to guess non-matching Catalogues by Name?", "Guess by name"))
        catalogues.AddRange(Activator.RepositoryLocator.CatalogueRepository.GetAllObjects<Catalogue>());
    else if (YesNo("Would you like to generate empty Catalogues for non-matching tables instead?", "Generate New Catalogues"))
        generateCatalogues = true;

    //CatalogueItem -> ColumnInfo pairings we successfully hooked together
    var married = new Dictionary<CatalogueItem, ColumnInfo>();

    TableInfo anyNewTable = null;

    foreach (DiscoveredTable discoveredTable in db.DiscoverTables(includeViews: false))
    {
        //skip tables that already exist as TableInfos
        var collide = existing.FirstOrDefault(t => t.Is(discoveredTable));
        if (collide != null)
        {
            ignoredTables.Add(collide);
            continue;
        }

        var importer = new TableInfoImporter(Activator.RepositoryLocator.CatalogueRepository, discoveredTable);

        TableInfo ti;
        ColumnInfo[] cis;

        //import the table
        importer.DoImport(out ti, out cis);

        //remember one new table so we can Publish/Emphasise it at the end
        anyNewTable = anyNewTable ?? ti;

        //find a Catalogue of the same name (possibly imported from Share Definition)
        var matchingCatalogues = catalogues.Where(c => c.Name.Equals(ti.GetRuntimeName(), StringComparison.CurrentCultureIgnoreCase)).ToArray();

        //if there's 1 Catalogue with the same name
        if (matchingCatalogues.Length == 1)
        {
            //we know we want to import all these ColumnInfos
            var unmatched = new List<ColumnInfo>(cis);

            //But hopefully most already have orphan CatalogueItems we can hook them together to
            foreach (var cataItem in matchingCatalogues[0].CatalogueItems)
            {
                if (cataItem.ColumnInfo_ID == null)
                {
                    var matches = cataItem.GuessAssociatedColumn(cis, allowPartial: false).ToArray();

                    if (matches.Length == 1)
                    {
                        cataItem.SetColumnInfo(matches[0]);
                        unmatched.Remove(matches[0]);

                        //we married them together
                        married.Add(cataItem, matches[0]);
                    }
                }
            }

            //is anyone unmarried? i.e. new ColumnInfos that don't have CatalogueItems with the same name
            foreach (ColumnInfo columnInfo in unmatched)
            {
                var cataItem = new CatalogueItem(Activator.RepositoryLocator.CatalogueRepository, (Catalogue)matchingCatalogues[0], columnInfo.GetRuntimeName());
                cataItem.ColumnInfo_ID = columnInfo.ID;
                cataItem.SaveToDatabase();
                married.Add(cataItem, columnInfo);
            }
        }
        else if (generateCatalogues)
        {
            new ForwardEngineerCatalogue(ti, cis).ExecuteForwardEngineering();
        }
    }

    if (married.Any() && YesNo("Found " + married.Count + " columns, make them all extractable?", "Make Extractable"))
    {
        foreach (var kvp in married)
        {
            //yup thats how we roll, the database is main memory! (the constructor persists the object)
            var ei = new ExtractionInformation(Activator.RepositoryLocator.CatalogueRepository, kvp.Key, kvp.Value, kvp.Value.Name);
        }
    }

    if (ignoredTables.Any())
        WideMessageBox.Show("Ignored some tables", "Ignored " + ignoredTables.Count + " tables because they already existed as TableInfos:" + string.Join(Environment.NewLine, ignoredTables.Select(ti => ti.GetRuntimeName())));

    if (anyNewTable != null)
    {
        Publish(anyNewTable);
        Emphasise(anyNewTable);
    }
}
/// <summary>
/// End-to-end test of gathering a Catalogue (with CatalogueItems / ColumnInfo /
/// ExtractionInformation) into ShareDefinitions, optionally round-tripping them through
/// JSON, then importing them back to overwrite local changes, importing properties only,
/// and finally recreating the objects from scratch after a cascade delete.
/// </summary>
/// <param name="goViaJson">If true the ShareDefinitions are serialized to JSON and back before import.</param>
public void GatherAndShare_Catalogue_Test(bool goViaJson)
{
    //Setup some objects under Catalogue that we can share
    var cata = new Catalogue(CatalogueRepository, "Cata");
    cata.Periodicity = Catalogue.CataloguePeriodicity.BiMonthly;
    cata.SaveToDatabase();

    var catalogueItem1 = new CatalogueItem(CatalogueRepository, cata, "Ci1");
    var catalogueItem2 = new CatalogueItem(CatalogueRepository, cata, "Ci2");

    var tableInfo = new TableInfo(CatalogueRepository, "Myt");
    var colInfo = new ColumnInfo(CatalogueRepository, "[Mt].[C1]", "varchar(10)", tableInfo);

    //link one CatalogueItem to the underlying column
    catalogueItem1.ColumnInfo_ID = colInfo.ID;
    catalogueItem1.SaveToDatabase();

    var ei = new ExtractionInformation(CatalogueRepository, catalogueItem1, colInfo, "UPPER(C1) as Fish");

    //the logging server has a system default so should have been populated
    Assert.IsNotNull(cata.LiveLoggingServer_ID);

    //Catalogue sharing should be allowed
    Gatherer g = new Gatherer(RepositoryLocator);
    Assert.IsTrue(g.CanGatherDependencies(cata));

    //gather the objects depending on Catalogue as a tree
    var gObj = g.GatherDependencies(cata);
    Assert.AreEqual(2, gObj.Children.Count); //both cata items

    var lmd = new LoadMetadata(CatalogueRepository);
    cata.LoadMetadata_ID = lmd.ID;
    cata.SaveToDatabase();

    //get the share definition
    var shareManager = new ShareManager(RepositoryLocator);
    var shareDefinition = gObj.ToShareDefinitionWithChildren(shareManager);

    //optionally serialize to JSON strings and back to prove the definitions survive the round trip
    if (goViaJson)
    {
        var json = shareDefinition.Select(s => JsonConvertExtensions.SerializeObject(s, RepositoryLocator)).ToList();
        shareDefinition = json.Select(
            j => JsonConvertExtensions.DeserializeObject(j, typeof(ShareDefinition), RepositoryLocator))
            .Cast<ShareDefinition>()
            .ToList();
    }

    //make a local change
    cata.Name = "fishfish";
    cata.SubjectNumbers = "123";
    cata.LoadMetadata_ID = null;
    cata.Periodicity = Catalogue.CataloguePeriodicity.Unknown;
    cata.SaveToDatabase();

    lmd.DeleteInDatabase();

    //import the saved copy
    shareManager.ImportSharedObject(shareDefinition);

    //revert the memory copy and check it got overwritten with the original saved values
    cata.RevertToDatabaseState();
    Assert.AreEqual("Cata", cata.Name);

    var exports = CatalogueRepository.GetAllObjects<ObjectExport>();
    Assert.IsTrue(exports.Any());

    //now delete and report
    foreach (var d in exports)
    {
        d.DeleteInDatabase();
    }

    //make a local change including Name
    cata.Name = "fishfish";
    cata.SaveToDatabase();

    //test importing the Catalogue properties only
    shareManager.ImportPropertiesOnly(cata, shareDefinition[0]);

    //import the defined properties but not name
    Assert.AreEqual("fishfish", cata.Name);
    Assert.AreEqual(Catalogue.CataloguePeriodicity.BiMonthly, cata.Periodicity); //reset this though
    Assert.IsNull(cata.LoadMetadata_ID);
    cata.SaveToDatabase();

    cata.DeleteInDatabase();

    //none of these should now exist thanks to cascade deletes
    Assert.IsFalse(cata.Exists());
    Assert.IsFalse(catalogueItem1.Exists());
    Assert.IsFalse(catalogueItem2.Exists());

    //import the saved copy
    var newObjects = shareManager.ImportSharedObject(shareDefinition).ToArray();

    //the recreated objects should carry the originally shared names
    Assert.AreEqual("Cata", ((Catalogue)newObjects[0]).Name);
    Assert.AreEqual("Ci1", ((CatalogueItem)newObjects[1]).Name);
    Assert.AreEqual("Ci2", ((CatalogueItem)newObjects[2]).Name);
}
//NOTE(review): this method appears truncated in this chunk - the final 'if' statement
//has no body here; the remainder of the method is outside the visible range.
public override void Execute()
{
    base.Execute();

    var db = SelectDatabase(false, "Import all Tables from Database...");

    if (db == null)
    {
        return;
    }

    ShareManager shareManager = new ShareManager(BasicActivator.RepositoryLocator, LocalReferenceGetter);

    List<ICatalogue> catalogues = new List<ICatalogue>();

    //don't do any double importing!
    var existing = BasicActivator.RepositoryLocator.CatalogueRepository.GetAllObjects<TableInfo>();

    if (YesNo("Would you also like to import ShareDefinitions (metadata)?", "Import Metadata From File(s)"))
    {
        var chosen = BasicActivator.SelectFiles("Share Definition Files", "Share Definitions", "*.sd");

        if (chosen != null)
        {
            foreach (var f in chosen)
            {
                using (var stream = File.Open(f.FullName, FileMode.Open))
                {
                    var newObjects = shareManager.ImportSharedObject(stream);

                    //collect any Catalogues contained in the share for name-matching later
                    if (newObjects != null)
                    {
                        catalogues.AddRange(newObjects.OfType<ICatalogue>());
                    }
                }
            }
        }
    }

    bool generateCatalogues = false;

    if (YesNo("Would you like to try to guess non-matching Catalogues by Name?", "Guess by name"))
    {
        catalogues.AddRange(BasicActivator.RepositoryLocator.CatalogueRepository.GetAllObjects<Catalogue>());
    }
    else if (YesNo("Would you like to generate empty Catalogues for non-matching tables instead?", "Generate New Catalogues"))
    {
        generateCatalogues = true;
    }

    var married = new Dictionary<CatalogueItem, ColumnInfo>();

    ITableInfo anyNewTable = null;

    //tables in the database that do not already exist as TableInfos
    List<DiscoveredTable> novel = new List<DiscoveredTable>();

    foreach (DiscoveredTable discoveredTable in db.DiscoverTables(includeViews: false))
    {
        var collide = existing.FirstOrDefault(t => t.Is(discoveredTable));
        if (collide == null)
        {
            novel.Add(discoveredTable);
        }
    }

    if (!BasicActivator.SelectObjects("Import", novel.ToArray(), out DiscoveredTable[] selected))
/// <summary>
/// Imports the ShareDefinitions in <paramref name="stream"/> (replacing any existing
/// copies) and returns the single <see cref="Curation.Data.Plugin"/> created.
/// </summary>
public Curation.Data.Plugin Import(Stream stream)
{
    var importedObjects = _shareManager.ImportSharedObject(stream, deleteExisting: true);
    return importedObjects.OfType<Curation.Data.Plugin>().Single();
}