public ICleanup(SqlConnection sqlConn, MetaModel MetaAPI, Services DataAPI, MigrationConfiguration Configurations)
{
    _sqlConn = sqlConn;
    _metaAPI = MetaAPI;
    _dataAPI = DataAPI;
    _config = Configurations;
}
static void Main(string[] args)
{
    DataMoverLog.InfoAsync("Starting the application");

    MigrationConfiguration configuration = null;
    if (args.Length > 0)
    {
        DataMoverLog.DebugAsync("Received parameters:");
        for (var i = 0; i < args.Length; i++)
        {
            DataMoverLog.DebugAsync($"{i} - {args[i]}");
        }

        configuration = ConfigurationLoader.LoadConfiguration(args);
    }

    if (configuration == null)
    {
        DataMoverLog.ErrorAsync("Missing configuration.");
        PrintUsage();
    }
    else
    {
        // Dispose of the file stream once the lines have been enumerated.
        using (var stream = System.IO.File.Open(configuration.Files[0].Path, FileMode.Open, FileAccess.Read, FileShare.Read))
        {
            var loader = new CsvLoader(configuration.Files[0].Name);
            foreach (var l in loader.ReadLines(stream))
            {
                // Lines are only enumerated; no per-line processing happens here.
            }
        }
    }

    // Give the asynchronous loggers a moment to flush before terminating.
    Task.Delay(TimeSpan.FromMilliseconds(500)).Wait();
    DataMoverLog.Terminate();
}
public override void Before(MethodInfo methodUnderTest)
{
    Database.Delete("DataContext");
    var configuration = new MigrationConfiguration();
    var migrator = new DbSeederMigrator<DataContext>(configuration);
    migrator.MigrateToLatestVersion();
}
protected virtual void BeforeEachTest()
{
    _application = new TestUmbracoApplication();
    _application.Start();
    MigrationRecords = new DatabaseMigrationRecordRepository(_application.ApplicationContext.DatabaseContext.Database);
    MigrationConfiguration = new MigrationConfiguration();
    _migrator = CreateMigrator();
}
public MockedDBContext()
{
    SetUnique<EmailAddressDO, String>(ea => ea.Address);
    SetUnique<Fr8AccountDO, int?>(u => u.EmailAddressID);
    SetPrimaryKey<Fr8AccountDO, String>(u => u.Id);
    MigrationConfiguration.SeedIntoMockDb(new UnitOfWork(this, ObjectFactory.Container));
}
internal ApplicationConfiguration()
{
    this._extensibility = new ExtensibilityConfiguration();
    this.IgnoreSslErrors = true;
    this._customInstallers = new List<IWindsorInstaller>();
    this._database = this.Register<DatabaseConfiguration>(() => new DatabaseConfiguration(this));
    this._tasks = this.Register<TasksConfiguration>(() => new TasksConfiguration(this));
    this._logging = this.Register<LoggingConfiguration>(() => new LoggingConfiguration(this));
    this._migration = this.Register<MigrationConfiguration>(() => new MigrationConfiguration(this));
    this._hosts = this.Register<HostsConfiguration>(() => new HostsConfiguration(this));
    this._advanced = this.Register<AdvancedConfiguration>(() => new AdvancedConfiguration(this));
    this.Register<ApplicationConfiguration>(() => this);
}
public static MigrationConfiguration AddUCommerceFromNamespaceOfThis<T>(this MigrationConfiguration migration, DatabaseServer db, string identifyingName = null)
    where T : Migration
{
    if (migration == null)
    {
        throw new ArgumentNullException(nameof(migration));
    }

    migration.Application
        .UseUCommerce(uCommerce => uCommerce
            .AddMigrationFromNamespaceOfThis<T>(db, identifyingName));

    return migration;
}
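// A minimal usage sketch of the extension above (illustration only): the Migration
// subclass name "ProductCatalogueMigration" and the variable "db" (an existing
// DatabaseServer instance) are assumed placeholders, not taken from the snippets here.
migrationConfiguration
    .AddUCommerceFromNamespaceOfThis<ProductCatalogueMigration>(db, identifyingName: "ucommerce");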
internal ApplicationConfiguration()
{
    _extensibility = new ExtensibilityConfiguration();
    _database = Register(() => new DatabaseConfiguration(this));
    _services = Register(() => new ServicesConfiguration(this));
    _hosts = Register(() => new HostsConfiguration(this));
    _tasks = Register(() => new TasksConfiguration(this));
    _logging = Register(() => new LoggingConfiguration(this));
    _migration = Register(() => new MigrationConfiguration(this));
    _environment = Register(() => new EnvironmentConfiguration(this));
    Register(() => this);
}
public void ParseConfiguration_WithNoDBConnection_ShouldThrowInvalidConfigurationException()
{
    var json = @"
    {
        ""sql_files_directories"": [
            ""/your/directory/contains/sql"",
            ""../another/directory/contains/sql""
        ],
        ""log_output"": ""../directory/output""
    }
    ";

    var exception = Should.Throw<InvalidConfigurationException>(() => MigrationConfiguration.ParseConfiguration(json));

    exception.Message.ShouldBe("db_connection configuration was not found.");
}
public void ParseConfiguration_WithNoSqlFilesDirectories_ShouldThrowInvalidConfigurationException()
{
    var json = @"
    {
        ""db_connection"": {
            ""type"": ""mysql"",
            ""connection_string"": ""Server=HOST_NAME;Database=DB_NAME;Uid=USER_ID;Pwd=PASSWORD""
        },
        ""log_output"": ""../directory/output""
    }
    ";

    var exception = Should.Throw<InvalidConfigurationException>(() => MigrationConfiguration.ParseConfiguration(json));

    exception.Message.ShouldBe("sql_files_directories configuration was not found.");
}
private RavenConfiguration(string resourceName, ResourceType resourceType, string customConfigPath = null, bool skipEnvironmentVariables = false)
{
    _logger = LoggingSource.Instance.GetLogger<RavenConfiguration>(resourceName);

    ResourceName = resourceName;
    ResourceType = resourceType;
    _customConfigPath = customConfigPath;
    PathSettingBase<string>.ValidatePath(_customConfigPath);

    _configBuilder = new ConfigurationBuilder();
    if (skipEnvironmentVariables == false)
    {
        AddEnvironmentVariables();
    }
    AddJsonConfigurationVariables(customConfigPath);
    Settings = _configBuilder.Build();

    Core = new CoreConfiguration();
    Http = new HttpConfiguration();
    Replication = new ReplicationConfiguration();
    Cluster = new ClusterConfiguration();
    Etl = new EtlConfiguration();
    Storage = new StorageConfiguration();
    Security = new SecurityConfiguration();
    Backup = new BackupConfiguration();
    PerformanceHints = new PerformanceHintsConfiguration();
    Indexing = new IndexingConfiguration(this);
    Monitoring = new MonitoringConfiguration();
    Queries = new QueryConfiguration();
    Patching = new PatchingConfiguration();
    Logs = new LogsConfiguration();
    Server = new ServerConfiguration();
    Embedded = new EmbeddedConfiguration();
    Databases = new DatabaseConfiguration(Storage.ForceUsing32BitsPager);
    Memory = new MemoryConfiguration();
    Studio = new StudioConfiguration();
    Licensing = new LicenseConfiguration();
    Tombstones = new TombstoneConfiguration();
    Subscriptions = new SubscriptionsConfiguration();
    TransactionMergerConfiguration = new TransactionMergerConfiguration(Storage.ForceUsing32BitsPager);
    Notifications = new NotificationsConfiguration();
    Updates = new UpdatesConfiguration();
    Migration = new MigrationConfiguration();
    Integrations = new IntegrationsConfiguration();
}
public MigrationContext(
    [NotNull] IServiceContext services,
    [NotNull] UmbracoDatabase database,
    [NotNull] CacheHelper cacheHelper,
    [NotNull] IMigrationRecordRepository migrationRecords,
    [NotNull] MigrationConfiguration configuration,
    [CanBeNull] IMigrationLogger logger = null)
{
    Services = Argument.NotNull(nameof(services), services);
    Database = Argument.NotNull(nameof(database), database);
    MigrationRecords = Argument.NotNull(nameof(migrationRecords), migrationRecords);
    Configuration = Argument.NotNull(nameof(configuration), configuration);
    Logger = logger;
    _cacheHelper = cacheHelper;
}
public void ParseConfiguration_WithConnectionString_ShouldThrowInvalidConfigurationException()
{
    var json = @"
    {
        ""sql_files_directories"": [
            ""/your/directory/contains/sql"",
            ""../another/directory/contains/sql""
        ],
        ""db_connection"": {
            ""type"": ""mysql""
        },
        ""log_output"": ""../directory/output""
    }
    ";

    var exception = Should.Throw<InvalidConfigurationException>(() => MigrationConfiguration.ParseConfiguration(json));

    exception.Message.ShouldBe("db_connection.connection_string configuration was not found or has invalid value.");
}
public ExitCodeType Mutilate(IDataLoadJob listener)
{
    if (TimePeriodicityField == null)
    {
        throw new InvalidOperationException("TimePeriodicityField has not been set.");
    }

    var liveDatabaseInfo = GetLiveDatabaseInfo();

    if (TestContext)
    {
        // If we are operating inside a test, the client is responsible for providing a TableNamingScheme
        if (TableNamingScheme == null)
        {
            throw new InvalidOperationException("Executing within test context but no TableNamingScheme has been provided");
        }
    }
    else
    {
        // If we are not operating inside a test, hardwire the TableNamingScheme
        TableNamingScheme = new FixedStagingDatabaseNamer(liveDatabaseInfo.GetRuntimeName());
    }

    // Create invariant helpers
    _sqlHelper = new BackfillSqlHelper(TimePeriodicityField, _dbInfo, liveDatabaseInfo);
    _migrationConfiguration = new MigrationConfiguration(liveDatabaseInfo, LoadBubble.Live, LoadBubble.Staging, TableNamingScheme);

    // Starting with the TimePeriodicity table, we descend the join relationships to the leaf tables, then ascend back up to the TimePeriodicity table.
    // At each step we determine the effective date of the record by joining back to the TimePeriodicity table.
    // This allows us to remove updates that are older than the corresponding record in live:
    // - however, we don't remove rows that still have children, hence the recursion from leaves upwards
    // -- a record may be an 'old update' but have a child for insertion (i.e. the child is not in live); in this case the parent must remain in staging despite being 'old'
    // - 'old updates' that are not deleted (because they have new descendants) must have their own data updated to reflect what is in live if there is a difference between the two, otherwise we may overwrite live with stale data
    _tiWithTimeColumn = TimePeriodicityField.TableInfo;
    ProcessOldUpdatesInTable(_tiWithTimeColumn, new List<JoinInfo>());

    // Having processed all descendants of the TimePeriodicity table, we now recursively ascend back up through its predecessors to the top of the join tree,
    // doing effectively the same thing: removing items that are older than the corresponding live items and do not also have new descendants, and updating staging rows with live data where required.
    ProcessPredecessors(_tiWithTimeColumn, new List<JoinInfo>());

    return ExitCodeType.Success;
}
public void ParseConfiguration_WithInvalidSqlDirectoriesValue_ShouldThrowInvalidConfigurationException()
{
    var json = @"
    {
        ""sql_files_directories"": [
            ""/your/directory/contains/sql"",
            """"
        ],
        ""db_connection"": {
            ""type"": ""mysql"",
            ""connection_string"": ""Server=HOST_NAME;Database=DB_NAME;Uid=USER_ID;Pwd=PASSWORD""
        },
        ""log_output"": ""../directory/output""
    }
    ";

    var exception = Should.Throw<InvalidConfigurationException>(() => MigrationConfiguration.ParseConfiguration(json));

    exception.Message.ShouldBe("sql_files_directories has invalid value.");
}
public void ParseConfiguration_WithValidConfigurationJson_CreatesCorrectConfigurationObject()
{
    var expected = new MigrationConfiguration
    {
        SqlFilesDirectories = new List<string>
        {
            "/your/directory/contains/sql",
            "../another/directory/contains/sql"
        },
        DbConnection = new ConnectionConfiguration
        {
            Type = SqlType.MySql,
            ConnectionString = "Server=HOST_NAME;Database=DB_NAME;Uid=USER_ID;Pwd=PASSWORD"
        },
        LogOutputDirectory = "../directory/output"
    };

    var json = @"
    {
        ""sql_files_directories"": [
            ""/your/directory/contains/sql"",
            ""../another/directory/contains/sql""
        ],
        ""db_connection"": {
            ""type"": ""mysql"",
            ""connection_string"": ""Server=HOST_NAME;Database=DB_NAME;Uid=USER_ID;Pwd=PASSWORD""
        },
        ""log_output_directory"": ""../directory/output""
    }
    ";

    var configuration = MigrationConfiguration.ParseConfiguration(json);

    configuration.SqlFilesDirectories.Count.ShouldBe(expected.SqlFilesDirectories.Count);
    foreach (var directory in configuration.SqlFilesDirectories)
    {
        expected.SqlFilesDirectories.ShouldContain(directory);
    }
    configuration.DbConnection.Type.ShouldBe(expected.DbConnection.Type);
    configuration.DbConnection.ConnectionString.ShouldBe(expected.DbConnection.ConnectionString);
    configuration.LogOutputDirectory.ShouldBe(expected.LogOutputDirectory);
}
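// A minimal usage sketch assembled from the tests above: ParseConfiguration and the
// property names come from the tests; the file name "migration-config.json" is an
// assumed placeholder, not something the snippets specify.
var json = System.IO.File.ReadAllText("migration-config.json");
var config = MigrationConfiguration.ParseConfiguration(json);
Console.WriteLine($"SQL directories: {string.Join(", ", config.SqlFilesDirectories)}");
Console.WriteLine($"Connection type: {config.DbConnection.Type}");
Console.WriteLine($"Log output:      {config.LogOutputDirectory}");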
public KernelEventCompletedArguments Execute()
{
    // Hack to disable migrating when Visual Studio creates migrations.
    // This is due to the fact that Entity Framework uses some static information and this
    // application loads everything upfront.
    if (!System.AppDomain.CurrentDomain.SetupInformation.AppDomainManagerAssembly.Contains(
            "Microsoft.VisualStudio.Platform.AppDomainManager"))
    {
        var configuration = new MigrationConfiguration();
        var migrator = new DbSeederMigrator<DataContext>(configuration);

        try
        {
            migrator.MigrateToLatestVersion();
        }
        catch (System.Exception ex)
        {
            return new KernelEventCompletedArguments
            {
                AllowContinue = false,
                KernelEventSucceeded = false,
                Issues = new IError[]
                {
                    new GenericError
                    {
                        ErrorMessage = "Could not migrate to latest version.",
                        ErrorException = ex,
                        Severity = ErrorSeverity.Critical
                    }
                }
            };
        }
    }

    return new KernelEventCompletedArguments
    {
        AllowContinue = true,
        KernelEventSucceeded = true,
        Issues = null
    };
}
public override ExitCodeType Run(IDataLoadJob job, GracefulCancellationToken cancellationToken)
{
    if (Skip(job))
    {
        return ExitCodeType.Error;
    }

    //if (_migrationHost != null)
    //    throw new Exception("Load stage already started once");

    // After the user-defined load process, the framework handles the insert into staging and resolves any conflicts
    var stagingDbInfo = _databaseConfiguration.DeployInfo[LoadBubble.Staging];
    var liveDbInfo = _databaseConfiguration.DeployInfo[LoadBubble.Live];

    job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Migrating '" + stagingDbInfo + "' to '" + liveDbInfo + "'"));

    var migrationConfig = new MigrationConfiguration(stagingDbInfo, LoadBubble.Staging, LoadBubble.Live, _databaseConfiguration.DatabaseNamer);
    var migrationHost = new MigrationHost(stagingDbInfo, liveDbInfo, migrationConfig, _databaseConfiguration);
    migrationHost.Migrate(job, cancellationToken);

    return ExitCodeType.Success;
}
public ExportCustomFields(SqlConnection sqlConn, MetaModel MetaAPI, Services DataAPI, MigrationConfiguration Configurations, string InternalAssetType) : base(sqlConn, MetaAPI, DataAPI, Configurations) { _InternalAssetType = InternalAssetType; }
public ContactMapper(MigrationConfiguration config) : base(config) { }
public ImportFeatureGroups(SqlConnection sqlConn, MetaModel MetaAPI, Services DataAPI, MigrationConfiguration Configurations) : base(sqlConn, MetaAPI, DataAPI, Configurations) { }
public ExportMemberGroups(SqlConnection sqlConn, MetaModel MetaAPI, Services DataAPI, MigrationConfiguration Configurations) : base(sqlConn, MetaAPI, DataAPI, Configurations) { }
public ImportRegressionTests(SqlConnection sqlConn, MetaModel MetaAPI, Services DataAPI, MigrationConfiguration Configurations) : base(sqlConn, MetaAPI, DataAPI, Configurations) { }
public CleanupTasks(SqlConnection sqlConn, MetaModel MetaAPI, Services DataAPI, MigrationConfiguration Configurations) : base(sqlConn, MetaAPI, DataAPI, Configurations) { }
public SiteMapper(MigrationConfiguration config) : base(config) { }
public static async Task ConfigureMigrationsAsync(IServiceProvider serviceProvider) => await MigrationConfiguration.ConfigureMigrationsAsync(serviceProvider);
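// A hypothetical sketch of where a wrapper like the one above might be invoked at
// startup; the generic host wiring and the containing class name "MigrationBootstrapper"
// are assumptions for illustration, not taken from the snippet.
using var host = Host.CreateDefaultBuilder(args).Build();
await MigrationBootstrapper.ConfigureMigrationsAsync(host.Services);
await host.RunAsync();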
public void TestMerge(DatabaseType databaseType)
{
    // The Microsoft SQL Server case gets called for free in test setup (see base class)
    if (databaseType != DatabaseType.MicrosoftSQLServer)
    {
        SetupFromTo(databaseType);
    }

    var dt = new DataTable();
    var colName = new DataColumn("Name", typeof(string));
    var colAge = new DataColumn("Age", typeof(int));
    dt.Columns.Add(colName);
    dt.Columns.Add(colAge);
    dt.Columns.Add("Postcode", typeof(string));

    // Data in live awaiting to be updated
    dt.Rows.Add(new object[] { "Dave", 18, "DD3 1AB" });
    dt.Rows.Add(new object[] { "Dave", 25, "DD1 1XS" });
    dt.Rows.Add(new object[] { "Mango", 32, DBNull.Value });
    dt.Rows.Add(new object[] { "Filli", 32, "DD3 78L" });
    dt.Rows.Add(new object[] { "Mandrake", 32, DBNull.Value });

    dt.PrimaryKey = new[] { colName, colAge };

    var toTbl = To.CreateTable("ToTable", dt);

    Assert.IsTrue(toTbl.DiscoverColumn("Name").IsPrimaryKey);
    Assert.IsTrue(toTbl.DiscoverColumn("Age").IsPrimaryKey);
    Assert.IsFalse(toTbl.DiscoverColumn("Postcode").IsPrimaryKey);

    dt.Rows.Clear();

    // New data being loaded
    dt.Rows.Add(new object[] { "Dave", 25, "DD1 1PS" });       // update: change postcode to "DD1 1PS"
    dt.Rows.Add(new object[] { "Chutney", 32, DBNull.Value }); // new insert: Chutney
    dt.Rows.Add(new object[] { "Mango", 32, DBNull.Value });   // ignored because already present in dataset
    dt.Rows.Add(new object[] { "Filli", 32, DBNull.Value });   // update from "DD3 78L" to null
    dt.Rows.Add(new object[] { "Mandrake", 32, "DD1 1PS" });   // update from null to "DD1 1PS"
    dt.Rows.Add(new object[] { "Mandrake", 31, "DD1 1PS" });   // insert because Age is unique (and part of pk)

    var fromTbl = From.CreateTable(DatabaseName + "_ToTable_STAGING", dt);

    // Import the destination table as a TableInfo
    TableInfo ti;
    ColumnInfo[] cis;
    var cata = Import(toTbl, out ti, out cis);

    // Put the backup trigger on the live table (this will also create the needed hic_ columns etc)
    var triggerImplementer = new TriggerImplementerFactory(databaseType).Create(toTbl);
    triggerImplementer.CreateTrigger(new ThrowImmediatelyCheckNotifier());

    var configuration = new MigrationConfiguration(From, LoadBubble.Staging, LoadBubble.Live,
        new FixedStagingDatabaseNamer(toTbl.Database.GetRuntimeName(), fromTbl.Database.GetRuntimeName()));

    var lmd = new LoadMetadata(CatalogueRepository);
    cata.LoadMetadata_ID = lmd.ID;
    cata.SaveToDatabase();

    var migrationHost = new MigrationHost(From, To, configuration, new HICDatabaseConfiguration(lmd));

    // Set up a logging task
    var logServer = new ServerDefaults(CatalogueRepository).GetDefaultFor(PermissableDefaults.LiveLoggingServer_ID);
    var logManager = new LogManager(logServer);
    logManager.CreateNewLoggingTaskIfNotExists("CrossDatabaseMergeCommandTest");
    var dli = logManager.CreateDataLoadInfo("CrossDatabaseMergeCommandTest", "tests", "running test", "", true);

    var job = new ThrowImmediatelyDataLoadJob();
    job.DataLoadInfo = dli;
    job.RegularTablesToLoad = new List<ITableInfo>(new[] { ti });

    migrationHost.Migrate(job, new GracefulCancellationToken());

    var resultantDt = toTbl.GetDataTable();
    Assert.AreEqual(7, resultantDt.Rows.Count);

    AssertRowEquals(resultantDt, "Dave", 25, "DD1 1PS");
    AssertRowEquals(resultantDt, "Chutney", 32, DBNull.Value);
    AssertRowEquals(resultantDt, "Mango", 32, DBNull.Value);
    AssertRowEquals(resultantDt, "Filli", 32, DBNull.Value);
    AssertRowEquals(resultantDt, "Mandrake", 32, "DD1 1PS");
    AssertRowEquals(resultantDt, "Mandrake", 31, "DD1 1PS");
    AssertRowEquals(resultantDt, "Dave", 18, "DD3 1AB");

    var archival = logManager.GetArchivalDataLoadInfos("CrossDatabaseMergeCommandTest", new CancellationToken());
    var log = archival.First();

    Assert.AreEqual(dli.ID, log.ID);
    Assert.AreEqual(2, log.TableLoadInfos.Single().Inserts);
    Assert.AreEqual(3, log.TableLoadInfos.Single().Updates);
}
public TenentMapper(MigrationConfiguration config) : base(config) { }
public ExportAttachments(SqlConnection sqlConn, MetaModel MetaAPI, Services DataAPI, V1APIConnector ImageConnector, MigrationConfiguration Configurations) : base(sqlConn, MetaAPI, DataAPI, Configurations) { _imageConnector = ImageConnector; }
public SecurityContactMapper(MigrationConfiguration config) : base(config) { }
public ExportSchedules(SqlConnection sqlConn, MetaModel MetaAPI, Services DataAPI, MigrationConfiguration Configurations) : base(sqlConn, MetaAPI, DataAPI, Configurations) { }
public void ParseConfiguration_WithInvalidConfigurationJson_ShouldThrowInvalidConfigurationException()
{
    var exception = Should.Throw<InvalidConfigurationException>(() => MigrationConfiguration.ParseConfiguration("invalid configuration"));

    exception.Message.ShouldBe("Invalid configuration file.");
}
public ImportCustomFields(SqlConnection sqlConn, MetaModel MetaAPI, Services DataAPI, MigrationConfiguration Configurations, string AssetType) : base(sqlConn, MetaAPI, DataAPI, Configurations) { _assetType = AssetType; }
public ImportPrograms(SqlConnection sqlConn, MetaModel MetaAPI, Services DataAPI, MigrationConfiguration Configurations) : base(sqlConn, MetaAPI, DataAPI, Configurations) { }
protected ConfigurableMapper(MigrationConfiguration config) { Configuration = config; }