/// <summary> /// Artificial constructor called when the plugin is loaded /// Uses the obsolete mysql_connection.ini if connect string is empty. /// </summary> /// <param name="connect">connect string</param> public void Initialise(string connect) { if (connect != String.Empty) { database = new MySQLManager(connect); } else { m_log.Warn("Using deprecated mysql_connection.ini. Please update database_connect in GridServer_Config.xml and we'll use that instead"); IniFile GridDataMySqlFile = new IniFile("mysql_connection.ini"); string settingHostname = GridDataMySqlFile.ParseFileReadValue("hostname"); string settingDatabase = GridDataMySqlFile.ParseFileReadValue("database"); string settingUsername = GridDataMySqlFile.ParseFileReadValue("username"); string settingPassword = GridDataMySqlFile.ParseFileReadValue("password"); string settingPooling = GridDataMySqlFile.ParseFileReadValue("pooling"); string settingPort = GridDataMySqlFile.ParseFileReadValue("port"); database = new MySQLManager(settingHostname, settingDatabase, settingUsername, settingPassword, settingPooling, settingPort); } // This actually does the roll forward assembly stuff Assembly assem = GetType().Assembly; Migration m = new Migration(database.Connection, assem, "LogStore"); // TODO: After rev 6000, remove this. People should have // been rolled onto the new migration code by then. TestTables(m); m.Update(); }
public void Initialise(string connectionString) { m_connectionString = connectionString; try { m_log.Info("[REGION DB]: MySql - connecting: " + Util.GetDisplayConnectionString(m_connectionString)); } catch (Exception e) { m_log.Debug("Exception: password not found in connection string\n" + e.ToString()); } GetWaitTimeout(); using (MySqlConnection dbcon = new MySqlConnection(m_connectionString)) { dbcon.Open(); Assembly assem = GetType().Assembly; Migration m = new Migration(dbcon, assem, "EstateStore"); m.Update(); Type t = typeof(EstateSettings); m_Fields = t.GetFields(BindingFlags.NonPublic | BindingFlags.Instance | BindingFlags.DeclaredOnly); foreach (FieldInfo f in m_Fields) { if (f.Name.Substring(0, 2) == "m_") m_FieldMap[f.Name.Substring(2)] = f; } } }
/// <summary> /// Initialises the region datastore /// </summary> /// <param name="connectionString">The connection string.</param> public void Initialise(string connectionString) { m_connectionString = connectionString; _Database = new MSSQLManager(connectionString); using (SqlConnection conn = new SqlConnection(connectionString)) { conn.Open(); //New Migration settings Migration m = new Migration(conn, Assembly, "RegionStore"); m.Update(); // Clean dropped attachments // try { using (SqlCommand cmd = conn.CreateCommand()) { cmd.CommandText = "delete from prims where prims.UUID in (select UUID from primshapes where PCode = 9 and State <> 0); delete from primshapes where PCode = 9 and State <> 0"; cmd.ExecuteNonQuery(); } } catch (Exception ex) { _Log.Error("[REGION DB]: Error cleaning up dropped attachments: " + ex.Message); } } }
public void Initialise(string connectionString) { m_connectionString = connectionString; using (MySqlConnection dbcon = new MySqlConnection(m_connectionString)) { dbcon.Open(); // Apply new Migrations // Assembly assem = GetType().Assembly; Migration m = new Migration(dbcon, assem, "RegionStore"); m.Update(); // Clean dropped attachments // try { using (MySqlCommand cmd = dbcon.CreateCommand()) { cmd.CommandText = "delete from prims, primshapes using prims " + "left join primshapes on prims.uuid = primshapes.uuid " + "where PCode = 9 and State <> 0"; ExecuteNonQuery(cmd); } } catch (MySqlException ex) { m_log.Error("[REGION DB]: Error cleaning up dropped attachments: " + ex.Message); } } }
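The OpenSim examples above all repeat the same pattern: open a connection, construct Migration with that connection, the assembly that embeds the migration scripts, and a store name, then call Update(). A minimal hedged sketch of that pattern follows; the namespace import, connection string, and class name are assumptions, not taken from the source.

using System.Reflection;
using MySql.Data.MySqlClient;
using OpenSim.Data; // assumed location of the Migration class

public static class RegionStoreMigrationRunner
{
    public static void Run(string connectionString)
    {
        // The assembly is expected to embed the "RegionStore" migration scripts.
        Assembly assem = typeof(RegionStoreMigrationRunner).Assembly;

        using (MySqlConnection dbcon = new MySqlConnection(connectionString))
        {
            dbcon.Open();

            // Rolls the schema forward to the latest version described
            // by the embedded RegionStore migration scripts.
            Migration m = new Migration(dbcon, assem, "RegionStore");
            m.Update();
        }
    }
}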
/// <param name="migration">Migration parameter</param> public void Add(Migration migration) { if (_migrations.Any(m => m.Type == migration.Type && m.Version == migration.Version)) throw new NotSupportedException("Migration versions for same type must differ"); _migrations.AddLast(migration); }
public void Init() { SuperInit(); // If we manage to connect to the database with the user // and password above it is our test database, and run // these tests. If anything goes wrong, ignore these // tests. try { database = new MySQLManager(connect); db = new MySQLGridData(); db.Initialise(connect); } catch (Exception e) { m_log.Error("Exception {0}", e); Assert.Ignore(); } // This actually does the roll forward assembly stuff Assembly assem = GetType().Assembly; using (MySqlConnection dbcon = new MySqlConnection(connect)) { dbcon.Open(); Migration m = new Migration(dbcon, assem, "AssetStore"); m.Update(); } }
public void Initialise(string connectionString) { m_ConnectionString = connectionString; m_Connection = new MySqlConnection(m_ConnectionString); m_Connection.Open(); // Apply new Migrations // Assembly assem = GetType().Assembly; Migration m = new Migration(m_Connection, assem, "RegionStore"); m.Update(); // NOTE: This is a very slow query that times out on regions with a lot of prims. // I'm told that it is no longer relevant so it's commented out now, but if it // is relevant it should be added as a console command instead of part of the // startup phase // Clean dropped attachments // //try //{ // using (MySqlCommand cmd = m_Connection.CreateCommand()) // { // cmd.CommandText = "delete from prims, primshapes using prims " + // "left join primshapes on prims.uuid = primshapes.uuid " + // "where PCode = 9 and State <> 0"; // ExecuteNonQuery(cmd); // } //} //catch (MySqlException ex) //{ // m_log.Error("[REGION DB]: Error cleaning up dropped attachments: " + ex.Message); //} }
public void Initialise(string connectionString) { m_connectionString = connectionString; m_log.Info("[ESTATE DB]: Sqlite - connecting: "+m_connectionString); m_connection = new SqliteConnection(m_connectionString); try { m_connection.Open(); } catch (Exception ex) { throw new Exception("SQLite has errored out on opening the database. If you are on a 64 bit system, please run OpenSim.32BitLaunch.exe and try again. If this is not a 64 bit error :" + ex); } Assembly assem = GetType().Assembly; Migration m = new Migration(m_connection, assem, "EstateStore"); m.Update(); //m_connection.Close(); // m_connection.Open(); Type t = typeof(EstateSettings); m_Fields = t.GetFields(BindingFlags.NonPublic | BindingFlags.Instance | BindingFlags.DeclaredOnly); foreach (FieldInfo f in m_Fields) if (f.Name.Substring(0, 2) == "m_") m_FieldMap[f.Name.Substring(2)] = f; }
public void Initialise(string connectionString) { if (Util.IsWindows()) Util.LoadArchSpecificWindowsDll("sqlite3.dll"); m_connectionString = connectionString; m_log.Info("[ESTATE DB]: Sqlite - connecting: "+m_connectionString); m_connection = new SqliteConnection(m_connectionString); m_connection.Open(); Migration m = new Migration(m_connection, Assembly, "EstateStore"); m.Update(); //m_connection.Close(); // m_connection.Open(); Type t = typeof(EstateSettings); m_Fields = t.GetFields(BindingFlags.NonPublic | BindingFlags.Instance | BindingFlags.DeclaredOnly); foreach (FieldInfo f in m_Fields) if (f.Name.Substring(0, 2) == "m_") m_FieldMap[f.Name.Substring(2)] = f; }
public void Initialise(string connectionString) { m_connectionString = connectionString; m_log.Info("[ESTATE DB]: Sqlite - connecting: "+m_connectionString); m_connection = new SqliteConnection(m_connectionString); m_connection.Open(); Assembly assem = GetType().Assembly; Migration m = new Migration(m_connection, assem, "EstateStore"); m.Update(); //m_connection.Close(); // m_connection.Open(); Type t = typeof(EstateSettings); m_Fields = t.GetFields(BindingFlags.NonPublic | BindingFlags.Instance | BindingFlags.DeclaredOnly); foreach (FieldInfo f in m_Fields) if (f.Name.Substring(0, 2) == "m_") m_FieldMap[f.Name.Substring(2)] = f; }
/// <summary> /// Initialises the estatedata class. /// </summary> /// <param name="connectionString">connectionString.</param> public void Initialise(string connectionString) { if (!string.IsNullOrEmpty(connectionString)) { m_connectionString = connectionString; _Database = new PGSQLManager(connectionString); } //Migration settings using (NpgsqlConnection conn = new NpgsqlConnection(m_connectionString)) { conn.Open(); Migration m = new Migration(conn, GetType().Assembly, "EstateStore"); m.Update(); } //Interesting way to get parameters! Maybe implement that also with other types Type t = typeof(EstateSettings); _Fields = t.GetFields(BindingFlags.NonPublic | BindingFlags.Instance | BindingFlags.DeclaredOnly); foreach (FieldInfo f in _Fields) { if (f.Name.Substring(0, 2) == "m_") _FieldMap[f.Name.Substring(2)] = f; } }
public MySqlUserAccountData(string connectionString, string realm) : base(connectionString) { m_Realm = realm; Migration m = new Migration(m_Connection, GetType().Assembly, "UserStore"); m.Update(); }
public SQLiteGenericData(string connectionString) { m_Connection = new SqliteConnection(connectionString); m_Connection.Open(); Migration m = new Migration(m_Connection, this.GetType().Assembly, "GenericStore"); m.Update(); }
// private int m_LastExpire = 0; public MySqlRegionData(string connectionString, string realm) : base(connectionString) { m_Realm = realm; Migration m = new Migration(m_Connection, GetType().Assembly, "GridStore"); m.Update(); }
public void DeleteSuccessfullRollback(Migration migration, System.Data.IDbConnection openConnection) { using (SqlCommand cmd = new SqlCommand(@"DELETE FROM [DBAppliedMigrations] WHERE [Migration_id]=@migrationId", (SqlConnection)openConnection)) { cmd.Parameters.Add(new SqlParameter("migrationId", migration.Id)); cmd.ExecuteNonQuery(); } }
public void Should_Add_Migration() { var migration = new Migration(); migration.Version = "0.0"; var migrationsService = new MigrationsService(); migrationsService.Register(migration); migrationsService.Migrations.Contains(migration).Should().BeTrue("because we just added this migration"); }
public MSSQLGenericData(string connectionString) { m_ConnectionString = connectionString; using (SqlConnection conn = new SqlConnection(m_ConnectionString)) { conn.Open(); Migration m = new Migration(conn, GetType().Assembly, "GenericStore"); m.Update(); } }
void Init() { using (NpgsqlConnection dbcon = new NpgsqlConnection(ConnectionString)) { dbcon.Open(); Migration m = new Migration(dbcon, Assembly, "UserProfiles"); m.Update(); } }
/// <summary> /// <para>Initialises Asset interface</para> /// <para> /// <list type="bullet"> /// <item>Loads and initialises the MySQL storage plugin.</item> /// <item>Warns and uses the obsolete mysql_connection.ini if connect string is empty.</item> /// <item>Check for migration</item> /// </list> /// </para> /// </summary> /// <param name="connect">connect string</param> public override void Initialise(string connect) { m_connectionString = connect; using (MySqlConnection dbcon = new MySqlConnection(m_connectionString)) { dbcon.Open(); Migration m = new Migration(dbcon, Assembly, "AssetStore"); m.Update(); } }
public MSSQLUserAccountData(string connectionString, string realm) { m_Realm = realm; m_ConnectionString = connectionString; using (SqlConnection conn = new SqlConnection(m_ConnectionString)) { conn.Open(); Migration m = new Migration(conn, GetType().Assembly, "UserStore"); m.Update(); } }
public MySqlGenericData(string connect) { m_connectionString = connect; using (MySqlConnection dbcon = new MySqlConnection(m_connectionString)) { dbcon.Open(); Migration m = new Migration(dbcon, this.GetType().Assembly, "GenericStore"); m.Update(); } }
/// <summary> /// The method that executes to apply a migration. /// </summary> public override void Migrate() { var migrationTableTemplate = new Migration(); DatabaseSession.Instance.Connector.CreateTable(migrationTableTemplate); foreach (var property in migrationTableTemplate.GetType().GetProperties().Where(p => !Attribute.IsDefined(p, typeof(IgnoreDataMemberAttribute))).ToList()) { DatabaseSession.Instance.Connector.CreateColumn(property, migrationTableTemplate); } DatabaseSession.Instance.Connector.CreatePrimaryKeyConstraint(migrationTableTemplate); }
public PGSQLAuthenticationData(string connectionString, string realm) { m_Realm = realm; m_ConnectionString = connectionString; using (NpgsqlConnection conn = new NpgsqlConnection(m_ConnectionString)) { conn.Open(); Migration m = new Migration(conn, GetType().Assembly, "AuthStore"); m_database = new PGSQLManager(m_ConnectionString); m.Update(); } }
public async Task StartNewMigration() { var repoNames = _repos.Select(repo => repo.Repository.FullName).ToList(); var migrationRequest = new StartMigrationRequest(repoNames); _migrationContext = await _gitHub.Migration.Migrations.Start(_orgName, migrationRequest); Assert.Equal(3, _migrationContext.Repositories.Count); Assert.Equal(Migration.MigrationState.Pending, _migrationContext.State); ChecksMigrationCompletion(); }
public void Shuld_fail_when_an_applyed_migration_is_not_available() { _database.Stub(x => x.MigrationsTableExists()).Return(true); var migration1 = new Migration(1, "up", "dn"); var migration2 = new Migration(2, "up", "dn"); _repository.Stub(x => x.GetAll()).Return(new Dictionary<long, Migration> { { 1, migration1 }, { 2, migration2 } }); _database.Stub(x => x.GetApplyedMigrations()).Return(new long[] { 3 }); Executing.This(() => _target.GetApplyedMigrations()).Should().Throw<ApplicationException>().And.ValueOf.Message.Should().Be.EqualTo("Migration #3 has been applyed to database, but not found in migrations directory"); }
public MySqlAuthenticationData(string connectionString, string realm) : base(connectionString) { m_Realm = realm; m_connectionString = connectionString; using (MySqlConnection dbcon = new MySqlConnection(m_connectionString)) { dbcon.Open(); Migration m = new Migration(dbcon, Assembly, "AuthStore"); m.Update(); } }
/// <summary> /// Initialises the region datastore /// </summary> /// <param name="connectionString">The connection string.</param> public void Initialise(string connectionString) { m_connectionString = connectionString; _Database = new PGSQLManager(connectionString); using (NpgsqlConnection conn = new NpgsqlConnection(connectionString)) { conn.Open(); //New Migration settings Migration m = new Migration(conn, Assembly, "RegionStore"); m.Update(); } }
//private string m_connectionString; public MySqlRegionData(string connectionString, string realm) : base(connectionString) { m_Realm = realm; m_connectionString = connectionString; using (MySqlConnection dbcon = new MySqlConnection(m_connectionString)) { dbcon.Open(); Migration m = new Migration(dbcon, GetType().Assembly, "GridStore"); m.Update(); } }
public MSSQLRegionData(string connectionString, string realm) { m_Realm = realm; m_ConnectionString = connectionString; m_database = new MSSQLManager(connectionString); using (SqlConnection conn = new SqlConnection(m_ConnectionString)) { conn.Open(); Migration m = new Migration(conn, GetType().Assembly, "GridStore"); m.Update(); } }
/// <summary> /// <para>Initialises Asset interface</para> /// <para> /// <list type="bullet"> /// <item>Loads and initialises the MySQL storage plugin.</item> /// <item>Warns and uses the obsolete mysql_connection.ini if connect string is empty.</item> /// <item>Check for migration</item> /// </list> /// </para> /// </summary> /// <param name="connect">connect string</param> public override void Initialise(string connect) { m_connectionString = connect; // This actually does the roll forward assembly stuff Assembly assem = GetType().Assembly; using (MySqlConnection dbcon = new MySqlConnection(m_connectionString)) { dbcon.Open(); Migration m = new Migration(dbcon, assem, "AssetStore"); m.Update(); } }
/// <summary> /// applique la migration /// </summary> /// <param name="sqLiteConnection"></param> /// <param name="migration"></param> /// <param name="script"></param> private static List <Migration> ApplyMigration(SQLiteConnection sqLiteConnection, Migration migration, string script) { sqLiteConnection.BeginTransaction(); var queries = script.Split(';'); foreach (var query in queries) { if (string.IsNullOrWhiteSpace(query)) { continue; } sqLiteConnection.Execute(query); } migration.Duration = DateTime.Now.Subtract(migration.DateCreation).TotalMilliseconds; sqLiteConnection.Insert(migration); sqLiteConnection.Commit(); return(sqLiteConnection.Table <Migration>().ToList()); }
protected override void OnSetTables(IDataTable[] tables, Migration mode) { }
public static void SqlFile(this Migration dbMigration, MigrationBuilder migrationBuilder, string path) { migrationBuilder.Sql(File.ReadAllText(path)); }
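The SqlFile extension simply inlines the contents of a script file into an EF Core migration as a raw SQL operation. A short hedged usage sketch from inside a migration's Up(); the class name, script path, and rollback statement are placeholders.

using Microsoft.EntityFrameworkCore.Migrations;

public partial class SeedReferenceData : Migration
{
    protected override void Up(MigrationBuilder migrationBuilder)
    {
        // Reads the placeholder script and emits it as a raw SQL migration operation.
        this.SqlFile(migrationBuilder, "Scripts/seed_reference_data.sql");
    }

    protected override void Down(MigrationBuilder migrationBuilder)
    {
        migrationBuilder.Sql("DELETE FROM ReferenceData;"); // placeholder rollback
    }
}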
public static Column WithAuditBy(this Migration migration) { return(migration.WithReference("created_by")); }
protected override String CheckColumnsChange(IDataTable entitytable, IDataTable dbtable, Migration mode) { foreach (var item in entitytable.Columns) { // An identity (auto-increment) column must be the primary key if (item.Identity && !item.PrimaryKey) { // Clear the primary-key flag on every column item.Table.Columns.ForEach(dc => dc.PrimaryKey = false); // Make the identity column the primary key item.PrimaryKey = true; break; } } // Pass Migration.ReadOnly so the base class only generates the SQL instead of executing it var sql = base.CheckColumnsChange(entitytable, dbtable, Migration.ReadOnly); if (String.IsNullOrEmpty(sql)) { return(sql); } // Only altering or dropping columns requires rebuilding the table if (!sql.Contains("Alter Column") && !sql.Contains("Drop Column")) { if (mode > Migration.ReadOnly) { Database.CreateSession().Execute(sql); } return(sql); } var sql2 = sql; sql = ReBuildTable(entitytable, dbtable); if (sql.IsNullOrEmpty() || mode == Migration.ReadOnly) { return(sql); } // Log why the table has to be rebuilt WriteLog("SQLite needs to rebuild the table because it cannot execute: {0}", sql2); var flag = true; // If the mode does not allow dropping columns if (mode < Migration.Full) { // Check whether the database has any column that the entity model lacks foreach (var item in dbtable.Columns) { var dc = entitytable.GetColumn(item.ColumnName); if (dc == null) { flag = false; break; } } } if (flag) { Database.CreateSession().Execute(sql); } return(sql); }
/// <summary> /// This API supports the Entity Framework Core infrastructure and is not intended to be used /// directly from your code. This API may change or be removed in future releases. /// </summary> protected virtual void PopulateMigrations( IEnumerable <string> appliedMigrationEntries, string targetMigration, out IReadOnlyList <Migration> migrationsToApply, out IReadOnlyList <Migration> migrationsToRevert, out Migration actualTargetMigration) { var appliedMigrations = new Dictionary <string, TypeInfo>(); var unappliedMigrations = new Dictionary <string, TypeInfo>(); var appliedMigrationEntrySet = new HashSet <string>(appliedMigrationEntries, StringComparer.OrdinalIgnoreCase); if (_migrationsAssembly.Migrations.Count == 0) { _logger.MigrationsNotFound(this, _migrationsAssembly); } foreach (var migration in _migrationsAssembly.Migrations) { if (appliedMigrationEntrySet.Contains(migration.Key)) { appliedMigrations.Add(migration.Key, migration.Value); } else { unappliedMigrations.Add(migration.Key, migration.Value); } } if (string.IsNullOrEmpty(targetMigration)) { migrationsToApply = unappliedMigrations .OrderBy(m => m.Key) .Select(p => _migrationsAssembly.CreateMigration(p.Value, _activeProvider)) .ToList(); migrationsToRevert = Array.Empty <Migration>(); actualTargetMigration = null; } else if (targetMigration == Migration.InitialDatabase) { migrationsToApply = Array.Empty <Migration>(); migrationsToRevert = appliedMigrations .OrderByDescending(m => m.Key) .Select(p => _migrationsAssembly.CreateMigration(p.Value, _activeProvider)) .ToList(); actualTargetMigration = null; } else { targetMigration = _migrationsAssembly.GetMigrationId(targetMigration); migrationsToApply = unappliedMigrations .Where(m => string.Compare(m.Key, targetMigration, StringComparison.OrdinalIgnoreCase) <= 0) .OrderBy(m => m.Key) .Select(p => _migrationsAssembly.CreateMigration(p.Value, _activeProvider)) .ToList(); migrationsToRevert = appliedMigrations .Where(m => string.Compare(m.Key, targetMigration, StringComparison.OrdinalIgnoreCase) > 0) .OrderByDescending(m => m.Key) .Select(p => _migrationsAssembly.CreateMigration(p.Value, _activeProvider)) .ToList(); actualTargetMigration = appliedMigrations .Where(m => string.Compare(m.Key, targetMigration, StringComparison.OrdinalIgnoreCase) == 0) .Select(p => _migrationsAssembly.CreateMigration(p.Value, _activeProvider)) .SingleOrDefault(); } }
private IEnumerable <Func <IReadOnlyList <IRelationalCommand> > > GetMigrationCommands( IReadOnlyList <HistoryRow> appliedMigrationEntries, string targetMigration = null) { var appliedMigrations = new Dictionary <string, TypeInfo>(); var unappliedMigrations = new Dictionary <string, TypeInfo>(); foreach (var migration in _migrationsAssembly.Migrations) { if (appliedMigrationEntries.Any( e => string.Equals(e.MigrationId, migration.Key, StringComparison.OrdinalIgnoreCase))) { appliedMigrations.Add(migration.Key, migration.Value); } else { unappliedMigrations.Add(migration.Key, migration.Value); } } IReadOnlyList <Migration> migrationsToApply; IReadOnlyList <Migration> migrationsToRevert; if (string.IsNullOrEmpty(targetMigration)) { migrationsToApply = unappliedMigrations .Select(p => _migrationsAssembly.CreateMigration(p.Value, _activeProvider)) .ToList(); migrationsToRevert = new Migration[0]; } else if (targetMigration == Migration.InitialDatabase) { migrationsToApply = new Migration[0]; migrationsToRevert = appliedMigrations .OrderByDescending(m => m.Key) .Select(p => _migrationsAssembly.CreateMigration(p.Value, _activeProvider)) .ToList(); } else { targetMigration = _migrationsAssembly.GetMigrationId(targetMigration); migrationsToApply = unappliedMigrations .Where(m => string.Compare(m.Key, targetMigration, StringComparison.OrdinalIgnoreCase) <= 0) .Select(p => _migrationsAssembly.CreateMigration(p.Value, _activeProvider)) .ToList(); migrationsToRevert = appliedMigrations .Where(m => string.Compare(m.Key, targetMigration, StringComparison.OrdinalIgnoreCase) > 0) .OrderByDescending(m => m.Key) .Select(p => _migrationsAssembly.CreateMigration(p.Value, _activeProvider)) .ToList(); } for (var i = 0; i < migrationsToRevert.Count; i++) { var migration = migrationsToRevert[i]; yield return(() => { _logger.LogInformation(RelationalStrings.RevertingMigration(migration.GetId())); return GenerateDownSql( migration, i != migrationsToRevert.Count - 1 ? migrationsToRevert[i + 1] : null); }); } foreach (var migration in migrationsToApply) { yield return(() => { _logger.LogInformation(RelationalStrings.ApplyingMigration(migration.GetId())); return GenerateUpSql(migration); }); } }
public static Column WithReference(this Migration migration, string column) { return(new Column(column, DbType.Int32)); }
/// <summary> /// The method that executes if a migration needs to be rolled back. /// </summary> public override void Reverse() { var migrationTableTemplate = new Migration(); DatabaseSession.Instance.Connector.DeleteTable(migrationTableTemplate); }
/// <inheritdoc /> public override string ToString() { return(string.Format("MigrationType: {0}, TransactionBehavior: {1}", Migration.GetType(), TransactionBehavior)); }
public static Column WithId(this Migration migration) { return(migration.WithId("id")); }
static void Main(string[] args) { Toolkit.Init(new ToolkitOptions { Backend = PlatformBackend.PreferNative, EnableHighResolution = true }); Version = Assembly.GetEntryAssembly().GetCustomAttribute <AssemblyInformationalVersionAttribute>().InformationalVersion; Console.Title = $"PangoNX Debugger {Version}"; string systemPath = Environment.GetEnvironmentVariable("Path", EnvironmentVariableTarget.Machine); Environment.SetEnvironmentVariable("Path", $"{Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "bin")};{systemPath}"); GLib.ExceptionManager.UnhandledException += Glib_UnhandledException; // Initialize the configuration ConfigurationState.Initialize(); // Initialize the logger system LoggerModule.Initialize(); // Initialize Discord integration DiscordIntegrationModule.Initialize(); Logger.PrintInfo(LogClass.Application, $"PangoNX Debugger Version: {Version}"); Logger.PrintInfo(LogClass.Application, $"Operating System: {SystemInfo.Instance.OsDescription}"); Logger.PrintInfo(LogClass.Application, $"CPU: {SystemInfo.Instance.CpuName}"); Logger.PrintInfo(LogClass.Application, $"Total RAM: {SystemInfo.Instance.RamSizeInMB}"); string localConfigurationPath = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "Config.json"); string globalBasePath = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData), "PangoNX Debugger"); string globalConfigurationPath = Path.Combine(globalBasePath, "Config.json"); // Now load the configuration as the other subsystems are now registered if (File.Exists(localConfigurationPath)) { ConfigurationPath = localConfigurationPath; ConfigurationFileFormat configurationFileFormat = ConfigurationFileFormat.Load(localConfigurationPath); ConfigurationState.Instance.Load(configurationFileFormat, ConfigurationPath); } else if (File.Exists(globalConfigurationPath)) { ConfigurationPath = globalConfigurationPath; ConfigurationFileFormat configurationFileFormat = ConfigurationFileFormat.Load(globalConfigurationPath); ConfigurationState.Instance.Load(configurationFileFormat, ConfigurationPath); } else { // No configuration, we load the default values and save it on disk ConfigurationPath = globalConfigurationPath; // Make sure to create the PangoNX Debugger directory if needed. Directory.CreateDirectory(globalBasePath); ConfigurationState.Instance.LoadDefault(); ConfigurationState.Instance.ToFileFormat().SaveConfig(globalConfigurationPath); } Profile.Initialize(); Application.Init(); string globalProdKeysPath = Path.Combine(globalBasePath, "system", "prod.keys"); string userProfilePath = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.UserProfile), ".switch", "prod.keys"); if (!File.Exists(globalProdKeysPath) && !File.Exists(userProfilePath) && !Migration.IsMigrationNeeded()) { GtkDialog.CreateWarningDialog("Key file was not found", "Please refer to `KEYS.md` for more info"); } MainWindow mainWindow = new MainWindow(); mainWindow.Show(); if (args.Length == 1) { mainWindow.LoadApplication(args[0]); } Application.Run(); }
/// <summary> /// Update the identified migration in the journal as having been completed. /// It can be assumed that this will only be called for the most recently started mgiration. /// </summary> /// <param name="migration"></param> public void RecordCompleteMigration(Migration migration) { _setCompleteCommand.Parameters["@Name"].Value = migration.Name; _setCompleteCommand.Parameters["@CompletedTs"].Value = DateTime.Now; _setCompleteCommand.ExecuteNonQuery(); }
/// <summary>已重载。因为内存数据库无法检测到架构,不知道表是否已存在,所以需要自己维护</summary> /// <param name="entitytable"></param> /// <param name="dbtable"></param> /// <param name="mode"></param> protected override void CheckTable(IDataTable entitytable, IDataTable dbtable, Migration mode) { if (dbtable == null && (Database as SQLite).IsMemoryDatabase) { if (memoryTables.Any(t => t.TableName.EqualIgnoreCase(entitytable.TableName))) { return; } memoryTables.Add(entitytable); } base.CheckTable(entitytable, dbtable, mode); }
/// <summary> /// This API supports the Entity Framework Core infrastructure and is not intended to be used /// directly from your code. This API may change or be removed in future releases. /// </summary> public static string GetId([NotNull] this Migration migration) => migration.GetType().GetTypeInfo().GetCustomAttribute <MigrationAttribute>()?.Id;
/// <summary> /// Generates down SQL scripts. /// </summary> /// <param name="plugin">The plugin.</param> /// <param name="migration">The migration.</param> /// <param name="previousMigration">The previous migration.</param> /// <returns></returns> protected virtual IReadOnlyList <MigrationCommand> GenerateDownSql(EntityPlugin plugin, Migration migration, Migration previousMigration) { var historyScript = PluginHistoryRepository.GetDeleteScript(plugin, migration.GetType().GetCustomAttribute <MigrationAttribute>().Id); var historyCommand = RawSqlCommandBuilder.Build(historyScript); return(MigrationsSqlGenerator .Generate(migration.DownOperations, previousMigration?.TargetModel) .Concat(new[] { new MigrationCommand(historyCommand, CurrentContext.Context, CommandLogger) }) .ToList()); }
protected override void OnCoordinateBeingLoaded(ChaFileCoordinate coordinate) { /* * if (JetPack.CharaStudio.Running) * TriggerEnabled = false; */ if (!JetPack.CharaMaker.Inside || (JetPack.CharaMaker.Inside && _loadCoordinateExtdata)) { TriggerPropertyList.RemoveAll(x => x.Coordinate == _currentCoordinateIndex); TriggerGroupList.RemoveAll(x => x.Coordinate == _currentCoordinateIndex); PluginData _pluginData = GetCoordinateExtendedData(coordinate); if (_pluginData != null) { if (_pluginData.version > _pluginDataVersion) { _logger.Log(LogLevel.Error | LogLevel.Message, $"[OnCoordinateBeingLoaded][{CharaFullName}] ExtendedData.version: {_pluginData.version} is newer than your plugin"); } else if (_pluginData.version < _pluginDataVersion) { _logger.Log(LogLevel.Info, $"[OnCoordinateBeingLoaded][{CharaFullName}][Migrating from ver. {_pluginData.version}]"); Migration.ConvertOutfitPluginData(_currentCoordinateIndex, _pluginData, ref TriggerPropertyList, ref TriggerGroupList); } else { if (_pluginData.data.TryGetValue("TriggerPropertyList", out object _loadedTriggerProperty) && _loadedTriggerProperty != null) { List <TriggerProperty> _tempTriggerProperty = MessagePackSerializer.Deserialize <List <TriggerProperty> >((byte[])_loadedTriggerProperty); if (_tempTriggerProperty?.Count > 0) { _tempTriggerProperty.ForEach(x => x.Coordinate = _currentCoordinateIndex); TriggerPropertyList.AddRange(_tempTriggerProperty); } if (_pluginData.data.TryGetValue("TriggerGroupList", out object _loadedTriggerGroup) && _loadedTriggerGroup != null) { List <TriggerGroup> _tempTriggerGroup = MessagePackSerializer.Deserialize <List <TriggerGroup> >((byte[])_loadedTriggerGroup); if (_tempTriggerGroup?.Count > 0) { foreach (TriggerGroup _group in _tempTriggerGroup) { _group.Coordinate = _currentCoordinateIndex; if (_group.GUID.IsNullOrEmpty()) { _group.GUID = JetPack.Toolbox.GUID("D"); } if (!JetPack.CharaStudio.Running) { _group.State = _group.Startup; } } TriggerGroupList.AddRange(_tempTriggerGroup); } } } } MissingGroupCheck(_currentCoordinateIndex); MissingPropertyCheck(_currentCoordinateIndex); } InitCurOutfitTriggerInfo("OnCoordinateBeingLoaded"); } base.OnCoordinateBeingLoaded(coordinate); }
/// <summary> /// Generates up SQL scripts. /// </summary> /// <param name="plugin">The plugin.</param> /// <param name="migration">The migration.</param> /// <returns></returns> protected virtual IReadOnlyList <MigrationCommand> GenerateUpSql(EntityPlugin plugin, Migration migration) { var migrationId = migration.GetType().GetCustomAttribute <MigrationAttribute>()?.Id; var historyRow = new HistoryRow(migrationId, ProductInfo.GetVersion()); var historyScript = PluginHistoryRepository.GetInsertScript(plugin, historyRow); var historyCommand = RawSqlCommandBuilder.Build(historyScript); return(MigrationsSqlGenerator .Generate(migration.UpOperations, migration.TargetModel) .Concat(new[] { new MigrationCommand(historyCommand, CurrentContext.Context, CommandLogger) }) .ToList()); }
protected override void OnReload(GameMode currentGameMode) { if (JetPack.CharaStudio.Running) { TriggerEnabled = false; } if (!JetPack.CharaMaker.Inside || (JetPack.CharaMaker.Inside && _loadCharaExtdata)) { TriggerPropertyList.Clear(); TriggerGroupList.Clear(); PluginData _pluginData = GetExtendedData(); if (_pluginData != null) { if (_pluginData.version > _pluginDataVersion) { _logger.Log(LogLevel.Error | LogLevel.Message, $"[OnReload][{CharaFullName}] ExtendedData.version: {_pluginData.version} is newer than your plugin"); } else if (_pluginData.version < _pluginDataVersion) { _logger.Log(LogLevel.Info, $"[OnReload][{CharaFullName}][Migrating from ver. {_pluginData.version}]"); Migration.ConvertCharaPluginData(_pluginData, ref TriggerPropertyList, ref TriggerGroupList); } else { if (_pluginData.data.TryGetValue("TriggerPropertyList", out object _loadedTriggerProperty) && _loadedTriggerProperty != null) { List <TriggerProperty> _tempTriggerProperty = MessagePackSerializer.Deserialize <List <TriggerProperty> >((byte[])_loadedTriggerProperty); if (_tempTriggerProperty?.Count > 0) { TriggerPropertyList.AddRange(_tempTriggerProperty); } if (_pluginData.data.TryGetValue("TriggerGroupList", out object _loadedTriggerGroup) && _loadedTriggerGroup != null) { List <TriggerGroup> _tempTriggerGroup = MessagePackSerializer.Deserialize <List <TriggerGroup> >((byte[])_loadedTriggerGroup); if (_tempTriggerGroup?.Count > 0) { foreach (TriggerGroup _group in _tempTriggerGroup) { if (_group.GUID.IsNullOrEmpty()) { _group.GUID = JetPack.Toolbox.GUID("D"); } if (!JetPack.CharaStudio.Running) { _group.State = _group.Startup; } } TriggerGroupList.AddRange(_tempTriggerGroup); } } } } } if (JetPack.CharaStudio.Running && _cfgStudioAutoEnable.Value) { StartCoroutine(StudioAutoEnableCoroutine()); } InitCurOutfitTriggerInfo("OnReload"); } base.OnReload(currentGameMode); }
/// <summary> /// Checks whether the migration should execute. /// </summary> /// <returns>True if the migration should run.</returns> public override bool ShouldMigrate() { var migrationTableTemplate = new Migration(); return(!DatabaseSession.Instance.Connector.CheckTableExists(migrationTableTemplate)); }
public static Column WithAuditAt(this Migration migration) { return(new Column("created_at", DbType.DateTime)); }
/// <summary> /// Populates the migration lists. /// </summary> /// <param name="plugin">The plugin.</param> /// <param name="appliedMigrationEntries">The applied migration entries.</param> /// <param name="targetMigration">The target migration.</param> /// <param name="migrationsToApply">The migrations to apply.</param> /// <param name="migrationsToRevert">The migrations to revert.</param> /// <param name="actualTargetMigration">The actual target migration.</param> protected virtual void PopulateMigrations( EntityPlugin plugin, IEnumerable <string> appliedMigrationEntries, string targetMigration, out IReadOnlyList <Migration> migrationsToApply, out IReadOnlyList <Migration> migrationsToRevert, out Migration actualTargetMigration) { var appliedMigrations = new Dictionary <string, TypeInfo>(); var unappliedMigrations = new Dictionary <string, TypeInfo>(); var migrations = plugin.GetMigrations().ToList(); if (migrations.Count == 0) { Logger.LogInformation(LoggingEvents.MigrationsNotFoundId, LoggingEvents.MigrationsNotFound, plugin.Name); } // // Determine the set of applied and unapplied migrations. // foreach (var migration in migrations) { var migrationId = migration.GetCustomAttribute <MigrationAttribute>().Id; if (appliedMigrationEntries.Contains(migrationId)) { appliedMigrations.Add(migrationId, migration.GetTypeInfo()); } else { unappliedMigrations.Add(migrationId, migration.GetTypeInfo()); } } // // Build the list of migrations to apply or revert. // if (string.IsNullOrEmpty(targetMigration)) { // // Migrate to latest version. // migrationsToApply = unappliedMigrations .OrderBy(m => m.Key) .Select(p => MigrationsAssembly.CreateMigration(p.Value, DatabaseProvider.Name)) .ToList(); migrationsToRevert = Array.Empty <Migration>(); actualTargetMigration = null; } else if (targetMigration == Migration.InitialDatabase) { // // Migrate to uninstalled. // migrationsToApply = Array.Empty <Migration>(); migrationsToRevert = appliedMigrations .OrderByDescending(m => m.Key) .Select(p => MigrationsAssembly.CreateMigration(p.Value, DatabaseProvider.Name)) .ToList(); actualTargetMigration = null; } else { // // Migrate to specific version. // migrationsToApply = unappliedMigrations .Where(m => string.Compare(m.Key, targetMigration, StringComparison.OrdinalIgnoreCase) <= 0) .OrderBy(m => m.Key) .Select(p => MigrationsAssembly.CreateMigration(p.Value, DatabaseProvider.Name)) .ToList(); migrationsToRevert = appliedMigrations .Where(m => string.Compare(m.Key, targetMigration, StringComparison.OrdinalIgnoreCase) > 0) .OrderByDescending(m => m.Key) .Select(p => MigrationsAssembly.CreateMigration(p.Value, DatabaseProvider.Name)) .ToList(); actualTargetMigration = appliedMigrations .Where(m => string.Compare(m.Key, targetMigration, StringComparison.OrdinalIgnoreCase) == 0) .Select(p => MigrationsAssembly.CreateMigration(p.Value, DatabaseProvider.Name)) .SingleOrDefault(); } }
private void applyMigrationsForVersion(Migration migration, ulong targetVersion) { switch (targetVersion) { case 7: convertOnlineIDs <RealmBeatmap>(); convertOnlineIDs <RealmBeatmapSet>(); convertOnlineIDs <RealmRuleset>(); void convertOnlineIDs <T>() where T : RealmObject { string className = getMappedOrOriginalName(typeof(T)); // version was not bumped when the beatmap/ruleset models were added // therefore we must manually check for their presence to avoid throwing on the `DynamicApi` calls. if (!migration.OldRealm.Schema.TryFindObjectSchema(className, out _)) { return; } var oldItems = migration.OldRealm.DynamicApi.All(className); var newItems = migration.NewRealm.DynamicApi.All(className); int itemCount = newItems.Count(); for (int i = 0; i < itemCount; i++) { dynamic?oldItem = oldItems.ElementAt(i); dynamic?newItem = newItems.ElementAt(i); long?nullableOnlineID = oldItem?.OnlineID; newItem.OnlineID = (int)(nullableOnlineID ?? -1); } } break; case 8: // Ctrl -/+ now adjusts UI scale so let's clear any bindings which overlap these combinations. // New defaults will be populated by the key store afterwards. var keyBindings = migration.NewRealm.All <RealmKeyBinding>(); var increaseSpeedBinding = keyBindings.FirstOrDefault(k => k.ActionInt == (int)GlobalAction.IncreaseScrollSpeed); if (increaseSpeedBinding != null && increaseSpeedBinding.KeyCombination.Keys.SequenceEqual(new[] { InputKey.Control, InputKey.Plus })) { migration.NewRealm.Remove(increaseSpeedBinding); } var decreaseSpeedBinding = keyBindings.FirstOrDefault(k => k.ActionInt == (int)GlobalAction.DecreaseScrollSpeed); if (decreaseSpeedBinding != null && decreaseSpeedBinding.KeyCombination.Keys.SequenceEqual(new[] { InputKey.Control, InputKey.Minus })) { migration.NewRealm.Remove(decreaseSpeedBinding); } break; case 9: // Pretty pointless to do this as beatmaps aren't really loaded via realm yet, but oh well. string metadataClassName = getMappedOrOriginalName(typeof(RealmBeatmapMetadata)); // May be coming from a version before `RealmBeatmapMetadata` existed. 
if (!migration.OldRealm.Schema.TryFindObjectSchema(metadataClassName, out _)) { return; } var oldMetadata = migration.OldRealm.DynamicApi.All(metadataClassName); var newMetadata = migration.NewRealm.All <RealmBeatmapMetadata>(); int metadataCount = newMetadata.Count(); for (int i = 0; i < metadataCount; i++) { dynamic?oldItem = oldMetadata.ElementAt(i); var newItem = newMetadata.ElementAt(i); string username = oldItem.Author; newItem.Author = new RealmUser { Username = username }; } break; case 10: string rulesetSettingClassName = getMappedOrOriginalName(typeof(RealmRulesetSetting)); if (!migration.OldRealm.Schema.TryFindObjectSchema(rulesetSettingClassName, out _)) { return; } var oldSettings = migration.OldRealm.DynamicApi.All(rulesetSettingClassName); var newSettings = migration.NewRealm.All <RealmRulesetSetting>().ToList(); for (int i = 0; i < newSettings.Count; i++) { dynamic?oldItem = oldSettings.ElementAt(i); var newItem = newSettings.ElementAt(i); long rulesetId = oldItem.RulesetID; string?rulesetName = getRulesetShortNameFromLegacyID(rulesetId); if (string.IsNullOrEmpty(rulesetName)) { migration.NewRealm.Remove(newItem); } else { newItem.RulesetName = rulesetName; } } break; case 11: string keyBindingClassName = getMappedOrOriginalName(typeof(RealmKeyBinding)); if (!migration.OldRealm.Schema.TryFindObjectSchema(keyBindingClassName, out _)) { return; } var oldKeyBindings = migration.OldRealm.DynamicApi.All(keyBindingClassName); var newKeyBindings = migration.NewRealm.All <RealmKeyBinding>().ToList(); for (int i = 0; i < newKeyBindings.Count; i++) { dynamic?oldItem = oldKeyBindings.ElementAt(i); var newItem = newKeyBindings.ElementAt(i); if (oldItem.RulesetID == null) { continue; } long rulesetId = oldItem.RulesetID; string?rulesetName = getRulesetShortNameFromLegacyID(rulesetId); if (string.IsNullOrEmpty(rulesetName)) { migration.NewRealm.Remove(newItem); } else { newItem.RulesetName = rulesetName; } } break; } }
protected virtual void HandleMigrationCallbackDelegate(Migration migration, ulong oldSchemaVersion) { }
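The empty callback above matches Realm .NET's migration delegate shape, which receives the Migration (exposing the old and new realms) and the schema version being migrated from. A hedged sketch of wiring such a callback into a RealmConfiguration; the file name and schema version are placeholders.

using Realms;

// Hedged sketch: attaching a migration callback to a Realm configuration.
var config = new RealmConfiguration("app.realm")
{
    SchemaVersion = 2,
    MigrationCallback = (migration, oldSchemaVersion) =>
    {
        // migration.OldRealm / migration.NewRealm expose both sides of the upgrade.
        if (oldSchemaVersion < 2)
        {
            // copy or transform data between the old and new schema here
        }
    }
};

var realm = Realm.GetInstance(config);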
/// <inheritdoc /> public string GetName() { return(string.Format("{0}: {1}", Version, Migration.GetType().Name)); }
public static Column WithId(this Migration migration, string columnName) { return(new Column(columnName, DbType.Int32, ColumnProperty.PrimaryKeyWithIdentity)); }
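The WithId/WithReference/WithAuditBy/WithAuditAt helpers above wrap common Column definitions for a Migrator.NET-style migration. A hedged sketch of combining them inside an Up(); the Database.AddTable/RemoveTable calls and the table name are assumptions about the host framework, and the class name is a placeholder.

// Hedged sketch combining the Column helper extensions above.
public class AddOrdersTable : Migration
{
    public override void Up()
    {
        Database.AddTable("orders",
            this.WithId(),                   // "id", Int32, primary key with identity
            this.WithReference("customer_id"),
            this.WithAuditBy(),              // "created_by" reference column
            this.WithAuditAt());             // "created_at" DateTime column
    }

    public override void Down()
    {
        Database.RemoveTable("orders");
    }
}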
/// /// // This method gets called by the runtime. Use this method to add services to the container. public void ConfigureServices(IServiceCollection services) { var connection = $"{Configuration["CONNECTION_STRING"]}Database={Configuration["DATABASE_NAME"]};"; Migration.Apply(Configuration["CONNECTION_STRING"], Configuration["DATABASE_NAME"]); services.AddDbContext <MySqlContext>(options => options.UseMySql(connection)); services.AddAutoMapper(); services.AddMvc().SetCompatibilityVersion(CompatibilityVersion.Version_2_2); services.AddApiVersioning(); //Auth var signingConfigurations = new SigningConfiguration(); services.AddSingleton(signingConfigurations); var tokenConfigurations = new TokenConfiguration(); tokenConfigurations.Audience = "MyAudienceExample"; tokenConfigurations.Issuer = "MyIssuerExapmle"; tokenConfigurations.Seconds = 1200; services.AddSingleton(tokenConfigurations); services.AddAuthentication(authOptions => { authOptions.DefaultAuthenticateScheme = JwtBearerDefaults.AuthenticationScheme; authOptions.DefaultChallengeScheme = JwtBearerDefaults.AuthenticationScheme; }).AddJwtBearer(bearerOptions => { var paramsValidation = bearerOptions.TokenValidationParameters; paramsValidation.IssuerSigningKey = signingConfigurations.Key; paramsValidation.ValidAudience = tokenConfigurations.Audience; paramsValidation.ValidIssuer = tokenConfigurations.Issuer; // Validates the signing of a received token paramsValidation.ValidateIssuerSigningKey = true; // Checks if a received token is still valid paramsValidation.ValidateLifetime = true; // Tolerance time for the expiration of a token (used in case // of time synchronization problems between different // computers involved in the communication process) paramsValidation.ClockSkew = TimeSpan.Zero; }); // Enables the use of the token as a means of // authorizing access to this project's resources services.AddAuthorization(auth => { auth.AddPolicy("Bearer", new AuthorizationPolicyBuilder() .AddAuthenticationSchemes(JwtBearerDefaults.AuthenticationScheme) .RequireAuthenticatedUser().Build()); }); //Swagger services.AddSwaggerGen(c => { c.SwaggerDoc("v1", new Info { Title = "RESTFul API .Net Core Repository Pattern Sample", Version = "v1" }); //Creating the button to inform the JWT Token c.AddSecurityDefinition("Bearer", new ApiKeyScheme { In = "header", Description = "Please enter JWT with Bearer into field", Name = "Authorization", Type = "apiKey" }); c.AddSecurityRequirement(new Dictionary <string, IEnumerable <string> > { { "Bearer", Enumerable.Empty <string> () }, }); // Set the comments path for the Swagger JSON and UI. var xmlFile = $"{Assembly.GetExecutingAssembly().GetName().Name}.xml"; var xmlPath = Path.Combine(AppContext.BaseDirectory, xmlFile); c.IncludeXmlComments(xmlPath); }); /* * Dependency Groups */ services.AddScoped <IPersonBusiness, PersonBusiness>(); services.AddScoped <IPersonRepository, PersonRepository>(); services.AddScoped <IBookBusiness, BookBusiness>(); services.AddScoped <IBookRepository, BookRepository>(); services.AddScoped <IUserBusiness, UserBusiness>(); services.AddScoped <IUserRepository, UserRepository>(); }
/// <summary> /// 自动扫描迁移模型并创建树形实体视图 /// </summary> /// <param name="migrationBuilder">迁移构造器</param> /// <param name="migration">迁移类实例</param> /// <param name="viewAssembly">实体类所在程序集</param> public static MigrationBuilder CreateTreeEntityView(this MigrationBuilder migrationBuilder, Migration migration, Assembly viewAssembly) { if (viewAssembly == null) { throw new NullReferenceException($"{nameof(viewAssembly)} cannot be null."); } foreach (var entityType in migration.TargetModel.GetEntityTypes().Where(entity => viewAssembly.GetType(entity.Name).IsDerivedFrom(typeof(IDomainTreeEntity <,>)))) { migrationBuilder.CreateTreeEntityView(entityType.GetTableName(), entityType.GetProperties().Select(pro => pro.GetColumnName())); } return(migrationBuilder); }
static void Main(string[] args) { Toolkit.Init(new ToolkitOptions { Backend = PlatformBackend.PreferNative, EnableHighResolution = true }); Version = Assembly.GetEntryAssembly().GetCustomAttribute <AssemblyInformationalVersionAttribute>().InformationalVersion; Console.Title = $"Ryujinx Console {Version}"; string systemPath = Environment.GetEnvironmentVariable("Path", EnvironmentVariableTarget.Machine); Environment.SetEnvironmentVariable("Path", $"{Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "bin")};{systemPath}"); // Hook unhandled exception and process exit events GLib.ExceptionManager.UnhandledException += (GLib.UnhandledExceptionArgs e) => ProcessUnhandledException(e.ExceptionObject as Exception, e.IsTerminating); AppDomain.CurrentDomain.UnhandledException += (object sender, UnhandledExceptionEventArgs e) => ProcessUnhandledException(e.ExceptionObject as Exception, e.IsTerminating); AppDomain.CurrentDomain.ProcessExit += (object sender, EventArgs e) => ProgramExit(); // Initialize the configuration ConfigurationState.Initialize(); // Initialize the logger system LoggerModule.Initialize(); // Initialize Discord integration DiscordIntegrationModule.Initialize(); string localConfigurationPath = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "Config.json"); string globalBasePath = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData), "Ryujinx"); string globalConfigurationPath = Path.Combine(globalBasePath, "Config.json"); // Now load the configuration as the other subsystems are now registered if (File.Exists(localConfigurationPath)) { ConfigurationPath = localConfigurationPath; ConfigurationFileFormat configurationFileFormat = ConfigurationFileFormat.Load(localConfigurationPath); ConfigurationState.Instance.Load(configurationFileFormat, ConfigurationPath); } else if (File.Exists(globalConfigurationPath)) { ConfigurationPath = globalConfigurationPath; ConfigurationFileFormat configurationFileFormat = ConfigurationFileFormat.Load(globalConfigurationPath); ConfigurationState.Instance.Load(configurationFileFormat, ConfigurationPath); } else { // No configuration, we load the default values and save it on disk ConfigurationPath = globalConfigurationPath; // Make sure to create the Ryujinx directory if needed. Directory.CreateDirectory(globalBasePath); ConfigurationState.Instance.LoadDefault(); ConfigurationState.Instance.ToFileFormat().SaveConfig(globalConfigurationPath); } PrintSystemInfo(); Profile.Initialize(); Application.Init(); string globalProdKeysPath = Path.Combine(globalBasePath, "system", "prod.keys"); string userProfilePath = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.UserProfile), ".switch", "prod.keys"); if (!File.Exists(globalProdKeysPath) && !File.Exists(userProfilePath) && !Migration.IsMigrationNeeded()) { GtkDialog.CreateWarningDialog("Key file was not found", "Please refer to `KEYS.md` for more info"); } MainWindow mainWindow = new MainWindow(); mainWindow.Show(); if (args.Length == 1) { mainWindow.LoadApplication(args[0]); } Application.Run(); }
public AppTaskResult Run(bool throwIfError) { using var db = DbFactory.Open(); Init(db); var allLogs = new StringBuilder(); var remainingMigrations = MigrationTypes.ToList(); LogMigrationsFound(remainingMigrations); var startAt = DateTime.UtcNow; var migrationsRun = new List <IAppTask>(); while (true) { Type?nextRun; try { nextRun = GetNextMigrationToRun(db, remainingMigrations); if (nextRun == null) { break; } } catch (Exception e) { Log.Error(e.Message); if (throwIfError) { throw; } return(new AppTaskResult(migrationsRun) { Error = e }); } var migrationStartAt = DateTime.UtcNow; var descFmt = AppTasks.GetDescFmt(nextRun); Log.Info($"Running {nextRun.Name}{descFmt}..."); var migration = new Migration { Name = nextRun.Name, Description = AppTasks.GetDesc(nextRun), CreatedDate = DateTime.UtcNow, ConnectionString = ((OrmLiteConnectionFactory)DbFactory).ConnectionString, NamedConnection = nextRun.FirstAttribute <NamedConnectionAttribute>()?.Name, }; var id = db.Insert(migration, selectIdentity: true); var instance = Run(DbFactory, nextRun, x => x.Up()); migrationsRun.Add(instance); Log.Info(instance.Log); if (instance.Error == null) { Log.Info($"Completed {nextRun.Name}{descFmt} in {(DateTime.UtcNow - migrationStartAt).TotalSeconds:N3}s" + Environment.NewLine); // Record completed migration run in DB db.UpdateOnly(() => new Migration { Log = instance.Log, CompletedDate = DateTime.UtcNow, }, where : x => x.Id == id); remainingMigrations.Remove(nextRun); } else { var e = instance.Error; Log.Error(e.Message, e); // Save Error in DB db.UpdateOnly(() => new Migration { Log = instance.Log, ErrorCode = e.GetType().Name, ErrorMessage = e.Message, ErrorStackTrace = e.StackTrace, }, where : x => x.Id == id); if (throwIfError) { throw instance.Error; } return(new AppTaskResult(migrationsRun)); } } var migrationsCompleted = migrationsRun.Count(x => x.Error == null); if (migrationsCompleted == 0) { Log.Info("No migrations to run."); } else { var migration = migrationsCompleted > 1 ? "migrations" : "migration"; Log.Info($"{Environment.NewLine}Ran {migrationsCompleted} {migration} in {(DateTime.UtcNow - startAt).TotalSeconds:N3}s"); } return(new AppTaskResult(migrationsRun)); }
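The Run loop above is the ServiceStack.OrmLite migration runner: it inserts a Migration journal row, executes each pending class's Up(), and records completion or error. A hedged sketch of defining one migration and invoking the runner; the class names, table, and connection string are placeholders.

using ServiceStack.DataAnnotations;
using ServiceStack.OrmLite;

// Placeholder migration in the ServiceStack.OrmLite style.
public class Migration1000 : MigrationBase
{
    public class Booking
    {
        [AutoIncrement]
        public int Id { get; set; }
        public string Name { get; set; }
    }

    public override void Up() => Db.CreateTable<Booking>();
    public override void Down() => Db.DropTable<Booking>();
}

// Hedged usage: run all pending migrations found in the given assembly.
var dbFactory = new OrmLiteConnectionFactory("App_Data/app.db", SqliteDialect.Provider);
var migrator = new Migrator(dbFactory, typeof(Migration1000).Assembly);
migrator.Run(throwIfError: true);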