/// <summary>
/// Runs the contained test items sequentially, resuming from <c>stopIndex</c>
/// (the position recorded when a previous run was stopped). Exceptions from a
/// single item are collected in <c>ExceptionCollection</c>; whether the run
/// continues after an exception is controlled by <c>IsRunWhenException</c>.
/// </summary>
private void OnSeqRun()
{
    for (int i = stopIndex; i < Count; i++)
    {
        if (!IsRun)
        {
            // Stop requested: remember where we halted so the next run resumes here.
            stopIndex = i;
            System.Diagnostics.Debug.WriteLine(stopIndex);
            break;
        }

        try
        {
            this[i].Single();
            System.Diagnostics.Debug.WriteLine("testing " + i);
        }
        catch (Exception exp)
        {
            this[i].CleanUp();
            ExceptionCollection.Add(exp);
            // Keep running only if configured to continue after an exception.
            IsRun = IsRunWhenException;
        }

        ProgressReporter?.Invoke(i);
    }

    SeqFinish?.Invoke();
    // BUG FIX: the running flag must be cleared even when no SeqFinish handler
    // is registered; previously it was only reset inside the null check and
    // stayed true forever when SeqFinish was null.
    IsRuning = false;
    // NOTE(review): stopIndex is not reset after a run that completes all items,
    // so a subsequent run resumes from the old stop point — confirm intended.
}
/// <summary>
/// Loads a Java-edition resource pack from the given zip archive: manifest,
/// textures, texture metadata, localizations, models, block states, glyph
/// sizes and colormaps. No-ops if the pack is already loaded; preloads first
/// when needed. Progress is reported per entry/model/block-state as a 0-100
/// percentage plus a display name.
/// </summary>
/// <param name="archive">Open zip archive containing the resource pack.</param>
private void Load(ZipArchive archive)
{
    if (!IsPreLoaded)
    {
        Preload(archive);
    }

    if (IsLoaded)
    {
        return;
    }

    Manifest = GetManifest(archive, ResourcePackType.Java);

    Dictionary<ResourceLocation, ResourcePackModelBase> models =
        new Dictionary<ResourceLocation, ResourcePackModelBase>();
    //Dictionary<string, ResourcePackItem> items = new Dictionary<string, ResourcePackItem>();

    // Pass 1: classify every archive entry by path regex and dispatch it to
    // the matching loader.
    var total = archive.Entries.Count;
    int count = 0;
    foreach (var entry in archive.Entries)
    {
        count++;
        ProgressReporter?.Invoke((int)(((double)count / (double)total) * 100D), entry.Name);

        var textureMatchs = IsTextureResource.Match(entry.FullName);
        if (textureMatchs.Success)
        {
            ProcessTexture(entry, textureMatchs);
            continue;
        }

        var textureMetaMatch = IsTextureMetaResource.Match(entry.FullName);
        if (textureMetaMatch.Success)
        {
            LoadTextureMeta(entry, textureMetaMatch);
            continue;
        }

        var languageMatchs = IsLanguageResource.Match(entry.FullName);
        if (languageMatchs.Success)
        {
            LoadLocalization(entry, languageMatchs);
            continue;
        }

        var modelMatch = IsModelRegex.Match(entry.FullName);
        if (modelMatch.Success)
        {
            var resourceLocation = new ResourceLocation(
                modelMatch.Groups["namespace"].Value,
                SanitizeFilename(modelMatch.Groups["filename"].Value));

            var model = ReadModel(entry, resourceLocation);
            if (model != null)
            {
                // BUG FIX: use the indexer instead of Add() so a duplicate
                // model path in the archive overwrites the earlier entry
                // instead of throwing ArgumentException and aborting the load.
                models[resourceLocation] = model;
            }
            continue;
        }

        var blockStateMatch = IsBlockStateRegex.Match(entry.FullName);
        if (blockStateMatch.Success)
        {
            LoadBlockState(entry, blockStateMatch);
            continue;
        }

        var glyphSizeMatch = IsGlyphSizes.Match(entry.FullName);
        if (glyphSizeMatch.Success)
        {
            LoadGlyphSizes(entry);
            continue;
        }
    }

    // Pass 2: resolve models. Models with no parent or a "builtin/" parent
    // sort first so dependent children are processed after their parents.
    // (OrderBy buffers the source on first enumeration, so ProcessModel may
    // safely mutate 'models' via the ref parameter while we iterate.)
    total = models.Count;
    count = 0;
    foreach (var model in models.OrderBy(
        x => (string.IsNullOrWhiteSpace(x.Value.ParentName)
              || x.Value.ParentName.StartsWith("builtin/")) ? 0 : 1))
    {
        ProgressReporter?.Invoke((int)(((double)count / (double)total) * 100D), model.Key.ToString());
        ProcessModel(model.Key, model.Value, ref models);
        count++;
    }

    // Pass 3: process block states, simplest (fewest parts + variants) first.
    var blockStates = _blockStates.ToArray();
    total = blockStates.Length;
    count = 0;
    foreach (var blockState in blockStates.OrderBy(x => x.Value.Parts.Length + x.Value.Variants.Count))
    {
        ProgressReporter?.Invoke((int)(((double)count / (double)total) * 100D), blockState.Key.ToString());
        _blockStates[blockState.Key] = ProcessBlockState(blockState.Value);
        count++;
    }

    LoadColormap();
    IsLoaded = true;
}
/// <summary>
/// Executes the configured big SQL file in batches, persisting per-unit
/// execution status to a session-level database so an interrupted run can be
/// resumed later. Honors the <c>Stop</c> flag between batches and retries
/// failed batches per the configured retry policy.
/// </summary>
/// <param name="progressReporter">Optional callback receiving (unit index, total affected rows).</param>
/// <param name="errorReporter">Optional callback receiving retry/error messages.</param>
/// <exception cref="ArgumentException">The configured SQL file does not exist.</exception>
public void Run(ProgressReporter progressReporter = null, ErrorReporter errorReporter = null)
{
    if (!File.Exists(Config.BigSqlFilePath))
    {
        throw new ArgumentException($"specified big sql file not exist: {Config.BigSqlFilePath}", nameof(Config.BigSqlFilePath));
    }

    // reset the stop flag
    // NOTE(review): if Stop is already true at entry we return WITHOUT
    // clearing it, so a later call bails out too — confirm this is intended.
    if (Stop)
    {
        return;
    }
    Stop = false;

    var sessionLevelDbPath = GetSessionLevelDbPath(Config.BigSqlFilePath);

    // A fresh (non-resuming) run moves any previous session state aside.
    if (!Config.ContinueFromLastSessionWhenStarted && Directory.Exists(sessionLevelDbPath))
    {
        var sessionBackupPath = GetSessionBackupPath(sessionLevelDbPath);
        Directory.Move(sessionLevelDbPath, sessionBackupPath);
    }

    using (var reader = new StreamReader(Config.BigSqlFilePath))
    using (var sessionDb = new SessionLevelDb(sessionLevelDbPath, Config.SessionSaveType))
    {
        var unitIndex = 0;
        var totalAffected = 0;

        while (true)
        {
            // Honor a stop request between batches.
            if (Stop)
            {
                StopCallBack?.Invoke();
                return;
            }

            // Pull the next batch of SQL units from the file.
            var units = ReadBatchSqlUnits(reader, Config.BatchSize, Config.SqlUnitEndingLine, sessionDb, ref unitIndex);
            if (!units.Any())
            {
                break;
            }

            var combinedSql = SqlUnit.CombineSqlUnitList(units);
            if (string.IsNullOrWhiteSpace(combinedSql))
            {
                continue;
            }

            // Execute the batch, retrying on failure per the configured policy.
            HelperFns.TryThenException(
                () =>
                {
                    var batchAffected = RunSql(Config.ConnectionString, combinedSql);
                    if (batchAffected > 0)
                    {
                        totalAffected += batchAffected;
                    }
                },
                Config.RetryIntervalWhenError,
                true,
                Config.RetryNumberWhenError + 1,
                (e, i) => errorReporter?.Invoke($"{e.Message}; retry in {Config.RetryIntervalWhenError.TotalSeconds} seconds...")
            );

            // Record every unit of the batch as executed in the session db.
            foreach (var unit in units)
            {
                sessionDb.SetSqlUnitExecuteStatus(unit.Index, unit.Sql, true);
            }

            progressReporter?.Invoke(unitIndex, totalAffected);
        }
    }
}
/// <summary>
/// Async counterpart of the batch SQL runner: executes the configured big SQL
/// file in batches via a <c>StorageFile</c> stream, logs progress to
/// <c>LogPool</c>, persists per-unit status to a session-level database for
/// resumption, and honors the <c>Stop</c> flag between batches. The file log
/// handler is attached for the duration of the run and detached in finally.
/// </summary>
/// <param name="progressReporter">Optional callback receiving (unit index, total affected rows).</param>
/// <param name="errorReporter">Optional callback receiving retry/error messages.</param>
/// <exception cref="ArgumentException">The configured SQL file does not exist.</exception>
public async Task Run(ProgressReporter progressReporter = null, ErrorReporter errorReporter = null)
{
    try
    {
        WriteLog += WriteLogToFile;
        await LogPool.AddLog(LogItem.MakeLog(new LogMessage(">> Started running...")));

        var bigSqlStorageFile = await StorageFile.GetFileFromPathAsync(Config.BigSqlFilePath);
        if (bigSqlStorageFile == null)
        {
            throw new ArgumentException($"specified big sql file not exist: {Config.BigSqlFilePath}", nameof(Config.BigSqlFilePath));
        }

        // reset the stop flag
        // NOTE(review): mirrors the sync Run — if Stop is already true we
        // return without clearing it; confirm that is intended.
        if (Stop)
        {
            return;
        }
        Stop = false;

        var sessionLevelDbPath = await GetSessionLevelDbPath(Config.BigSqlFilePath);

        // A fresh (non-resuming) run moves any previous session state aside.
        if (!Config.ContinueFromLastSessionWhenStarted && Directory.Exists(sessionLevelDbPath))
        {
            var sessionBackupPath = GetSessionBackupPath(sessionLevelDbPath);
            Directory.Move(sessionLevelDbPath, sessionBackupPath);
        }

        using (var readStream = await bigSqlStorageFile.OpenStreamForReadAsync())
        using (var reader = new StreamReader(readStream))
        using (var sessionDb = new SessionLevelDb(sessionLevelDbPath, Config.SessionSaveType))
        {
            var unitIndex = 0;
            var totalAffected = 0;

            while (true)
            {
                // Honor a stop request between batches.
                if (Stop)
                {
                    await LogPool.AddLog(LogItem.MakeLog(new LogMessage(">> Canceled by user.")));
                    StopCallBack?.Invoke();
                    return;
                }

                // Pull the next batch; units consumed by the reader but not
                // returned were already executed in a previous session.
                var indexBefore = unitIndex;
                var units = ReadBatchSqlUnits(reader, Config.BatchSize, Config.SqlUnitEndingLine, sessionDb, ref unitIndex);

                var skipCount = (unitIndex - indexBefore) - units.Count();
                if (skipCount > 0)
                {
                    await LogPool.AddLog(LogItem.MakeLog(new LogMessage($"Skipped {skipCount} already executed units.")));
                }

                if (!units.Any())
                {
                    break;
                }

                var combinedSql = SqlUnit.CombineSqlUnitList(units);
                if (string.IsNullOrWhiteSpace(combinedSql))
                {
                    continue;
                }

                // Execute the batch, retrying on failure per the configured
                // policy.
                // NOTE(review): the async error callback's fate depends on
                // how HelperFns.TryThenException awaits it — verify it is not
                // invoked as a fire-and-forget async void.
                await HelperFns.TryThenException(
                    async () =>
                    {
                        var batchAffected = await RunSql(Config.ConnectionString, combinedSql);
                        if (batchAffected > 0)
                        {
                            totalAffected += batchAffected;
                        }
                    },
                    Config.RetryIntervalWhenError,
                    true,
                    Config.RetryNumberWhenError + 1,
                    async (e, i) =>
                    {
                        var message = $"{e.Message}; retry in {Config.RetryIntervalWhenError.TotalSeconds} seconds...";
                        await LogPool.AddLog(LogItem.MakeLog(new LogMessage(message)));
                        errorReporter?.Invoke(message);
                    }
                );

                // Record every unit of the batch as executed in the session db.
                foreach (var unit in units)
                {
                    sessionDb.SetSqlUnitExecuteStatus(unit.Index, unit.Sql, true);
                }

                await LogPool.AddLog(LogItem.MakeLog(new LogMessage(unitIndex, totalAffected)));
                progressReporter?.Invoke(unitIndex, totalAffected);
            }
        }

        await LogPool.AddLog(LogItem.MakeLog(new LogMessage(">> Completed.")));
    }
    finally
    {
        WriteLog -= WriteLogToFile;
    }
}