public Task<IMapset> Import(FileInfo file, TaskListener<IMapset> listener = null)
{
    return Task.Run<IMapset>(async () =>
    {
        try
        {
            // Start importing the file
            Mapset mapset = await store.Import(file, listener: listener?.CreateSubListener<Mapset>());
            if (mapset != null)
            {
                // Mapset must be fully loaded.
                Mapset loadedMapset = store.LoadData(mapset);
                if (loadedMapset != null)
                {
                    // Dispatch mapset imported event on main thread.
                    UnityThread.Dispatch(() =>
                    {
                        // Add to all mapsets
                        allMapsets.AddOrReplace(loadedMapset);
                        // Reapply filter
                        Search(lastSearch);
                        OnImportMapset?.Invoke(loadedMapset);
                        return null;
                    });
                }
                else
                {
                    notificationBox?.Add(new Notification()
                    {
                        Message = $"Failed to load imported mapset ({mapset.Metadata.Artist} - {mapset.Metadata.Title})",
                        Type = NotificationType.Error
                    });
                }
            }
            else
            {
                notificationBox?.Add(new Notification()
                {
                    Message = $"Failed to import mapset at ({file.FullName})",
                    Type = NotificationType.Error
                });
            }
            listener?.SetFinished(mapset);
            return mapset;
        }
        catch (Exception e)
        {
            Logger.LogError($"Error while importing mapset: {e.Message}\n{e.StackTrace}");
            listener?.SetFinished();
            return null;
        }
    });
}
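A minimal caller-side sketch, added for illustration and not part of the original source: it shows how the `Import` method above might be consumed through a `TaskListener<IMapset>`, assuming a hypothetical `mapManager` instance exposing `Import` and a hypothetical `progressBar` UI element.

// Hypothetical usage sketch, assuming `mapManager` exposes the Import method above
// and `progressBar` is some UI element; neither name comes from the original source.
public async Task ImportWithProgress(string filePath)
{
    var listener = new TaskListener<IMapset>();
    listener.OnProgress += (p) => progressBar.SetValue(p);                          // progress reported in [0, 1]
    listener.OnFinished += (mapset) => Logger.LogInfo($"Imported: {mapset?.Metadata.Title}");

    IMapset imported = await mapManager.Import(new FileInfo(filePath), listener);
    // `imported` is null when the import or load failed; Import reports the error via notificationBox.
}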
public void TestAutoFinish()
{
    var listener = new TaskListener();
    Assert.IsFalse(listener.IsAutoFinish);
    Assert.IsFalse(listener.IsFinished);
    listener.SetFinished();
    Assert.IsTrue(listener.IsFinished);

    listener = new TaskListener();
    listener.IsAutoFinish = true;
    Assert.IsFalse(listener.IsFinished);
    Assert.IsTrue(listener.IsAutoFinish);
    listener.SetFinished();
    Assert.IsTrue(listener.IsFinished);

    listener = new TaskListener();
    var sub = listener.CreateSubListener();
    sub.SetFinished();
    Assert.IsFalse(listener.IsFinished);

    var sub2 = listener.CreateSubListener();
    var sub3 = listener.CreateSubListener();
    listener.IsAutoFinish = true;
    Assert.IsFalse(listener.IsFinished);
    sub2.SetFinished();
    Assert.IsFalse(listener.IsFinished);
    sub3.SetFinished();
    Assert.IsTrue(listener.IsFinished);
}
public Task Reload(TaskListener listener = null)
{
    return Task.Run(async () =>
    {
        if (listener != null)
        {
            listener.HasOwnProgress = false;
        }

        // Wait for store reloading.
        await store.Reload(listener?.CreateSubListener());

        // Run on the main thread
        UnityThread.DispatchUnattended(() =>
        {
            // Refill the mapset list
            allMapsets.Clear();
            allMapsets.AddRange(store.Mapsets);

            // TODO: Process for a case where the previously selected map no longer exists.

            // Fill the displayed mapsets list using last search term.
            Search(lastSearch);

            // Finished
            listener?.SetFinished();
            return null;
        });
    });
}
public override Task Reload(TaskListener listener = null)
{
    return Task.Run(async () =>
    {
        // Perform internal reloading routine.
        await base.Reload(listener?.CreateSubListener());

        // Load all mapsets from the storage across multiple worker tasks.
        var rawMapsets = GetAll().ToList();
        var threadedLoader = new ThreadedLoader<Mapset, Mapset>(ProcessMapsetLoad);
        var results = await threadedLoader.StartLoad(
            8,
            rawMapsets,
            listener: listener?.CreateSubListener<Mapset[]>()
        );

        // Add results to the mapsets list.
        lock (mapsets)
        {
            mapsets.Clear();
            for (int i = 0; i < results.Length; i++)
            {
                if (results[i] != null)
                {
                    mapsets.Add(results[i]);
                }
            }
        }
        listener?.SetFinished();
    });
}
/// <summary>
/// Records the specified play record under the current player.
/// </summary>
public Task<IRecord> RecordScore(IScoreProcessor scoreProcessor, int playTime, TaskListener<IRecord> listener = null)
{
    return Task.Run<IRecord>(async () =>
    {
        try
        {
            if (scoreProcessor == null || scoreProcessor.JudgeCount <= 0)
            {
                listener?.SetFinished();
                return null;
            }

            // Retrieve user and user stats.
            var currentMap = currentParameter.Map;
            var user = UserManager.CurrentUser.Value;
            var userStats = user.GetStatistics(currentMap.PlayableMode);

            // Record the play result to records database and user statistics.
            Record newRecord = new Record(currentMap, user, scoreProcessor, playTime);
            lastRecord = newRecord;

            // Retrieve old records for the map and user.
            var records = await RecordStore.GetTopRecords(currentMap, user, limit: null, listener: listener?.CreateSubListener<List<IRecord>>());

            // Save as cleared play.
            if (scoreProcessor.IsFinished)
            {
                RecordStore.SaveRecord(newRecord);

                var bestRecord = records == null || records.Count == 0 ? null : records[0];
                userStats.RecordPlay(newRecord, bestRecord);
            }
            // Save as failed play.
            else
            {
                userStats.RecordIncompletePlay(newRecord);
            }
            listener?.SetFinished(newRecord);
            return newRecord;
        }
        catch (Exception e)
        {
            Logger.LogError($"Error while recording score: {e.Message}\n{e.StackTrace}");
            listener?.SetFinished();
            return null;
        }
    });
}
public Task Reload(TaskListener listener = null)
{
    return Task.Run(() =>
    {
        base.Reload();
        listener?.SetFinished();
    });
}
public virtual Task Reload(TaskListener listener = null)
{
    return Task.Run(() =>
    {
        InitModules(true);
        LoadOrphanedData(listener?.CreateSubListener());
        listener?.SetFinished();
    });
}
public Task Reload(TaskListener listener = null)
{
    return Task.Run(() =>
    {
        dependencies.Inject(offlineUser);
        userStore.Reload();
        listener?.SetFinished();
    });
}
void ITask<T>.StartTask(TaskListener<T> listener)
{
    TaskListener<IWebRequest> newListener = null;
    if (listener != null)
    {
        listener.HasOwnProgress = false;

        newListener = listener.CreateSubListener<IWebRequest>();
        newListener.OnFinished += (req) => listener.SetFinished(Output);
    }
    Request(newListener);
}
public void TestFinished()
{
    var listener = new TaskListener();
    bool isFinished = false;
    listener.OnFinished += () => isFinished = true;

    Assert.IsFalse(listener.IsFinished);
    Assert.IsFalse(isFinished);
    Assert.AreEqual(0f, listener.Progress, Delta);

    listener.SetFinished();
    Assert.IsTrue(listener.IsFinished);
    Assert.IsTrue(isFinished);
    Assert.AreEqual(1f, listener.Progress, Delta);
}
/// <summary>
/// Tries loading all orphaned data which exist in the directory storage but are somehow not indexed in the database.
/// </summary>
private void LoadOrphanedData(TaskListener listener = null)
{
    var directoryList = new List<DirectoryInfo>(storage.GetAll());
    for (int i = 0; i < directoryList.Count; i++)
    {
        var dir = directoryList[i];

        // Report on the progress.
        listener?.SetProgress((float)i / directoryList.Count);

        // Find an entry in the database with matching directory name against index Id.
        using (var result = database.Query().Where(inx => inx["Id"].ToString().Equals(dir.Name)).GetResult())
        {
            // If already exists, just continue.
            if (result.Count > 0)
            {
                continue;
            }

            // Else, we must adopt this result.
            var data = ParseData(dir);
            // If the data is not valid, delete the directory.
            if (data == null)
            {
                storage.Delete(dir.Name);
                continue;
            }

            // Calculate hashcode
            data.CalculateHash();

            // Allocate a new id for data.
            PostProcessData(data, Guid.NewGuid());

            // Move the old directory to new directory name.
            dir.MoveTo(Path.Combine(dir.Parent.FullName, data.Id.ToString()));

            // Register this data as a new entry.
            database.Edit().Write(data).Commit();

            OnNewData?.Invoke(data);
            Logger.LogInfo($"DirectoryBackedStore.LoadOrphanedData - Successfully adopted orphaned data at: {dir.FullName}");
        }
    }
    listener?.SetFinished();
}
public Task<List<IRecord>> GetTopRecords(IPlayableMap map, int? limit = null, TaskListener<List<IRecord>> listener = null)
{
    return Task.Run(() =>
    {
        using (var query = Database.Query())
        {
            ApplyFilterMap(query, map);

            List<IRecord> records = query.GetResult().Cast<IRecord>().ToList();
            records.SortByTop();
            ApplyLimit(records, limit);

            listener?.SetFinished(records);
            return records;
        }
    });
}
public void TestFinishedState()
{
    var listener = new TaskListener<string>();
    listener.SetValue("lolz");
    listener.SetFinished();
    Assert.AreEqual("lolz", listener.Value);
    Assert.IsTrue(listener.IsFinished);
    Assert.AreEqual(1f, listener.TotalProgress, Delta);

    // Once finished, further progress and value updates should be ignored.
    listener.SetProgress(0.5f);
    listener.SetValue("a");
    Assert.AreEqual("lolz", listener.Value);
    Assert.IsTrue(listener.IsFinished);
    Assert.AreEqual(1f, listener.TotalProgress, Delta);
}
public void TestGeneric()
{
    var listener = new TaskListener<int>();
    int result = 0;
    listener.OnFinished += (v) => result = v;

    Assert.AreEqual(default(int), listener.Value);
    Assert.AreEqual(default(int), result);

    listener.SetValue(5);
    Assert.AreEqual(5, listener.Value);
    Assert.AreEqual(0, result);

    listener.SetFinished();
    Assert.AreEqual(5, listener.Value);
    Assert.AreEqual(5, result);
}
public IEnumerator TestEventOnOtherThread()
{
    UnityThread.Initialize();

    var listener = new TaskListener();
    int mainThread = Thread.CurrentThread.ManagedThreadId;
    int finishedThreadFlag = 0; // 0 = not called, 1 = main thread, -1 = other thread
    int progressThreadFlag = 0;
    listener.OnFinished += () =>
    {
        finishedThreadFlag = (
            Thread.CurrentThread.ManagedThreadId == mainThread ? 1 : -1
        );
    };
    listener.OnProgress += (p) =>
    {
        progressThreadFlag = (
            Thread.CurrentThread.ManagedThreadId == mainThread ? 1 : -1
        );
    };
    Assert.AreEqual(0, finishedThreadFlag);
    Assert.AreEqual(0, progressThreadFlag);

    bool taskRan = false;
    listener.CallEventOnMainThread.Value = false;
    Task.Run(() =>
    {
        listener.SetProgress(1f);
        listener.SetFinished();
        taskRan = true;
    });
    while (!taskRan)
    {
        yield return null;
    }
    Assert.AreEqual(-1, finishedThreadFlag);
    Assert.AreEqual(-1, progressThreadFlag);
}
public Task<IMapset> Load(Guid id, TaskListener<IMapset> listener = null)
{
    return Task.Run(() =>
    {
        IMapset mapset = store.Load(id);
        UnityThread.Dispatch(() =>
        {
            // If already loaded within all mapsets, replace it.
            allMapsets.AddOrReplace(mapset);

            // Search again.
            Search(lastSearch);

            // Finished.
            listener?.SetFinished(mapset);
            return null;
        });
        return mapset;
    });
}
public Task<DirectoryInfo> Uncompress(DirectoryInfo destination, TaskListener<DirectoryInfo> listener = null)
{
    return Task.Run(() =>
    {
        if (destination == null)
        {
            Logger.LogError($"ZipCompressed.Uncompress - destination is null!");
            return null;
        }
        if (!Source.Exists)
        {
            return null;
        }

        try
        {
            using (var fs = new FileStream(Source.FullName, FileMode.Open, FileAccess.Read, FileShare.Read))
            {
                // Find total size of the zip first.
                float totalSize = GetUncompressedSize();

                // Start unzipping.
                using (var zis = new ZipInputStream(fs))
                {
                    string destPath = destination.FullName;
                    ZipEntry entry;
                    byte[] buffer = new byte[4096];
                    float curSize = 0;
                    int curInterval = ProgressInterval;

                    while ((entry = zis.GetNextEntry()) != null)
                    {
                        string path = Path.Combine(destPath, entry.Name);
                        if (entry.IsDirectory)
                        {
                            Directory.CreateDirectory(path);
                        }
                        else
                        {
                            // Create missing subdirectories.
                            Directory.CreateDirectory(Path.GetDirectoryName(path));

                            // Start write for this entry.
                            using (FileStream writer = new FileStream(path, FileMode.Create, FileAccess.Write, FileShare.Write))
                            {
                                int length;
                                while ((length = zis.Read(buffer, 0, buffer.Length)) > 0)
                                {
                                    writer.Write(buffer, 0, length);

                                    // Track progress and report.
                                    curSize += length;
                                    curInterval--;
                                    if (curInterval <= 0)
                                    {
                                        curInterval = ProgressInterval;
                                        listener?.SetProgress(curSize / totalSize);
                                    }
                                }
                                writer.Flush();
                            }
                        }
                    }
                }
            }
            listener?.SetFinished(destination);
            return destination;
        }
        catch (Exception e)
        {
            Logger.LogError($"ZipCompressed.Uncompress - Error: {e}");
            return null;
        }
    });
}
public async Task<T> Import(FileInfo archive, bool deleteOnImport = true, TaskListener<T> listener = null)
{
    if (archive == null)
    {
        throw new ArgumentNullException(nameof(archive));
    }
    if (!archive.Exists)
    {
        throw new FileNotFoundException($"File at ({archive.FullName}) does not exist!");
    }

    // Retrieve the compressed file representation of the archive.
    var compressed = CompressedHelper.GetCompressed(archive);
    if (compressed == null)
    {
        throw new NotImportableException(archive, GetType());
    }

    // Start extraction of archive.
    var extractedDir = await compressed.Uncompress(GetTempExtractDir(archive), listener?.CreateSubListener<DirectoryInfo>());
    if (extractedDir == null || !extractedDir.Exists)
    {
        throw new NotImportableException(archive, GetType());
    }

    // Parse the data at temporary extraction destination.
    var data = ParseData(extractedDir);
    // Failed to parse.
    if (data == null)
    {
        listener?.SetFinished();
        return default(T);
    }

    // Calculate hash code.
    data.CalculateHash();

    // Check whether this data already exists using hash check.
    bool isNewData = false;
    if (ContainsHash(data.HashCode, out T existingData))
    {
        // Replace existing data.
        PostProcessData(data, existingData.Id);
    }
    else
    {
        // Allocate a new Id.
        PostProcessData(data, Guid.NewGuid());
        isNewData = true;
    }

    // Move the extracted data under management of the storage.
    storage.Move(data.Id.ToString(), extractedDir);

    // Replace or add the data to database.
    database.Edit().Write(data).Commit();

    // Delete archive
    if (deleteOnImport)
    {
        archive.Delete();
    }

    // Report finished.
    listener?.SetValue(data);
    listener?.SetFinished();

    if (isNewData)
    {
        OnNewData?.Invoke(data);
    }
    return data;
}
public void StartTask(TaskListener<TOutput> listener = null)
{
    ListenToRequest(listener, () => listener?.SetFinished(Output));
    request.Request();
}
public Task<TOutput[]> StartLoad(int taskCount, List<TInput> inputs, TOutput[] outputs = null, TaskListener<TOutput[]> listener = null)
{
    if (taskCount < 1)
    {
        throw new Exception("Task count must be 1 or greater.");
    }
    if (inputs == null)
    {
        throw new ArgumentNullException(nameof(inputs));
    }
    if (outputs != null && outputs.Length < inputs.Count)
    {
        throw new ArgumentException("The outputs array length is less than the input count.");
    }

    return Task.Run<TOutput[]>(() =>
    {
        taskCount = Math.Min(taskCount, inputs.Count);
        if (outputs == null)
        {
            outputs = new TOutput[inputs.Count];
        }

        object inputLocker = new object();
        object finishLocker = new object();
        int curInputIndex = 0;
        int finishedCount = 0;
        DateTime loadStartTime = DateTime.UtcNow;

        for (int i = 0; i < taskCount; i++)
        {
            Task.Run(() =>
            {
                while (true)
                {
                    // Retrieve the index of next input to process.
                    int inx = -1;
                    lock (inputLocker)
                    {
                        inx = curInputIndex++;
                    }
                    if (inx >= inputs.Count)
                    {
                        break;
                    }

                    outputs[inx] = loadHandler.Invoke(inputs[inx]);

                    lock (finishLocker)
                    {
                        listener?.SetProgress((float)(finishedCount + 1) / inputs.Count);
                        finishedCount++;
                    }
                }
            });
        }

        while (true)
        {
            Thread.Sleep(CompletionCheckInterval);

            if (Timeout.HasValue)
            {
                // Compare the elapsed milliseconds since load start against the timeout.
                double elapsed = (DateTime.UtcNow - loadStartTime).TotalMilliseconds;
                if (elapsed > Timeout.Value)
                {
                    throw new TimeoutException("The loading process has taken longer than expected.");
                }
            }
            if (finishedCount >= inputs.Count)
            {
                break;
            }
        }

        listener?.SetFinished(outputs);
        return outputs;
    });
}
void ITask.StartTask(TaskListener listener)
{
    ListenToRequest(listener, () => listener?.SetFinished());
    request.Request();
}
public Task<DirectoryInfo> Uncompress(DirectoryInfo destination, TaskListener<DirectoryInfo> listener = null) => Task.Run(() =>
{
    listener?.SetFinished(null);
    return null as DirectoryInfo;
});