/// <summary>
/// Drains the pending resource queue, loading each entry according to its
/// data type and reporting progress after every item.
/// </summary>
/// <param name="onUpdate">Callback invoked with the current loading progress.</param>
/// <returns>Coroutine enumerator; yields once per loaded resource.</returns>
IEnumerator LoadResourcesFromQueue(OnUpdateProgress onUpdate)
{
    // Fix: removed the unused "total" local, a stray empty statement (";;")
    // and dead commented-out code.
    LoadingProgressInfo prog = new LoadingProgressInfo(LoadingProgressType.Resource, m_loadingQueue.Count);
    onUpdate(prog);
    yield return null;

    while (m_loadingQueue.Count > 0)
    {
        ResInfo res = m_loadingQueue.Dequeue();
        switch (res.type)
        {
            case DataType.UnitData:
                LoadUnit(res.path);
                break;
            case DataType.ProjectileData:
                LoadProjectile(res.path);
                break;
        }

        prog.value += 1.0f;
        onUpdate(prog);
        yield return null;
    }
}
/// <summary>
/// Raises the success event with the given message and, when a progress value
/// is supplied, also raises the progress-update event.
/// </summary>
/// <param name="message">Message forwarded to the success handlers.</param>
/// <param name="progress">Optional progress value forwarded to progress handlers.</param>
public void Success(string message, float? progress = null)
{
    OnSuccess?.Invoke(message);

    if (progress.HasValue)
    {
        OnUpdateProgress?.Invoke(progress.Value);
    }
}
/// <summary>
/// Appends the given source PDF files to the end of the original file.
/// </summary>
/// <param name="sourceFiles">The list of source files to append.</param>
/// <param name="originalFile">Original file to append the source files to.</param>
public void Append(string[] sourceFiles, string originalFile)
{
    PdfDocument pdfOriginal = null;
    PdfDocument pdfTemp = null;
    PdfMerger pdfMerger = null;

    try
    {
        pdfOriginal = new PdfDocument(new PdfReader(originalFile));

        // Step 1: Merge into a temporary file so the original is not rewritten
        // while it is still being read; it is swapped in at the last step.
        pdfTemp = new PdfDocument(new PdfWriter(originalFile + "temp"));
        pdfMerger = new PdfMerger(pdfTemp);

        // Step 2: First add the original file content.
        pdfMerger.Merge(pdfOriginal, 1, pdfOriginal.GetNumberOfPages());

        // Step 3: Then add the content of every source file.
        for (var i = 0; i < sourceFiles.Length; i++)
        {
            using (var sourceFile = new PdfDocument(new PdfReader(sourceFiles[i])))
            {
                pdfMerger.Merge(sourceFile, 1, sourceFile.GetNumberOfPages());

                // Update merge job progress.
                OnUpdateProgress?.Invoke(i);
            }
        }

        pdfTemp.Close();
        pdfOriginal.Close();
        pdfMerger.Close();

        // Step 4: Replace the original file with the temp one.
        File.Delete(originalFile);
        File.Move(originalFile + "temp", originalFile);
    }
    catch (Exception)
    {
        if (pdfTemp != null && !pdfTemp.IsClosed())
        {
            pdfTemp.Close();
            // pdfOriginal may be null if its constructor was what threw.
            pdfOriginal?.Close();
            pdfMerger?.Close();
        }

        File.Delete(originalFile + "temp");

        // Fix: rethrow the original exception so its type and stack trace are
        // preserved; "throw new Exception(e.Message)" discarded both.
        throw;
    }
}
/// <summary>
/// Streams the HTTP response body into a temporary file, reporting progress per
/// chunk, then copies it over the final path on success.
/// </summary>
/// <param name="response">Response whose body is downloaded; must expose a valid ContentLength.</param>
/// <param name="token">Cancellation token checked after every chunk read.</param>
/// <returns>This instance, for chaining.</returns>
private DownloadFile HandleResponse(WebResponse response, CancellationToken token)
{
    var contentLength = response.ContentLength;
    if (contentLength < 0)
    {
        var txt = $"Could not get a proper content length for DownloadFile[{DisplayText}]";
        Logging.tML.Error(txt);
        throw new Exception(txt);
    }

    // Download into a uniquely-named temp file beside the target path.
    string _downloadPath = $"{new FileInfo(FilePath).Directory.FullName}{Path.DirectorySeparatorChar}{DateTime.Now.Ticks}{TEMP_EXTENSION}";
    _fileStream = new FileStream(_downloadPath, FileMode.Create);

    int currentIndex = 0;
    byte[] buf = new byte[CHUNK_SIZE];
    try
    {
        // Fix: dispose the response stream deterministically (it previously leaked).
        using (var responseStream = response.GetResponseStream())
        {
            // Use a standard read loop, attempting to read small amounts causes it to lock up and die on mono
            int r;
            while ((r = responseStream.Read(buf, 0, buf.Length)) > 0)
            {
                token.ThrowIfCancellationRequested();
                _fileStream.Write(buf, 0, r);
                currentIndex += r;
                OnUpdateProgress?.Invoke((float)(currentIndex / (double)contentLength));
            }
        }
    }
    catch (OperationCanceledException e)
    {
        AbortDownload(_downloadPath);
        Logging.tML.Info($"DownloadFile[{DisplayText}] operation was cancelled", e);
    }
    catch (Exception e)
    {
        AbortDownload(_downloadPath);
        Logging.tML.Info("Unknown error", e);
    }

    if (!_aborted)
    {
        _fileStream?.Close();
        PreCopy();
        File.Copy(_downloadPath, FilePath, true);
        File.Delete(_downloadPath);
        OnComplete?.Invoke();
    }

    return this;
}
/// <summary>
/// Runs resource loading, then the scene replacement, then an optional custom
/// coroutine, and finally signals completion through the progress callback.
/// </summary>
/// <param name="onUpdate">Callback receiving loading progress.</param>
/// <param name="custom">Optional extra coroutine to run after the scene swap.</param>
public IEnumerator LoadResourcesFromQueueAndReplaceScene(OnUpdateProgress onUpdate, CustomCoroutineFunction custom)
{
    yield return LoadResourcesFromQueue(onUpdate);
    yield return ReplaceScene(onUpdate);

    if (custom != null)
    {
        yield return custom(onUpdate);
    }

    LoadingProgressInfo done = new LoadingProgressInfo(LoadingProgressType.Done, 0.0f);
    onUpdate(done);
}
/// <summary>
/// Splits specific pages from the source file and saves them in the destination file.
/// </summary>
/// <param name="sourceFile">The source file to split pages from.</param>
/// <param name="destinationFile">The destination file to save the split pages into.</param>
/// <param name="pageRange">Page numbers to extract, in the order they should appear.</param>
public void Split(string sourceFile, string destinationFile, int[] pageRange)
{
    PdfDocument pdfDestination = null;
    PdfDocument pdfSource = null;
    PdfMerger pdfMerger = null;

    try
    {
        // Step 1: Write into a temporary file so an existing destination file
        // is only replaced once the split has fully succeeded.
        pdfDestination = new PdfDocument(new PdfWriter(destinationFile + "temp"));
        pdfMerger = new PdfMerger(pdfDestination);
        pdfSource = new PdfDocument(new PdfReader(sourceFile));

        // Extract and merge each requested page from the source document.
        for (var i = 0; i < pageRange.Length; i++)
        {
            pdfMerger.Merge(pdfSource, pageRange[i], pageRange[i]);
            OnUpdateProgress?.Invoke(i);
        }

        pdfDestination.Close();
        pdfMerger.Close();
        pdfSource.Close();

        // Step 2: Replace the destination file with the temp one.
        File.Delete(destinationFile);
        File.Move(destinationFile + "temp", destinationFile);
    }
    catch (Exception)
    {
        if (pdfDestination != null && !pdfDestination.IsClosed())
        {
            // A PdfDocument cannot be closed with zero pages; pad it so cleanup succeeds.
            pdfDestination.AddNewPage();
            pdfDestination.Close();
            pdfMerger?.Close();
            pdfSource?.Close();
        }

        File.Delete(destinationFile + "temp");

        // Fix: rethrow the original exception so its type and stack trace are
        // preserved; "throw new Exception(e.Message)" discarded both.
        throw;
    }
}
/// <summary>
/// Downloads the champion roster and, for each champion, its detailed data
/// (display name and spells), reporting progress as a percentage along the way.
/// </summary>
/// <param name="ct">Token used to cancel the pending web requests.</param>
/// <returns>The list of champions with their spells populated.</returns>
private async Task<List<Champion>> GetChampions(CancellationToken ct)
{
    var result = new List<Champion>();

    var latestVersion = await GetLatestVersion(ct);
    var listUrl = string.Format(cChampionsDataUrl, latestVersion);
    var listJson = await Utilities.GetApiResponse(listUrl, ct);

    var root = JsonConvert.DeserializeObject<JObject>(listJson);
    var entries = root.GetValue("data", StringComparison.OrdinalIgnoreCase).Children<JProperty>();

    // 6 requests per champion: data, square, q, w, e, r
    _total += entries.Count() * 6;

    foreach (var entry in entries)
    {
        var id = entry.Name;
        var detailUrl = string.Format(cChampionDataUrl, latestVersion, id);
        var detailJson = await Utilities.GetApiResponse(detailUrl, ct);

        var detail = JsonConvert.DeserializeObject<JObject>(detailJson);
        var detailedData = detail.GetValue("data", StringComparison.OrdinalIgnoreCase).First.First;

        result.Add(new Champion
        {
            Id = id,
            Name = detailedData["name"].Value<string>(),
            Spells = JsonConvert.DeserializeObject<List<Spell>>(detailedData["spells"].ToString()),
        });

        _progress++;
        OnUpdateProgress?.Invoke(this, new UpdateEventArgs((int)((double)_progress / _total * 100)));
    }

    return result;
}
/// <summary>
/// Downloads a file over HTTP via libcurl, optionally resuming a broken download.
/// </summary>
/// <param name="aURL">URL of the file to download.</param>
/// <param name="outFile">Local path to write the downloaded file to.</param>
/// <param name="updateProgress">Optional progress callback.</param>
/// <returns>true when the server answers HTTP 200; otherwise an exception is thrown.</returns>
public bool DownloadFile(string aURL, string outFile, OnUpdateProgress updateProgress = null)
{
    try
    {
        DownOrUp = 1; // mark the transfer direction as "download"

        // Unless resuming a broken download, start from a fresh file.
        if ((!BrokenOpen) && (File.Exists(outFile)))
        {
            File.Delete(outFile);
        }

        downFileName = outFile;
        DownFileStream = new FileStream(outFile, FileMode.OpenOrCreate);
        // Seek to the end so a resumed download appends to the partial file.
        StartDownPosion = DownFileStream.Length;
        DownFileStream.Seek(DownFileStream.Length, SeekOrigin.Current);
        onUpProgress = updateProgress;
        BeginTime = DateTime.Now;

        initCurl(aURL);
        easy.WriteFunction = OnWriteData;
        easy.ResumeFrom = DownFileStream.Length;
        easy.Perform();

        if (easy.ResponseCode == 200)
        {
            return true;
        }

        throw new Exception(string.Format("HTTP返回:{0}", easy.ResponseCode));
    }
    catch (Exception ex)
    {
        LogLocal.log().SaveLog(new LogEntity("文件下载失败:" + ex.Message, LogType.Plat, LogLevel.ERROR));

        // Fix: "throw;" preserves the original stack trace; "throw ex;" reset it.
        throw;
    }
    finally
    {
        easy = null;

        // Fix: the stream is null when the FileStream constructor itself failed,
        // which previously caused a NullReferenceException here.
        if (DownFileStream != null)
        {
            DownFileStream.Flush();
            DownFileStream.Close();
            DownFileStream.Dispose();
        }

        DownOrUp = 0;
    }
}
/// <summary>
/// Starts two periodic timers: one that raises progress events every
/// UpdateInterval milliseconds, and one that computes transfer speed and
/// estimated remaining time once per second. Each timer disposes itself
/// when the transfer is done, paused or cancelled.
/// </summary>
private void RunTimers()
{
    TimerCallback timerCallbackUpdater = new TimerCallback((target) =>
    {
        // Fraction of the total length transferred so far.
        float percent = (float)CurrentTotalBytes / Length;
        OnUpdateProgress.Invoke(this, new OnUpdateProgressEventArgs { Percent = percent, Bytes = CurrentTotalBytes });
        if (Done || IsPaused || IsCancelled)
        {
            // Self-dispose once the transfer is no longer running.
            TimerUpdater.Dispose();
        }
    });
    TimerUpdater = new Timer(timerCallbackUpdater, null, 0, UpdateInterval);

    // Single shared event-args instance: PreviousBytes/NowBytes carry state
    // between speed-calculator ticks.
    OnUpdateSpeedEventArgs onUpdateSpeedEventArgs = new OnUpdateSpeedEventArgs { PreviousBytes = CurrentTotalBytes, NowBytes = CurrentTotalBytes };
    TimerCallback timerCallbackSpeedCalculator = new TimerCallback((target) =>
    {
        onUpdateSpeedEventArgs.NowBytes = CurrentTotalBytes;
        // Bytes transferred since the previous tick; ticks are 1000 ms apart,
        // so this is effectively bytes per second.
        Speed = onUpdateSpeedEventArgs.NowBytes - onUpdateSpeedEventArgs.PreviousBytes;
        if (Speed != 0)
        {
            EstimatedTime = (Length - CurrentTotalBytes) / Speed;
        }
        else
        {
            // No measurable speed: report "unknown" remaining time.
            EstimatedTime = long.MaxValue;
        }
        onUpdateSpeedEventArgs.PreviousBytes = onUpdateSpeedEventArgs.NowBytes;
        if (Done || IsPaused || IsCancelled)
        {
            // Reset reported stats and self-dispose when the transfer stops.
            Speed = 0;
            EstimatedTime = long.MaxValue;
            TimerSpeedCalculator.Dispose();
        }
        onUpdateSpeedEventArgs.Speed = Speed;
        onUpdateSpeedEventArgs.EstimatedTime = EstimatedTime;
        OnUpdateSpeed.Invoke(this, onUpdateSpeedEventArgs);
    });
    TimerSpeedCalculator = new Timer(timerCallbackSpeedCalculator, null, 0, 1000);
}
/// <summary>
/// Merges the source files together and saves them into the destination file.
/// </summary>
/// <param name="sourceFiles">The list of source files to merge, in order.</param>
/// <param name="destinationFile">Destination file to save the merged document into.</param>
public void Merge(string[] sourceFiles, string destinationFile)
{
    PdfDocument pdfDestination = null;
    PdfMerger pdfMerger = null;

    try
    {
        // Step 1: Write into a temporary file so an existing destination file
        // is only replaced once the merge has fully succeeded.
        pdfDestination = new PdfDocument(new PdfWriter(destinationFile + "temp"));
        pdfMerger = new PdfMerger(pdfDestination);

        for (var i = 0; i < sourceFiles.Length; i++)
        {
            using (var pdfSource = new PdfDocument(new PdfReader(sourceFiles[i])))
            {
                pdfMerger.Merge(pdfSource, 1, pdfSource.GetNumberOfPages());

                // Update merge job progress.
                OnUpdateProgress?.Invoke(i);
            }
        }

        pdfDestination.Close();
        pdfMerger.Close();

        // Step 2: Replace the destination file with the temp one.
        File.Delete(destinationFile);
        File.Move(destinationFile + "temp", destinationFile);
    }
    catch (Exception)
    {
        if (pdfDestination != null && !pdfDestination.IsClosed())
        {
            // A PdfDocument cannot be closed with zero pages; pad it so cleanup succeeds.
            pdfDestination.AddNewPage();
            pdfDestination.Close();
            pdfMerger?.Close();
        }

        File.Delete(destinationFile + "temp");

        // Fix: rethrow the original exception so its type and stack trace are
        // preserved; "throw new Exception(e.Message)" discarded both.
        throw;
    }
}
/// <summary>
/// Starts an async load of the next scene (without activating it) and reports
/// load progress until Unity's 0.9 "ready to activate" threshold is reached.
/// </summary>
/// <param name="onUpdate">Callback receiving scene-load progress.</param>
IEnumerator ReplaceScene(OnUpdateProgress onUpdate)
{
    m_nextSceneAop = SceneManager.LoadSceneAsync(m_nextScene);
    m_nextSceneAop.allowSceneActivation = false;

    LoadingProgressInfo info = new LoadingProgressInfo(LoadingProgressType.Scene, 0.9f);
    onUpdate(info);
    yield return null;

    // AsyncOperation.progress caps at 0.9 while activation is disallowed.
    while (m_nextSceneAop.progress < 0.9f)
    {
        info.value = m_nextSceneAop.progress;
        onUpdate(info);
        yield return null;
    }

    info.value = 0.9f;
    onUpdate(info);
    yield return null;
}
/// <summary>
/// Asynchronously loads a scene, reporting a 0–100 progress value through
/// <paramref name="updateProgress"/> and firing <paramref name="onEndLoad"/>
/// just before the scene is activated.
/// </summary>
/// <param name="sceneName">Name of the scene to load.</param>
/// <param name="updateProgress">Callback receiving the displayed progress percentage.</param>
/// <param name="onEndLoad">Optional callback invoked when loading completes.</param>
private static IEnumerator LoadAsync(string sceneName, OnUpdateProgress updateProgress, OnLoaderEnd onEndLoad = null)
{
    int showProgress = 0;
    int toProgress = 0;

    AsyncOperation operation = SceneManager.LoadSceneAsync(sceneName);
    operation.allowSceneActivation = false;

    if (operation.progress < 0.9)
    {
        // Fix: the cast previously bound tighter than the multiplication
        // ("(int)operation.progress * 100"), which always produced 0.
        toProgress = (int)(operation.progress * 100);
        while (showProgress < toProgress)
        {
            ++showProgress;
            updateProgress(showProgress);
            yield return new WaitForEndOfFrame();
        }
    }

    // Animate the displayed progress up to 100, pausing at 99 so the final
    // step coincides with scene activation.
    toProgress = 100;
    while (showProgress < toProgress)
    {
        ++showProgress;
        updateProgress(showProgress);
        if (showProgress == 99)
        {
            break;
        }
        yield return new WaitForEndOfFrame();
    }

    // Fix: onEndLoad defaults to null but was invoked unconditionally,
    // which threw a NullReferenceException when omitted.
    onEndLoad?.Invoke(sceneName);
    operation.allowSceneActivation = true;
}
/// <summary>
/// Uploads a file over HTTP via libcurl, optionally resuming from an already
/// uploaded byte offset.
/// </summary>
/// <param name="aURL">Upload URL.</param>
/// <param name="upFile">Path of the file to upload.</param>
/// <param name="uploadedSize">Number of bytes already uploaded (resume offset).</param>
/// <param name="updateProgress">Optional progress callback.</param>
/// <returns>true when the server answers HTTP 200; otherwise an exception is thrown.</returns>
public bool UploadFile(string aURL, string upFile, long uploadedSize = 0, OnUpdateProgress updateProgress = null)
{
    try
    {
        DownOrUp = 2; // mark the transfer direction as "upload"

        initCurl(aURL);
        UpLoadedSize = uploadedSize;
        easy.ReadFunction = OnReadData;

        byte[] UploadData = File.ReadAllBytes(upFile);
        UploadStream = new MemoryStream(UploadData);
        // Skip the bytes that were already uploaded in a previous attempt.
        UploadStream.Seek(UpLoadedSize, SeekOrigin.Current);
        easy.ReadData = UploadData;
        onUpProgress = updateProgress;
        BeginTime = DateTime.Now;

        easy.Upload = true;
        easy.InfileSize = UploadData.Length;
        easy.Post = true;
        easy.Perform();

        if (easy.ResponseCode == 200)
        {
            return true;
        }

        throw new Exception(string.Format("HTTP返回:{0}", easy.ResponseCode));
    }
    catch (Exception ex)
    {
        LogLocal.log().SaveLog(new LogEntity(string.Format("文件上传URL:\"{0}\" 上传失败:{1}", aURL, ex.Message), LogType.Plat, LogLevel.ERROR));

        // Fix: "throw;" preserves the original stack trace; "throw ex;" reset it.
        throw;
    }
    finally
    {
        easy = null;

        // Fix: the stream is null when setup failed before it was assigned,
        // which previously caused a NullReferenceException here.
        if (UploadStream != null)
        {
            UploadStream.Flush();
            UploadStream.Close();
            UploadStream.Dispose();
        }

        DownOrUp = 0;
    }
}
/// <summary>
/// Forwards the given progress value to any subscribed progress handlers.
/// </summary>
/// <param name="progress">Current progress value.</param>
protected void UpdateProgress(double progress) => OnUpdateProgress?.Invoke(progress);
/// <summary>
/// Starts the asynchronous scene-load coroutine on the shared coroutine runner.
/// </summary>
/// <param name="sceneName">Name of the scene to load.</param>
/// <param name="updateProgress">Callback receiving load progress.</param>
/// <param name="endLoad">Callback fired when the load completes.</param>
public static void LoadSceneAsync(string sceneName, OnUpdateProgress updateProgress, OnLoaderEnd endLoad)
{
    var routine = LoadAsync(sceneName, updateProgress, endLoad);
    CoroutineTool.Instance.StartCoroutine(routine);
}
/// <summary>
/// Native-callback bridge: relays a progress value to managed subscribers.
/// </summary>
/// <param name="progress">Progress value reported by the native side.</param>
/// <param name="userData">Opaque user pointer from the native callback (unused here).</param>
private void OnUpdateProgressInternal(float progress, IntPtr userData) => OnUpdateProgress?.Invoke(progress);
/// <summary>
/// Refreshes all cached League data: downloads champion, spell, summoner-spell
/// and item metadata plus their images, then serialises everything to disk.
/// </summary>
/// <param name="version">Data version used to build the image URLs.</param>
/// <param name="path">File path the serialised data is written to.</param>
/// <param name="ct">Token used to cancel the metadata requests.</param>
private async Task UpdateData(string version, string path, CancellationToken ct)
{
    Logger.Instance.LogMessage(TracingLevel.DEBUG, "UpdateData - initiated");

    var champions = await GetChampions(ct);
    var summonerSpells = await GetSummonerSpells(ct);
    var items = await GetItems(ct);

    Directory.CreateDirectory(_championImageFolder);
    Directory.CreateDirectory(_spellImageFolder);
    Directory.CreateDirectory(_summonerSpellImageFolder);
    Directory.CreateDirectory(_itemImageFolder);

    using (var wc = new WebClient())
    {
        // Downloads one image then advances the shared progress counter.
        // (Extracted: this snippet was duplicated four times.)
        async Task DownloadImageAsync(string url, string imgPath)
        {
            await wc.DownloadFileTaskAsync(url, imgPath);

            _progress++;
            OnUpdateProgress?.Invoke(this, new UpdateEventArgs((int)((double)_progress / _total * 100)));
        }

        foreach (var champion in champions)
        {
            await DownloadImageAsync(
                string.Format(cChampionImageUrl, version, champion.Id),
                Path.Combine(_championImageFolder, $"{champion.Id}.png"));

            foreach (var spell in champion.Spells)
            {
                await DownloadImageAsync(
                    string.Format(cSpellImageUrl, version, spell.Id),
                    Path.Combine(_spellImageFolder, $"{spell.Id}.png"));
            }
        }

        foreach (var summonerSpell in summonerSpells)
        {
            await DownloadImageAsync(
                string.Format(cSpellImageUrl, version, summonerSpell.Id),
                Path.Combine(_summonerSpellImageFolder, $"{summonerSpell.Id}.png"));
        }

        foreach (var item in items)
        {
            await DownloadImageAsync(
                string.Format(cItemImageUrl, version, item.Id),
                Path.Combine(_itemImageFolder, $"{item.Id}.png"));
        }
    }

    var leagueDeckVersion = GetLeagueDeckVersion();
    _data = new LeagueData
    {
        LeagueDeckVersion = leagueDeckVersion,
        Champions = champions,
        SummonerSpells = summonerSpells,
        Items = items,
    };

    var json = JsonConvert.SerializeObject(_data);
    File.WriteAllText(path, json);

    Logger.Instance.LogMessage(TracingLevel.DEBUG, "UpdateData - completed");
}