// Computes the MD5 hash of the file at 'path' on the FunapiManager coroutine
// scheduler.  When the file does not exist, reports failure immediately via
// on_result(path, file, false) instead of starting the coroutine.
public static void Compute (ref string path, ref DownloadFileInfo file, OnResult on_result)
{
    if (!File.Exists(path))
    {
        DebugUtils.Log("MD5Async.Compute - Can't find a file.\npath: {0}", path);
        if (on_result != null)
            on_result(path, file, false);
        return;
    }

    FunapiManager.instance.StartCoroutine(AsyncCompute(path, file, on_result));
}
// Downloading files.
// Starts the download of the next queued file, or completes the downloader
// when the queue is empty.  No-op unless we are in the Downloading state.
void downloadResourceFile ()
{
    if (state_ != State.Downloading)
        return;

    if (download_list_.Count <= 0)
    {
        // Queue drained - persist the cached list and report success.
        updateCachedList();

        download_time_.Stop();
        FunDebug.Log("Downloader took {0:F2}s for download all files.",
                     download_time_.ElapsedMilliseconds / 1000f);

        state_ = State.Completed;
        FunDebug.Log("Downloader - Download completed.");

        onFinished(DownloadResult.SUCCESS);
        return;
    }

    DownloadFileInfo file = download_list_[0];

    // Make sure the destination directory for this file exists.
    string dir_path = target_path_;
    int slash = file.path.LastIndexOf('/');
    if (slash >= 0)
        dir_path += file.path.Substring(0, slash);

    if (!Directory.Exists(dir_path))
        Directory.CreateDirectory(dir_path);

    // Delete any stale local copy before downloading a fresh one.
    string local_path = target_path_ + file.path;
    if (File.Exists(local_path))
        File.Delete(local_path);

    // Requests a file.
    string request_url = host_url_ + file.path;
    FunDebug.DebugLog1("Download a file - {0}\nSave to {1}\n", request_url, local_path);

    // Download into a randomly-named temp file in the destination directory.
    cur_download_path_ = Path.GetDirectoryName(local_path);
    cur_download_path_ += "/" + Path.GetRandomFileName();

    web_client_.DownloadFileAsync(new Uri(request_url), cur_download_path_, file);
}
// Loads the cached file list ("target_path_ + kCachedFileName") into
// cached_list_.  Each JSON node carries { path, size, hash, [front] };
// a missing "front" entry (hash of the file's leading block) maps to "".
// Does nothing when the cache file is absent; warns when it is empty.
void loadCachedList ()
{
    cached_list_.Clear();

    string path = target_path_ + kCachedFileName;
    if (!File.Exists(path))
        return;

    // File.ReadAllText opens, reads and closes the file even when reading
    // throws; the previous manual StreamReader open/read/close sequence
    // leaked the handle on error.
    string data = File.ReadAllText(path);
    if (data.Length <= 0)
    {
        FunDebug.LogWarning("Failed to get a cached file list.");
        return;
    }

    Dictionary<string, object> json = Json.Deserialize(data) as Dictionary<string, object>;
    List<object> list = json["list"] as List<object>;

    foreach (Dictionary<string, object> node in list)
    {
        DownloadFileInfo info = new DownloadFileInfo();
        info.path = node["path"] as string;
        info.size = Convert.ToUInt32(node["size"]);
        info.hash = node["hash"] as string;
        // Optional front-block hash used for quick verification.
        if (node.ContainsKey("front"))
            info.hash_front = node["front"] as string;
        else
            info.hash_front = "";

        cached_list_.Add(info);
    }

    FunDebug.DebugLog("Loads cached list : {0}", cached_list_.Count);
}
// Downloading files.
// Downloads the next file in download_list_, or finishes the whole run with
// SUCCESS when the list is empty.
private void DownloadResourceFile ()
{
    if (download_list_.Count <= 0)
    {
        // Nothing left to fetch - flush the cached list and report success.
        UpdateCachedList();

        TimeSpan span = new TimeSpan(DateTime.Now.Ticks - check_time_.Ticks);
        DebugUtils.Log("File download total time - {0:F2}s", span.TotalMilliseconds / 1000f);

        state_ = State.Completed;
        DebugUtils.Log("Download completed.");

        OnFinishedCallback(DownloadResult.SUCCESS);
        return;
    }

    DownloadFileInfo file = download_list_[0];

    // Make sure the destination directory exists.
    string dir_path = target_path_;
    int slash = file.path.LastIndexOf('/');
    if (slash >= 0)
        dir_path += file.path.Substring(0, slash);

    if (!Directory.Exists(dir_path))
        Directory.CreateDirectory(dir_path);

    // Remove any stale copy of the target file.
    string local_path = target_path_ + file.path;
    if (File.Exists(local_path))
        File.Delete(local_path);

    DebugUtils.Log("Download file - {0}", local_path);

    // Download into a randomly-named temp file next to the final location.
    retry_download_count_ = 0;
    cur_download_path_ = Path.GetDirectoryName(local_path);
    cur_download_path_ += "/" + Path.GetRandomFileName();

    web_client_.DownloadFileAsync(new Uri(host_url_ + file.path), cur_download_path_, file);
}
// Computes the MD5 hash of the file at 'path' using the given MonoBehaviour's
// coroutine scheduler, then reports through on_result.
// When the file does not exist, reports on_result(path, file, false) at once.
// NOTE(review): under NO_UNITY the ref arguments are copied to locals first,
// presumably because 'ref' parameters cannot be captured by the delegate —
// confirm against the NO_UNITY StartCoroutine implementation.
public static void Compute (MonoBehaviour mono, ref string path, ref DownloadFileInfo file, OnResult on_result)
{
    if (File.Exists(path))
    {
#if !NO_UNITY
        mono.StartCoroutine(asyncCompute(path, file, on_result));
#else
        string path_ = path;
        DownloadFileInfo file_ = file;
        mono.StartCoroutine(delegate { asyncCompute(path_, file_, on_result); });
#endif
        return;
    }

    FunDebug.Log("MD5Async.Compute - Can't find a file.\npath: {0}", path);
    if (on_result != null)
    {
        on_result(path, file, false);
    }
}
// Coroutine that computes the MD5 hash of the file at 'path' and reports the
// result via on_result(path, file, match).
// If file.hash_front is non-empty, the hash of the first block is checked as a
// cheap early-out before hashing the whole file.  Hashing yields periodically
// (every kSleepCountMax blocks) so large files do not stall a single frame.
public static IEnumerator Compute (string path, DownloadFileInfo file, OnResult on_result)
{
    if (!File.Exists(path))
    {
        FunDebug.LogWarning("MD5Async.Compute - Can't find a file.\npath: {0}", path);
        if (on_result != null)
        {
            on_result(path, file, false);
        }
        yield break;
    }

    MD5 md5 = MD5.Create();
    int length, read_bytes;
    byte[] buffer = new byte[kBlockSize];
    string md5hash = "";

    FileStream stream = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read);
    if (stream.Length > 0)
    {
        if (file.hash_front.Length > 0)
        {
            // Hash only the first block and compare with the expected
            // front-block hash.
            length = (stream.Length < kBlockSize) ? (int)stream.Length : kBlockSize;
            read_bytes = stream.Read(buffer, 0, length);
            md5.TransformFinalBlock(buffer, 0, read_bytes);
            md5hash = FunapiUtils.BytesToHex(md5.Hash);

            // Early out when the front block mismatches, or when the whole
            // file fit inside the first block (nothing more to hash).
            if (md5hash != file.hash_front || length == stream.Length)
            {
                stream.Close();
                if (on_result != null)
                {
                    on_result(path, file, md5hash == file.hash_front && md5hash == file.hash);
                }
                yield break;
            }

            // Front block matched; restart from the beginning to compute the
            // full-file hash with a fresh MD5 instance.
            md5.Clear();
            md5 = MD5.Create();
            stream.Position = 0;
            yield return (null);
        }

        // Hash the file block by block, yielding every kSleepCountMax blocks.
        int sleep_count = 0;
        while (stream.Position < stream.Length)
        {
            length = kBlockSize;
            if (stream.Position + length > stream.Length)
            {
                length = (int)(stream.Length - stream.Position);
            }

            read_bytes = stream.Read(buffer, 0, length);

            if (stream.Position < stream.Length)
            {
                md5.TransformBlock(buffer, 0, read_bytes, buffer, 0);
            }
            else
            {
                // Last block of the file finalizes the digest.
                md5.TransformFinalBlock(buffer, 0, read_bytes);
                break;
            }

            ++sleep_count;
            if (sleep_count >= kSleepCountMax)
            {
                sleep_count = 0;
                yield return (null);
            }
        }
    }
    else
    {
        // Empty file: finalize the digest over zero bytes.
        md5.TransformFinalBlock(buffer, 0, 0);
    }

    stream.Close();

    md5hash = FunapiUtils.BytesToHex(md5.Hash);
    if (on_result != null)
    {
        on_result(path, file, md5hash == file.hash);
    }
}
// NOTE(review): this is the NO_UNITY (plain 'void') variant of asyncCompute;
// its matching '#if !NO_UNITY / #else' header lines are outside this span, so
// the '#endif' below closes a conditional opened elsewhere.  Under !NO_UNITY
// the signature in force is the IEnumerator overload, which is why 'yield'
// statements appear inside '#if !NO_UNITY' regions of this body.
//
// Computes the MD5 hash of the file at 'path', with an optional front-block
// early-out (file.hash_front), and reports via on_result(path, file, match).
static void asyncCompute (string path, DownloadFileInfo file, OnResult on_result)
#endif
{
    MD5 md5 = MD5.Create();
    int length, read_bytes;
    byte[] buffer = new byte[kBlockSize];
    string md5hash = "";

    FileStream stream = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read);
    if (stream.Length > 0)
    {
        if (file.hash_front.Length > 0)
        {
            // Hash only the first block and compare with the expected
            // front-block hash.
            length = (stream.Length < kBlockSize) ? (int)stream.Length : kBlockSize;
            read_bytes = stream.Read(buffer, 0, length);
            md5.TransformFinalBlock(buffer, 0, read_bytes);
            md5hash = makeHashString(md5.Hash);

            // Early out on front-block mismatch, or when the whole file fit
            // inside the first block.
            if (md5hash != file.hash_front || length == stream.Length)
            {
                stream.Close();
                if (on_result != null)
                {
                    on_result(path, file, md5hash == file.hash_front && md5hash == file.hash);
                }
#if !NO_UNITY
                yield break;
#else
                return;
#endif
            }

            // Front block matched; restart hashing for the full file.
            md5.Clear();
            md5 = MD5.Create();
            stream.Position = 0;
#if !NO_UNITY
            yield return (new WaitForEndOfFrame());
#endif
        }

        // Hash block by block, pausing every kSleepCountMax blocks
        // (frame yield under Unity, short sleep otherwise).
        int sleep_count = 0;
        while (stream.Position < stream.Length)
        {
            length = kBlockSize;
            if (stream.Position + length > stream.Length)
            {
                length = (int)(stream.Length - stream.Position);
            }

            read_bytes = stream.Read(buffer, 0, length);

            if (stream.Position < stream.Length)
            {
                md5.TransformBlock(buffer, 0, read_bytes, buffer, 0);
            }
            else
            {
                // Last block of the file finalizes the digest.
                md5.TransformFinalBlock(buffer, 0, read_bytes);
                break;
            }

            ++sleep_count;
            if (sleep_count >= kSleepCountMax)
            {
                sleep_count = 0;
#if !NO_UNITY
                yield return (new WaitForEndOfFrame());
#else
                Thread.Sleep(30);
#endif
            }
        }
    }
    else
    {
        // Empty file: finalize the digest over zero bytes.
        md5.TransformFinalBlock(buffer, 0, 0);
    }

    stream.Close();

    md5hash = makeHashString(md5.Hash);
    if (on_result != null)
    {
        on_result(path, file, md5hash == file.hash);
    }
}
static IEnumerator asyncCompute(string path, DownloadFileInfo file, OnResult on_result)
// Coroutine that computes the MD5 hash of the file at 'path' and reports via
// on_result(path, file, match).
// If file.hash_front is non-empty, the hash of the first block is checked as a
// cheap early-out before hashing the whole file.  Yields a frame every
// kMaxSleepCount blocks so large files do not stall the main thread.
static IEnumerator AsyncCompute (string path, DownloadFileInfo file, OnResult on_result)
{
    MD5 md5 = MD5.Create();
    int length, read_bytes;
    byte[] buffer = new byte[kBlockSize];
    string md5hash = "";

    FileStream stream = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read);
    if (stream.Length > 0)
    {
        if (file.hash_front.Length > 0)
        {
            // Hash only the first block and compare with the expected
            // front-block hash.
            length = (stream.Length < kBlockSize) ? (int)stream.Length : kBlockSize;
            read_bytes = stream.Read(buffer, 0, length);
            md5.TransformFinalBlock(buffer, 0, read_bytes);
            md5hash = MakeHashString(md5.Hash);

            // Early out on front-block mismatch, or when the whole file fit
            // inside the first block.
            if (md5hash != file.hash_front || length == stream.Length)
            {
                stream.Close();
                if (on_result != null)
                    on_result(path, file, md5hash == file.hash_front && md5hash == file.hash);
                yield break;
            }

            // Front block matched; restart from the beginning for the
            // full-file hash with a fresh MD5 instance.
            md5.Clear();
            md5 = MD5.Create();
            stream.Position = 0;
            yield return new WaitForEndOfFrame();
        }

        // Hash block by block, yielding a frame every kMaxSleepCount blocks.
        int sleep_count = 0;
        while (stream.Position < stream.Length)
        {
            length = (stream.Position + kBlockSize > stream.Length) ? (int)(stream.Length - stream.Position) : kBlockSize;
            read_bytes = stream.Read(buffer, 0, length);

            if (stream.Position < stream.Length)
            {
                md5.TransformBlock(buffer, 0, read_bytes, buffer, 0);
            }
            else
            {
                // Last block of the file finalizes the digest.
                md5.TransformFinalBlock(buffer, 0, read_bytes);
                break;
            }

            ++sleep_count;
            if (sleep_count % kMaxSleepCount == 0)
                yield return new WaitForEndOfFrame();
        }
    }
    else
    {
        // Empty file: finalize the digest over zero bytes.
        md5.TransformFinalBlock(buffer, 0, 0);
    }

    stream.Close();

    md5hash = MakeHashString(md5.Hash);
    if (on_result != null)
        on_result(path, file, md5hash == file.hash);
}
// Callback function for list of files.
// Parses the downloaded JSON manifest, applies an optional "url" redirect,
// rebuilds download_list_ from the "data" array, and schedules the local file
// check.  Any parse/transfer error stops the downloader.  The mutex serializes
// this callback against Stop(); it is always released in 'finally'.
private void DownloadDataCompleteCb (object sender, DownloadDataCompletedEventArgs ar)
{
    mutex_.WaitOne();

    bool failed = false;
    try
    {
        if (ar.Error != null)
        {
            DebugUtils.Log("Exception Error: {0}", ar.Error);
            failed = true;
        }
        else
        {
            // It can be true when CancelAsync() called in Stop().
            if (ar.Cancelled)
                return;

            // Parse json.  Decoded as UTF-8 (was ASCII) so that non-ASCII
            // file paths in the manifest survive; this matches the newer
            // downloadDataCompleteCb implementation.
            string data = Encoding.UTF8.GetString(ar.Result);
            Dictionary<string, object> json = Json.Deserialize(data) as Dictionary<string, object>;
            //DebugUtils.Log("Json data >> {0}", data);

            // Redirect url
            if (json.ContainsKey("url"))
            {
                string url = json["url"] as string;
                if (url[url.Length - 1] != '/')
                    url += "/";

                host_url_ = url;
                DebugUtils.Log("Redirect download url: {0}", host_url_);
            }

            List<object> list = json["data"] as List<object>;
            if (list.Count <= 0)
            {
                DebugUtils.Log("Invalid list data. List count is 0.");
                DebugUtils.Assert(false);
                failed = true;
            }
            else
            {
                download_list_.Clear();

                foreach (Dictionary<string, object> node in list)
                {
                    DownloadFileInfo info = new DownloadFileInfo();
                    info.path = node["path"] as string;
                    info.size = Convert.ToUInt32(node["size"]);
                    info.hash = node["md5"] as string;
                    // Optional hash of the file's leading block.
                    if (node.ContainsKey("md5_front"))
                        info.hash_front = node["md5_front"] as string;
                    else
                        info.hash_front = "";

                    download_list_.Add(info);
                }

                // Checks files
                manager_.AddEvent(() => manager_.StartCoroutine(CheckFileList(download_list_)));
            }
        }
    }
    catch (Exception e)
    {
        DebugUtils.Log("Failure in DownloadDataCompleteCb: {0}", e.ToString());
        failed = true;
    }
    finally
    {
        mutex_.ReleaseMutex();
    }

    if (failed)
    {
        Stop();
    }
}
// Loads the cached file list ("target_path_ + kCachedFileName") into
// cached_list_.  Each JSON node carries { path, size, hash, [front] };
// a missing "front" entry (hash of the file's leading block) maps to "".
// Does nothing when the cache file is absent; warns when it is empty.
private void LoadCachedList ()
{
    cached_list_.Clear();

    string path = target_path_ + kCachedFileName;
    if (!File.Exists(path))
        return;

    // File.ReadAllText opens, reads and closes the file even when reading
    // throws; the previous manual StreamReader open/read/close sequence
    // leaked the handle on error.
    string data = File.ReadAllText(path);
    if (data.Length <= 0)
    {
        DebugUtils.LogWarning("Failed to get a cached file list.");
        return;
    }

    Dictionary<string, object> json = Json.Deserialize(data) as Dictionary<string, object>;
    List<object> list = json["list"] as List<object>;

    foreach (Dictionary<string, object> node in list)
    {
        DownloadFileInfo info = new DownloadFileInfo();
        info.path = node["path"] as string;
        info.size = Convert.ToUInt32(node["size"]);
        info.hash = node["hash"] as string;
        // Optional front-block hash used for quick verification.
        if (node.ContainsKey("front"))
            info.hash_front = node["front"] as string;
        else
            info.hash_front = "";

        cached_list_.Add(info);
    }

    DebugUtils.DebugLog("Loads cached list : {0}", cached_list_.Count);
}
// Callback function for list of files.
// Parses the downloaded JSON manifest, applies an optional "url" redirect,
// rebuilds download_list_ from the "data" array, and queues the local file
// check on the event list.  Any parse/transfer error stops the downloader.
// The mutex serializes this callback against Stop(); always released in
// 'finally'.
void downloadDataCompleteCb (object sender, DownloadDataCompletedEventArgs ar)
{
    mutex_.WaitOne();

    bool failed = false;
    try
    {
        if (ar.Error != null)
        {
            FunDebug.Log("Exception Error: {0}", ar.Error);
            failed = true;
        }
        else
        {
            // It can be true when CancelAsync() called in Stop().
            if (ar.Cancelled)
            {
                return;
            }

            // Parse json
            string data = Encoding.UTF8.GetString(ar.Result);
            Dictionary <string, object> json = Json.Deserialize(data) as Dictionary <string, object>;
            //FunDebug.Log("Json data >> {0}", data);

            // Redirect url
            if (json.ContainsKey("url"))
            {
                string url = json["url"] as string;
                if (url[url.Length - 1] != '/')
                {
                    url += "/";
                }

                host_url_ = url;
                FunDebug.Log("Redirect download url: {0}", host_url_);
            }

            List <object> list = json["data"] as List <object>;
            if (list.Count <= 0)
            {
                FunDebug.Log("Invalid list data. List count is 0.");
                FunDebug.Assert(false);
                failed = true;
            }
            else
            {
                download_list_.Clear();

                foreach (Dictionary <string, object> node in list)
                {
                    DownloadFileInfo info = new DownloadFileInfo();
                    info.path = node["path"] as string;
                    info.size = Convert.ToUInt32(node["size"]);
                    info.hash = node["md5"] as string;
                    // Optional hash of the file's leading block.
                    if (node.ContainsKey("md5_front"))
                    {
                        info.hash_front = node["md5_front"] as string;
                    }
                    else
                    {
                        info.hash_front = "";
                    }

                    download_list_.Add(info);
                }

                // Checks files
#if !NO_UNITY
                event_list.Add(() => mono.StartCoroutine(checkFileList(download_list_)));
#else
                event_list.Add(() => mono.StartCoroutine(() => checkFileList(download_list_)));
#endif
            }
        }
    }
    catch (Exception e)
    {
        FunDebug.Log("Failure in downloadDataCompleteCb: {0}", e.ToString());
        failed = true;
    }
    finally
    {
        mutex_.ReleaseMutex();
    }

    if (failed)
    {
        Stop();
    }
}
// NOTE(review): this is the NO_UNITY (plain 'void') variant of checkFileList;
// its matching '#if !NO_UNITY / #else' header lines are outside this span, so
// the '#endif' below closes a conditional opened elsewhere.  Under !NO_UNITY
// the signature in force is an IEnumerator overload, which is why 'yield'
// statements appear inside '#if !NO_UNITY' regions of this body.
//
// Verifies the locally cached files against the server manifest 'list':
//  1. a random ~10% sample (plus files that look suspicious: name starts
//     with '_', or modified after the cache file) is MD5-verified;
//  2. if any sampled file fails, every cached file is re-verified;
//  3. files that pass are removed from 'list' (no re-download needed),
//     failures stay in 'list' and their cache entries are dropped.
// Finally either signals ReadyCallback with the remaining download work, or
// completes immediately when everything is up to date.
void checkFileList (List <DownloadFileInfo> list)
#endif
{
    List <DownloadFileInfo> tmp_list = new List <DownloadFileInfo>(list);
    List <string> verify_file_list = new List <string>();
    List <string> remove_list = new List <string>();
    Queue <int> rnd_list = new Queue <int>();
    bool verify_success = true;
    int rnd_index = -1;
    DateTime cached_time = File.GetLastWriteTime(target_path_ + kCachedFileName);
    Stopwatch elapsed_time = new Stopwatch();
    elapsed_time.Start();

    delete_file_list_.Clear();

    // Randomly check list
    // Picks between 1 and 10 distinct random indices (~10% of the cache).
    if (cached_list_.Count > 0)
    {
        int max_count = cached_list_.Count;
        int count = Math.Min(Math.Max(1, max_count / 10), 10);
        System.Random rnd = new System.Random((int)DateTime.Now.Ticks);

        while (rnd_list.Count < count)
        {
            rnd_index = rnd.Next(1, max_count + 1) - 1;
            if (!rnd_list.Contains(rnd_index))
            {
                rnd_list.Enqueue(rnd_index);
            }
        }
        FunDebug.DebugLog("Random check file count is {0}", rnd_list.Count);

        rnd_index = rnd_list.Count > 0 ? rnd_list.Dequeue() : -1;
    }

    // Checks local files
    int index = 0;
    foreach (DownloadFileInfo file in cached_list_)
    {
        DownloadFileInfo item = list.Find(i => i.path == file.path);
        if (item != null)
        {
            string path = target_path_ + file.path;
            FileInfo info = new FileInfo(path);

            // Cache entry is stale when the file is gone or size/hash differ.
            if (!File.Exists(path) || item.size != info.Length || item.hash != file.hash)
            {
                remove_list.Add(file.path);
            }
            else
            {
                string filename = Path.GetFileName(item.path);
                // MD5-verify files that are sampled, '_'-prefixed, or were
                // modified after the cache list was written.
                if (filename[0] == '_' || index == rnd_index ||
                    File.GetLastWriteTime(path).Ticks > cached_time.Ticks)
                {
                    if (index == rnd_index)
                    {
                        rnd_index = rnd_list.Count > 0 ?
                                    rnd_list.Dequeue() : -1;
                    }

                    verify_file_list.Add(file.path);

                    MD5Async.Compute(mono, ref path, ref item,
                        delegate (string p, DownloadFileInfo f, bool is_match)
                        {
                            if (VerifyCallback != null)
                            {
                                VerifyCallback(p);
                            }

                            verify_file_list.Remove(f.path);

                            if (is_match)
                            {
                                list.Remove(f);
                            }
                            else
                            {
                                remove_list.Add(f.path);
                                verify_success = false;
                            }
                        }
                    );
                }
                else
                {
                    // Trusted without hashing: no re-download needed.
                    list.Remove(item);
                }
            }
        }
        else
        {
            // No longer on the server manifest; drop from the cache.
            remove_list.Add(file.path);
        }

        ++index;
    }

    // Busy-wait until all async MD5 verifications above have reported back.
    while (verify_file_list.Count > 0)
    {
#if !NO_UNITY
        yield return (new WaitForSeconds(0.1f));
#else
        Thread.Sleep(100);
#endif
    }

    removeCachedList(remove_list);

    FunDebug.Log("Random validation has {0}", (verify_success ? "succeeded" : "failed"));

    // Checks all local files
    // A sampled failure means the cache cannot be trusted: verify everything.
    if (!verify_success)
    {
        foreach (DownloadFileInfo file in cached_list_)
        {
            DownloadFileInfo item = tmp_list.Find(i => i.path == file.path);
            if (item != null)
            {
                verify_file_list.Add(file.path);

                string path = target_path_ + file.path;
                MD5Async.Compute(mono, ref path, ref item,
                    delegate (string p, DownloadFileInfo f, bool is_match)
                    {
                        if (VerifyCallback != null)
                        {
                            VerifyCallback(p);
                        }

                        verify_file_list.Remove(f.path);

                        if (!is_match)
                        {
                            remove_list.Add(f.path);

                            if (!list.Contains(f))
                            {
                                list.Add(f);
                            }
                        }
                    }
                );
            }
        }

        while (verify_file_list.Count > 0)
        {
#if !NO_UNITY
            yield return (new WaitForSeconds(0.1f));
#else
            Thread.Sleep(100);
#endif
        }

        removeCachedList(remove_list);
    }

    elapsed_time.Stop();
    FunDebug.Log("File check total time - {0:F2}s", elapsed_time.ElapsedMilliseconds / 1000f);

    total_download_count_ = list.Count;
    foreach (DownloadFileInfo item in list)
    {
        total_download_size_ += item.size;
    }

    if (total_download_count_ > 0)
    {
        state_ = State.Ready;

        event_list.Add(delegate
        {
            if (ReadyCallback != null)
            {
                ReadyCallback(total_download_count_, total_download_size_);
            }
        });
    }
    else
    {
        deleteLocalFiles();
        updateCachedList();

        state_ = State.Completed;
        FunDebug.Log("All resources are up to date.");
        onFinished(DownloadResult.SUCCESS);
    }
}