/// <summary>
/// Uploads the given files in parallel on the thread pool (one FileUploader per
/// file), blocks until every upload signals completion, then batch-writes the
/// cached-hash and sync-log records for the batch.
/// </summary>
/// <param name="filesToUpload">Files to upload in this batch; may be empty.</param>
private void uploadFiles(List<FileItem> filesToUpload)
{
    this.uploadedBytes.Clear();

    int taskMax = filesToUpload.Count;
    if (taskMax == 0)
    {
        // Nothing to upload; also avoids waiting on an empty handle array,
        // which would throw ArgumentException.
        return;
    }

    ManualResetEvent[] doneEvents = new ManualResetEvent[taskMax];
    this.uploadInfos = new UploadInfo[taskMax];

    for (int taskId = 0; taskId < taskMax; taskId++)
    {
        this.uploadInfos[taskId] = new UploadInfo();
        doneEvents[taskId] = new ManualResetEvent(false);
        FileUploader uploader = new FileUploader(this.syncSetting, doneEvents[taskId], this, taskId, upctl);
        ThreadPool.QueueUserWorkItem(new WaitCallback(uploader.uploadFile), filesToUpload[taskId]);
    }

    try
    {
        // Wait on each handle individually instead of WaitHandle.WaitAll:
        // WaitAll is limited to 64 handles and throws NotSupportedException
        // on an STA thread (e.g. a WinForms/WPF UI thread). Waiting the
        // events one by one is equivalent — the method returns only after
        // every upload has signalled.
        foreach (ManualResetEvent doneEvent in doneEvents)
        {
            doneEvent.WaitOne();
        }

        CachedHash.BatchInsertOrUpdate(filesToUpload, localHashDB);
        SyncLog.BatchInsertOrUpdate(filesToUpload, syncLogDB);
    }
    catch (Exception ex)
    {
        // Best-effort: a failed wait/DB flush is logged, not propagated,
        // so the sync loop in processUpload can continue.
        Log.Error("wait for job to complete error, " + ex.Message);
    }
}
/// <summary>
/// Drives the upload loop: processes uploadItems in batches of at most
/// SyncThreadCount items until all are done or cancellation is signalled,
/// then flushes any accumulated hash records to the local hash database.
/// (Updated 2016-09-22 16:40 fengyh)
/// </summary>
private void processUpload()
{
    try
    {
        while (currentIndex < uploadItems.Count)
        {
            if (this.cancelSignal)
            {
                return;
            }

            // NOTE(review): this calls an uploadFiles(int) overload that takes
            // a batch size, not the List<FileItem> overload — confirm that
            // overload exists elsewhere in this class.
            int itemsLeft = uploadItems.Count - currentIndex;
            int batchSize = Math.Min(itemsLeft, this.syncSetting.SyncThreadCount);
            this.uploadFiles(batchSize);
            currentIndex += batchSize;
        }

        #region UPDATE_HASH_DB
        if (dbItems.Count > 0)
        {
            try
            {
                if (!File.Exists(this.localHashDBPath))
                {
                    CachedHash.CreateCachedHashDB(this.localHashDBPath);
                }

                var qsb = new SQLiteConnectionStringBuilder { DataSource = this.localHashDBPath };

                // 'using' guarantees the connection is closed/disposed even if
                // BatchInsertOrUpdate throws (replaces the manual try/finally
                // Close of the original).
                using (var sqlConn = new SQLiteConnection(qsb.ToString()))
                {
                    sqlConn.Open();
                    CachedHash.BatchInsertOrUpdate(dbItems, sqlConn);
                }
            }
            catch (Exception ex)
            {
                // DB flush failure is logged but does not abort the sync run.
                Log.Error(ex.Message);
            }
        }
        #endregion UPDATE_HASH_DB
    }
    catch (Exception ex)
    {
        Log.Fatal(ex.Message);
    }
}