/// <summary>
/// BackgroundWorker entry point: indexes a single folder. Reads the target
/// path from <see cref="DoWorkEventArgs.Argument"/> (a BWArgs), builds a
/// FolderData snapshot for it, and collects the names (not full paths) of its
/// immediate subdirectories. I/O failures never escape — they are reported
/// through the Exists flag of the BWResults stored in
/// <see cref="DoWorkEventArgs.Result"/>.
/// </summary>
void bw_DoWork(object sender, DoWorkEventArgs e)
{
    BWArgs args = (BWArgs)e.Argument;

    DirectoryInfo di;
    try
    {
        di = new DirectoryInfo(args.FullPath);
    }
    catch
    {
        // Path was malformed (bad characters, too long, security restriction):
        // report a non-existent folder rather than faulting the worker.
        e.Result = new BWResults(null, null, args.FullPath, false);
        return;
    }

    FolderData fd = new FolderData(di);

    string[] subfolders;
    try
    {
        subfolders = Directory.GetDirectories(di.FullName);
    }
    catch
    {
        // Folder vanished or access was denied between checks — best effort:
        // treat it as having no children instead of crashing the worker.
        subfolders = StringUtil.Empty;
    }

    // Reduce each full path to just the final folder name. Path.GetFileName
    // handles both the primary and alternate directory separators, unlike the
    // manual LastIndexOf(Path.DirectorySeparatorChar) approach.
    for (int i = 0; i < subfolders.Length; i++)
    {
        subfolders[i] = Path.GetFileName(subfolders[i]);
    }

    e.Result = new BWResults(fd, subfolders, args.FullPath, di.Exists);
}
/// <summary>
/// Runs (on the worker's completion context) after one folder has been
/// indexed by bw_DoWork. Merges the result into <c>dataStore</c>, queues any
/// subfolders when the current job is recursive, then either continues
/// draining the current job's sub-folder queue or finishes the job and starts
/// the next one from <c>JobBuffer</c>.
/// </summary>
void bw_RunWorkerCompleted(object sender, RunWorkerCompletedEventArgs e)
{
    // NOTE(review): if bw_DoWork threw, reading e.Result rethrows that
    // exception. bw_DoWork appears to catch its I/O failures, but the
    // FolderData constructor call there is unguarded — verify it cannot throw.
    BWResults results = (BWResults)e.Result;

    // The job this folder belongs to. Safe to read once: nothing below
    // modifies JobBuffer before the Dequeue in the else-branch.
    var currentJob = JobBuffer.Peek();

    // '&&' (short-circuit) instead of the original non-short-circuit '&';
    // both operands are plain bools, so this is the intended idiom.
    if (results.Exists && !cancel)
    {
        Tree<FolderData> node = dataStore.CreatePath(results.FullPath, FolderData.Empty);
        // Track what folders we threw in so we can delete if necessary.
        ProcessedSoFar.CreatePath(results.FullPath, FolderData.Empty);
        node.Data = results.FD;

        if (currentJob.Recurse)
        {
            // Recursive job: each subfolder becomes its own indexing pass.
            for (int i = 0; i < results.Folders.Length; i++)
            {
                CurrentJobBuffer.Enqueue(Path.Combine(results.FullPath, results.Folders[i]));
            }
        }
        else
        {
            // Non-recursive job: record children as empty placeholder nodes.
            for (int i = 0; i < results.Folders.Length; i++)
            {
                dataStore.CreatePath(Path.Combine(results.FullPath, results.Folders[i]), FolderData.Empty);
            }
        }
    }

    OnFolderProcessed(results.FullPath, true);

    if (CurrentJobBuffer.Count > 0) // finished a sub-job: keep draining the current job
    {
        indexingWorker.RunWorkerAsync(new BWArgs(CurrentJobBuffer.Dequeue(), currentJob.Recurse));
    }
    else // main job is done
    {
        OnJobComplete(currentJob.Path, currentJob.Recurse);
        cancel = false;
        JobBuffer.Dequeue();
        if (JobBuffer.Count > 0)
        {
            // Start the next queued job (re-Peek: this is the job AFTER the
            // Dequeue above, not currentJob).
            var nextJob = JobBuffer.Peek();
            indexingWorker.RunWorkerAsync(new BWArgs(nextJob.Path, nextJob.Recurse));
        }
    }
}