public static LocalFile getFileInfo(string filePath)
{
    LocalFile currentFile = new LocalFile();

    // File extension (empty if the path contains no '.')
    try
    {
        int indexOfDot = filePath.LastIndexOf('.');
        currentFile.type = filePath.Substring(indexOfDot);
    }
    catch (Exception)
    {
        currentFile.type = "";
    }

    // File name (falls back to the full path if there is no '\')
    try
    {
        int indexOfSlash = filePath.LastIndexOf('\\');
        currentFile.name = filePath.Substring(indexOfSlash + 1);
    }
    catch (Exception)
    {
        currentFile.name = filePath;
    }

    currentFile.size = new FileInfo(filePath).Length.ToString();

    // Name of the immediate parent folder
    try
    {
        int indexOfLastSlash = filePath.LastIndexOf('\\');
        currentFile.parentFolder = filePath.Substring(0, indexOfLastSlash);
        int indexOfSecondSlash = currentFile.parentFolder.LastIndexOf('\\');
        currentFile.parentFolder = currentFile.parentFolder.Substring(indexOfSecondSlash + 1);
    }
    catch (Exception)
    {
        currentFile.parentFolder = filePath;
    }

    currentFile.location = filePath;

    // Skip well-known system/metadata files
    string lowerName = currentFile.name.ToLower();
    if (lowerName.Contains("thumbs.db") ||
        lowerName.Contains("albumart") ||
        lowerName.Contains("desktop.ini") ||
        lowerName.Contains("folder.jpg"))
    {
        //Utils.writeLog("getFileInfo: Ignored file " + filePath);
        return null;
    }

    if ((currentFile.hash = GenerateHash(filePath)) == null)
    {
        Utils.writeLog("getFileInfo: Unable to generate hash of file " + filePath);
        return null;
    }

    /*
     * List<string> propertyHeaders = new List<string>();
     * Dictionary<string, string> properties = new Dictionary<string, string>();
     *
     * Shell32.Shell shell = new Shell32.Shell();
     * Shell32.Folder locationFolder;
     *
     * locationFolder = shell.NameSpace(filePath);
     *
     * String s = "";
     *
     * for (int i = 0; i < short.MaxValue; i++)
     * {
     *     string header = locationFolder.GetDetailsOf(null, i);
     *     if (String.IsNullOrEmpty(header))
     *         break;
     *     propertyHeaders.Add(header);
     * }
     *
     * foreach (Shell32.FolderItem item in locationFolder.Items())
     * {
     *     for (int i = 0; i < propertyHeaders.Count; i++)
     *     {
     *         properties.Add(propertyHeaders[i], locationFolder.GetDetailsOf(item, i));
     *         s = s + propertyHeaders[i] + " : " + locationFolder.GetDetailsOf(item, i) + "\n";
     *     }
     * }
     * MessageBox.Show(s);
     */

    currentFile.computeKeywords();
    return currentFile;
}
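// LocalFile and GenerateHash are referenced above but defined elsewhere in the project.
// The sketches below are assumptions made for illustration, not the project's actual
// implementations: LocalFile is treated as a plain data holder for the fields that
// getFileInfo populates, and GenerateHash as a SHA-1 hex digest of the file contents
// that returns null when the file cannot be read (the null case getFileInfo checks for).
[Serializable]
public class LocalFile
{
    public string type;            // file extension, including the dot
    public string name;            // file name without the directory
    public string size;            // file size in bytes, stored as a string
    public string parentFolder;    // name of the immediate parent folder
    public string location;        // full path on disk
    public string hash;            // content hash used as the index key
    public List<string> keywords = new List<string>();

    // Hypothetical keyword extraction: split the name and parent folder on
    // non-alphanumeric characters so the file can be matched by search terms.
    public void computeKeywords()
    {
        keywords.Clear();
        foreach (string token in System.Text.RegularExpressions.Regex.Split(name + " " + parentFolder, @"[^A-Za-z0-9]+"))
        {
            if (token.Length > 0)
            {
                keywords.Add(token.ToLower());
            }
        }
    }
}

public static string GenerateHash(string filePath)
{
    try
    {
        using (FileStream stream = new FileStream(filePath, FileMode.Open, FileAccess.Read))
        using (System.Security.Cryptography.SHA1 sha1 = System.Security.Cryptography.SHA1.Create())
        {
            byte[] digest = sha1.ComputeHash(stream);
            return BitConverter.ToString(digest).Replace("-", "");   // hex string without dashes
        }
    }
    catch (Exception ex)
    {
        Utils.writeLog("GenerateHash: " + ex.ToString());
        return null;
    }
}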
public static void buildIndex(CheckedListBox.ObjectCollection sharedFoldersList, BackgroundWorker indexWorker, DoWorkEventArgs e)
{
    Utils.writeLog("buildIndex: Started indexing!");

    int numFoldersIndexed = 0;
    DateTime timeOfLastSave = DateTime.Now;    // Keeps track of when to persist state in the middle of indexing

    List<string> folders = new List<string>();
    foreach (string sharedFolderPath in sharedFoldersList)
    {
        folders.Add(sharedFolderPath);
    }

    /* INDEXING
     * --------
     *
     * The idea is to maintain the add/remove lists accurately until a successful sync
     * occurs. At that point we nullify the add/remove lists and update fileIndex.
     *
     * Add/remove lists:
     * Determining removed files is very fast because no hash computation is involved.
     * The most accurate time to collect the list of removed files is after the
     * computationally expensive add list has been computed.
     *
     * So the steps are:
     * 1. Compare against updatedIndex to determine added files - if new, add to
     *    updatedIndex and addedFiles. If old, do nothing.
     * 2. After all adds, updatedIndex still contains some removed files as well.
     * 3. Every so often, write everything to disk so that we can pick up where we
     *    left off.
     * 4. Then compute removed files by checking against updatedIndex. If removed,
     *    remove from updatedIndex and add to removedFiles.
     * 5. Attempt to sync; if successful, nullify add/remove and set
     *    fileIndex = updatedIndex.
     */

    while (folders.Count > 0)
    {
        // Persist data from time to time so that we can eventually index very large sets
        if ((DateTime.Now - timeOfLastSave).TotalMinutes > 2)
        {
            Utils.writeLog("buildIndex: Time exceeded 2 minutes, writing indices to disk..");
            serializeHashTables();
            timeOfLastSave = DateTime.Now;
        }

        DirectoryInfo di = new DirectoryInfo(folders[0]);
        FileInfo[] fileInfoArr = null;
        DirectoryInfo[] directoryInfoArr = null;
        try
        {
            fileInfoArr = di.GetFiles();
            directoryInfoArr = di.GetDirectories();
        }
        catch (Exception u)
        {
            folders.RemoveAt(0);
            Utils.writeLog("ERROR! buildIndex: Exception while indexing " + di.FullName + " Error: " + u);
            continue;
        }

        // Add all the sub folders to the processing queue
        if (directoryInfoArr != null)
        {
            foreach (DirectoryInfo currentDirectory in directoryInfoArr)
            {
                folders.Add(currentDirectory.FullName);
            }
        }

        if (fileInfoArr != null)
        {
            foreach (FileInfo fi in fileInfoArr)
            {
                bool newFile = false;

                if (modifiedIndex.ContainsKey(fi.FullName))
                {
                    // If the file has not been modified, it is already in updatedIndex
                    if ((DateTime)modifiedIndex[fi.FullName] == fi.LastWriteTime)
                    {
                        //Utils.writeLog("buildIndex: Old file not modified : " + fi.FullName);
                        continue;
                    }
                    else
                    {
                        Utils.writeLog("buildIndex: Old file modified : " + fi.FullName);
                    }
                }
                else
                {
                    newFile = true;
                }

                // Get file details, including hash and keywords
                LocalFile currentFile = getFileInfo(fi.FullName);
                if (currentFile == null)
                {
                    //Utils.writeLog("buildIndex: Didn't process file :" + fi.FullName);
                    continue;
                }

                if (newFile)
                {
                    Utils.writeLog("buildIndex: New file seen : " + fi.FullName);
                }

                hashIndex[fi.FullName] = currentFile.hash;
                modifiedIndex[fi.FullName] = fi.LastWriteTime;
                addedFiles[currentFile.hash] = currentFile;
                updatedIndex[currentFile.hash] = currentFile;

                if (indexWorker.CancellationPending)
                {
                    e.Cancel = true;
                    Utils.writeLog("buildIndex: Cancelled indexing. All changes lost.");
                    return;
                }
            }
        }

        // Done with this folder; remove it from the processing queue
        folders.RemoveAt(0);
        numFoldersIndexed++;
    }

    // Addition is complete, now check for removed files..
    Utils.writeLog("buildIndex: File addition complete..");
    removedFiles = (Hashtable)fileIndex.Clone();

    foreach (string sharedFolderPath in sharedFoldersList)
    {
        folders.Add(sharedFolderPath);
    }

    while (folders.Count > 0)
    {
        DirectoryInfo di = new DirectoryInfo(folders[0]);
        FileInfo[] fileInfoArr = null;
        DirectoryInfo[] directoryInfoArr = null;
        try
        {
            fileInfoArr = di.GetFiles();
            directoryInfoArr = di.GetDirectories();
        }
        catch (Exception u)
        {
            folders.RemoveAt(0);
            Utils.writeLog("ERROR! buildIndex: Exception while indexing " + di.FullName + " Error: " + u);
            continue;
        }

        // Add all the sub folders to the processing queue
        if (directoryInfoArr != null)
        {
            foreach (DirectoryInfo currentDirectory in directoryInfoArr)
            {
                folders.Add(currentDirectory.FullName);
            }
        }

        if (fileInfoArr != null)
        {
            foreach (FileInfo fi in fileInfoArr)
            {
                // If the file's hash is still in the index, it was not removed
                if (hashIndex.ContainsKey(fi.FullName) && updatedIndex.ContainsKey(hashIndex[fi.FullName]))
                {
                    removedFiles.Remove(hashIndex[fi.FullName]);
                }
            }
        }

        folders.RemoveAt(0);
    }

    // At this point removedFiles contains the files in the index that don't
    // correspond to any file on disk.
    // Make sure removed files are gone from the appropriate places
    foreach (String key in removedFiles.Keys)
    {
        String path = ((LocalFile)removedFiles[key]).location;
        Utils.writeLog("buildIndex: Removed file : " + path);

        if (addedFiles.ContainsKey(key))
        {
            addedFiles.Remove(key);
        }
        if (updatedIndex.ContainsKey(key))
        {
            Utils.writeLog("buildIndex: Found out-of-date key in updatedIndex");
            updatedIndex.Remove(key);
        }
        if (modifiedIndex.ContainsKey(path))
        {
            modifiedIndex.Remove(path);
        }
    }

    Utils.writeLog("buildIndex: " + removedFiles.Keys.Count + " files removed");
    Utils.writeLog("buildIndex: " + addedFiles.Keys.Count + " files added");

    serializeHashTables();
    Utils.writeLog("buildIndex: Completed indexing of " + numFoldersIndexed + " folders.");
}
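// serializeHashTables is called during and after indexing but is not shown in this
// section. The sketch below is an assumption about what it might do, not the project's
// actual implementation: the working Hashtables are persisted to the application-data
// folder so that an interrupted run can resume. The file name "indices.bin" is
// hypothetical, and LocalFile must be marked [Serializable] for this to work.
public static void serializeHashTables()
{
    string indexPath = Utils.getAppDataPath(@"\indices.bin");   // hypothetical location
    using (FileStream fs = new FileStream(indexPath, FileMode.Create, FileAccess.Write))
    {
        var formatter = new System.Runtime.Serialization.Formatters.Binary.BinaryFormatter();
        // Persist the working indices in a fixed order; deserialization reads them back
        // in the same order.
        formatter.Serialize(fs, hashIndex);
        formatter.Serialize(fs, modifiedIndex);
        formatter.Serialize(fs, updatedIndex);
        formatter.Serialize(fs, addedFiles);
    }
}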
public static bool upload(string fileHash, NetworkStream fileUploadStream, String transferType, long startByte, String transferId)
{
    Utils.writeLog("upload: Started");

    bool successfulTransfer = false;
    byte[] byteSend = new byte[4096];
    string filePath = "";
    string fileName = "";

    // Resolve the file path from the hash.
    if (transferType == "direct" || transferType == "firstleg")
    {
        LocalFile lf = (LocalFile)Indexer.fileIndex[fileHash];

        // Got a request for a file that we don't have.
        if (lf == null)
        {
            Utils.writeLog("upload: Upload failed. Got request for a file not present in index. Hash:" + fileHash);
            informServerOfMissingFile(fileHash, transferId);
            return false;
        }
        filePath = lf.location;
    }
    else
    {
        filePath = Utils.getAppDataPath(@"\Bounces\" + fileHash + ".bounced");
    }

    FileStream fileLocalStream;
    try
    {
        fileLocalStream = new FileStream(filePath, FileMode.Open, FileAccess.Read);
    }
    catch (Exception e)
    {
        Utils.writeLog("upload: " + e.ToString());
        return false;
    }

    lock (Server.uploadCountLock)
    {
        Server.currentUploadsCount++;
    }

    FileInfo fileInfo = new FileInfo(filePath);
    Utils.writeLog("upload: Started reading file from disk : " + filePath);
    fileName = fileInfo.Name;

    long bytesUploaded = 0;
    try
    {
        int bytesSize = 0;

        // Send the file. Resumption is at chunk granularity: a chunk is written
        // only once the running byte count passes startByte.
        while ((bytesSize = fileLocalStream.Read(byteSend, 0, byteSend.Length)) > 0)
        {
            //Thread.Sleep(100);
            bytesUploaded = bytesUploaded + bytesSize;
            if (bytesUploaded > startByte)
            {
                fileUploadStream.Write(byteSend, 0, bytesSize);
            }
        }
        Utils.writeLog("upload: Sent file : " + fileName);
        successfulTransfer = true;
    }
    catch (Exception e)
    {
        Utils.writeLog("Upload module reported error : " + e.ToString());
        successfulTransfer = false;
    }
    finally
    {
        fileLocalStream.Close();
    }

    lock (Server.uploadCountLock)
    {
        Server.currentUploadsCount--;
    }

    return successfulTransfer;
}
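// For illustration only: a hypothetical caller showing how upload might be driven from
// an accepted TCP connection. The request fields (hash, transfer type, start byte,
// transfer id) and the method name handleUploadRequest are assumptions; the actual
// request parsing and connection handling live elsewhere in the project.
public static void handleUploadRequest(System.Net.Sockets.TcpClient client,
                                       string fileHash, string transferType,
                                       long startByte, string transferId)
{
    NetworkStream stream = client.GetStream();
    bool ok = upload(fileHash, stream, transferType, startByte, transferId);
    if (!ok)
    {
        Utils.writeLog("handleUploadRequest: transfer " + transferId + " failed for hash " + fileHash);
    }
    stream.Close();
    client.Close();
}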