// Registers a Status entry for the given file mapping, keyed by its file path.
// Any existing entry under the same path is replaced.
void CreateStatusObject(FileObjectMapping file, Package package) {
    var status = new Status(file.FilePath, package.StatusRepo) {
        RealObject = Package.GetObjectPathFromChecksum(file)
    };
    StatusDic[file.FilePath] = status;
}
// The file's path exists in the change set: an identical checksum means the
// file is unchanged (Equal), a differing one means it must be updated.
void ProcessFoundByFilePath(FileObjectMapping file, KeyValuePair <string, string> found) {
    var unchanged = found.Value.Equals(file.Checksum);
    if (unchanged) {
        Equal.Add(file.FilePath);
        return;
    }
    Update.Add(file.FilePath);
}
// Accumulates the unpacked (working-copy) and packed (object-store) sizes of
// one file into the package metadata. `paths` tracks packed objects already
// counted, so an object shared by several files is only sized once.
void UpdateFileMetaData(PackageMetaData metaData, FileObjectMapping x, ICollection <string> paths) {
    var workingFile = Path.Combine(WorkingPath.ToString(), x.FilePath);
    metaData.Size += new FileInfo(workingFile).Length;

    var objectPath = Repository.GetObjectPath(x.Checksum).ToString();
    if (paths.Contains(objectPath)) {
        return; // packed object already accounted for
    }
    paths.Add(objectPath);
    metaData.SizePacked += new FileInfo(objectPath).Length;
}
// Decides whether the object referenced by `o` can be treated as already
// valid (so it is neither re-packed nor re-downloaded) and, if so, adds it
// to `validObjects`.
void ProcessObject(bool skipWhenLocalFileMatches, FileObjectMapping o, ICollection <FileObjectMapping> validObjects) {
    // Shared "accept" path: record the object and optionally log the reason.
    void MarkValid(string reason) {
        validObjects.Add(o);
        if (Common.Flags.Verbose) {
            MainLog.Logger.Info(
                $"Marking {o.FilePath} ({o.Checksum}) as valid, because {reason}");
        }
    }

    if (!skipWhenLocalFileMatches) {
        var ob = Repository.GetObject(o.Checksum);
        if (ob == null) {
            return;
        }
        var packed = Repository.GetObjectPath(o.Checksum);
        if (packed.Exists && Repository.GetChecksum(packed).Equals(ob.ChecksumPack)) {
            MarkValid("the packed object matches");
        }
        return;
    }

    // We can also skip objects that already match in the working directory so that we don't waste time on compressing or copying objects needlessly
    // this however could create more bandwidth usage in case the user in the future deletes working files, and tries to get the version again
    // in that case the objects will need to be redownloaded, or at least patched up from other possible available objects.
    var localFile = WorkingPath.GetChildFileWithName(o.FilePath);
    if (localFile.Exists && Repository.GetChecksum(localFile).Equals(o.Checksum)) {
        MarkValid("the local object matches");
        return;
    }

    var objectPath = Repository.GetObjectPath(o.Checksum);
    if (objectPath.Exists) {
        MarkValid("the packed object exists");
        // Don't readd object because we can't validate if the Checksum is in order..
    }
}
// The file's path is absent from the change set; search it by checksum
// instead (same content stored under another name). A hit is handled as a
// copy/rename candidate, a miss means the file is brand new.
void ProcessNotFoundByFilePath(FileObjectMapping file, Dictionary <string, string> changeDictionary) {
    var match = changeDictionary.FirstOrDefault(entry => entry.Value.Equals(file.Checksum));
    if (match.Key == null) {
        New.Add(file.FilePath);
        return;
    }
    ProcessFoundByChecksum(file, match.Key);
}
// Unpacks the stored (gzipped) object for a modified file over its working
// copy, reporting progress through the file's Status entry via `act`.
void ProcessModified(IDictionary <string, Status> statusDic, FileObjectMapping fcm, Action <double, long?> act) {
    var status = statusDic[fcm.FilePath];
    status.Progress = 0;
    status.Action = RepoStatus.Unpacking;

    var source = Repository.GetObjectPath(fcm.Checksum);
    var target = WorkingPath.GetChildFileWithName(fcm.FilePath);
    target.ParentDirectoryPath.MakeSurePathExists();

    Tools.Compression.Gzip.UnpackSingleGzip(source, target, new StatusWrapper(status, act));
    status.EndOutput();
}
// Handles a file whose content was found under a different stored path
// (`found`). A case-insensitive path match means only the casing changed;
// otherwise the stored file is recorded as a copy source for the new path.
void ProcessFoundByChecksum(FileObjectMapping file, string found) {
    if (found.Equals(file.FilePath, StringComparison.OrdinalIgnoreCase)) {
        ChangedCase.Add(found, file.FilePath);
        return;
    }
    // Single lookup via TryGetValue instead of ContainsKey + indexer (CA1854).
    List<string> targets;
    if (Copy.TryGetValue(found, out targets)) {
        targets.Add(file.FilePath);
    } else {
        Copy.Add(found, new List <string> { file.FilePath });
    }
}
// Classifies one package file against the change dictionary
// (stored path -> checksum), creating its Status entry on first sight.
void EnumerateChanges(FileObjectMapping file, Dictionary <string, string> changeDictionary, Package package) {
    if (!StatusDic.ContainsKey(file.FilePath)) {
        CreateStatusObject(file, package);
    }
    // O(1) keyed lookup instead of FirstOrDefault scanning every pair.
    // NOTE(review): equivalent to the previous ordinal Equals scan provided the
    // dictionary uses the default comparer — confirm at its construction site.
    string checksum;
    if (changeDictionary.TryGetValue(file.FilePath, out checksum)) {
        ProcessFoundByFilePath(file, new KeyValuePair<string, string>(file.FilePath, checksum));
    } else {
        ProcessNotFoundByFilePath(file, changeDictionary);
    }
}
// Creates an ObjectMap wrapping the given file-to-object mapping in FO.
public ObjectMap(FileObjectMapping fo) { FO = fo; }
// Builds the store-relative path of a packed object from its checksum:
// "objects/" + the first two checksum characters as a fan-out directory,
// followed by the remainder of the checksum.
static string GetObjectPathFromChecksum(FileObjectMapping fileObjectMapping) {
    var checksum = fileObjectMapping.Checksum;
    return $"objects/{checksum.Substring(0, 2)}/{checksum.Substring(2)}";
}