/// <summary>
///     Determines which objects still need to be fetched for this package: objects that
///     already validate locally are filtered out, and any missing packed objects are first
///     offered to <c>HandleMissingObjects</c> for local resolution.
/// </summary>
/// <param name="skipWhenFileMatches">When true, a working-directory file whose checksum matches counts as valid.</param>
/// <returns>The object maps that remain to be downloaded.</returns>
public Package.ObjectMap[] GetNeededObjects(bool skipWhenFileMatches = true) {
    var candidates = GetMetaDataFilesOrderedBySize().ToList();
    var alreadyValid = new List<FileObjectMapping>();
    foreach (var candidate in candidates) {
        ProcessObject(skipWhenFileMatches, candidate, alreadyValid);
    }
    candidates.RemoveAll(alreadyValid);
    var needed = candidates.Select(c => new Package.ObjectMap(c)).ToArray();
    var missing = GetMissingObjectMapping(needed).ToList();
    if (missing.Any()) {
        HandleMissingObjects(missing);
    }
    Repository.Log("Local object matches {0}, left: {1}",
        MetaData.Files.Count - candidates.Count, candidates.Count);
    return needed;
}
/// <summary>
///     Downloads all objects still needed for this package from the given remotes.
///     Objects that complete are re-registered with the repository even when the
///     overall download fails partway through.
/// </summary>
/// <param name="remotes">Remote endpoints to fetch objects from.</param>
/// <param name="repo">Status repo used to report download progress.</param>
/// <param name="skipWhenFileMatches">Forwarded to <c>GetNeededObjects</c>.</param>
/// <returns>The file paths of the objects that were scheduled for download.</returns>
public async Task <string[]> Update(IEnumerable <Uri> remotes, StatusRepo repo, bool skipWhenFileMatches = true) {
    var needed = GetNeededObjects(skipWhenFileMatches);
    var completed = new ConcurrentBag <FileObjectMapping>();
    var fetchInfos = needed
        .OrderByDescending(o => Tools.FileUtil.SizePrediction(o.FO.FilePath))
        .Select(o => new FileFetchInfo(Repository.GetObjectSubPath(o.FO), o.FO.FilePath) {
            OnComplete = () => completed.Add(o.FO)
        })
        .ToArray();
    StatusRepo.ProcessSize(GetExistingObjects(needed.Select(o => o.FO)), Repository.ObjectsPath,
        MetaData.SizePacked);
    // TODO: Abort support!
    // TODO: Progress fix??!
    try {
        await DownloadObjects(remotes, repo, fetchInfos, Repository.ObjectsPath).ConfigureAwait(false);
    } finally {
        // Register whatever finished, even if the download was interrupted.
        Repository.ReAddObject(completed.Select(o => o.Checksum).ToArray());
    }
    return fetchInfos.Select(f => f.FilePath).ToArray();
}
/// <summary>
///     Best-effort metadata load: returns null instead of throwing when the file
///     cannot be read or parsed.
/// </summary>
/// <param name="metaDataPath">Path of the metadata file to load.</param>
/// <returns>The loaded metadata, or null on any failure.</returns>
public static PackageMetaData TryLoad(IAbsoluteFilePath metaDataPath) {
    try {
        return Repository.Load <PackageMetaDataDto, PackageMetaData>(metaDataPath);
    } catch (Exception) {
        // Deliberate swallow: "Try" contract signals failure via null.
        return null;
    }
}
// Writes a detailed change report to the logger and a condensed overview to the
// repository log. Removals are only included when withRemoval is set.
void PrintDetailedChanges(Package.ChangeList changeAg, bool withRemoval) {
    var summary = new StringBuilder();
    var detailed = new StringBuilder();
    var removals = withRemoval ? changeAg.Remove : new List <string>();
    BuildLogInfos(changeAg.Equal, summary, detailed, changeAg.Copy, changeAg.Update,
        removals, changeAg.New, changeAg.ChangedCase);
    this.Logger().Info(detailed.ToString());
    Repository.Log(summary.ToString());
}
// Logs a short comparison of the files currently on disk versus the files the
// package metadata says are needed.
void PrintChangeOverview(IEnumerable <IAbsoluteFilePath> files, IEnumerable <FileObjectMapping> mappings) {
    var summary = new StringBuilder();
    var detailed = new StringBuilder();
    BuildShortLogInfo("Current files", files.Select(f => f.FileName), summary, detailed);
    BuildShortLogInfo("Needed files", mappings.Select(m => m.FilePath), summary, detailed);
    this.Logger().Info(detailed.ToString());
    Repository.Log(summary.ToString());
}
// Accumulates sizes into the metadata: unpacked size for every file, and packed
// size once per unique object (several files can map to the same checksum, so the
// visited object paths are tracked in 'paths' to avoid double counting).
void UpdateFileMetaData(PackageMetaData metaData, FileObjectMapping x, ICollection <string> paths) {
    metaData.Size += new FileInfo(Path.Combine(WorkingPath.ToString(), x.FilePath)).Length;
    var objectPath = Repository.GetObjectPath(x.Checksum).ToString();
    if (paths.Contains(objectPath)) {
        return;
    }
    paths.Add(objectPath);
    metaData.SizePacked += new FileInfo(objectPath).Length;
}
// Decides whether object 'o' is already valid locally and, if so, records it in
// 'validObjects'. Two modes:
//  - skipWhenLocalFileMatches: a matching working-directory file, or the mere
//    existence of the packed object, counts as valid.
//  - otherwise: only a packed object whose checksum matches the repository's
//    recorded pack checksum counts as valid.
void ProcessObject(bool skipWhenLocalFileMatches, FileObjectMapping o, ICollection <FileObjectMapping> validObjects) {
    if (skipWhenLocalFileMatches) {
        // We can also skip objects that already match in the working directory so that we don't waste time on compressing or copying objects needlessly
        // this however could create more bandwidth usage in case the user in the future deletes working files, and tries to get the version again
        // in that case the objects will need to be redownloaded, or at least patched up from other possible available objects.
        var workingFile = WorkingPath.GetChildFileWithName(o.FilePath);
        if (workingFile.Exists && Repository.GetChecksum(workingFile).Equals(o.Checksum)) {
            validObjects.Add(o);
            if (Common.Flags.Verbose) {
                MainLog.Logger.Info(
                    $"Marking {o.FilePath} ({o.Checksum}) as valid, because the local object matches");
            }
            return;
        }
        var packedPath = Repository.GetObjectPath(o.Checksum);
        if (packedPath.Exists) {
            validObjects.Add(o);
            if (Common.Flags.Verbose) {
                MainLog.Logger.Info(
                    $"Marking {o.FilePath} ({o.Checksum}) as valid, because the packed object exists");
            }
            // Don't readd object because we can't validate if the Checksum is in order..
        }
    } else {
        var storedObject = Repository.GetObject(o.Checksum);
        if (storedObject == null) {
            return;
        }
        var packedPath = Repository.GetObjectPath(o.Checksum);
        if (packedPath.Exists && Repository.GetChecksum(packedPath).Equals(storedObject.ChecksumPack)) {
            validObjects.Add(o);
            if (Common.Flags.Verbose) {
                MainLog.Logger.Info(
                    $"Marking {o.FilePath} ({o.Checksum}) as valid, because the packed object matches");
            }
        }
    }
}
// Returns the working-directory files relevant to processing. With removal enabled,
// every file on disk is returned (largest predicted size first); otherwise only the
// mapped files that actually exist.
IAbsoluteFilePath[] GetWorkingPathFiles(bool withRemoval, IOrderedEnumerable <FileObjectMapping> mappings) {
    if (withRemoval) {
        return Repository.GetFiles(WorkingPath)
            .OrderByDescending(f => Tools.FileUtil.SizePrediction(f.FileName))
            .ToArray();
    }
    return mappings
        .Select(m => WorkingPath.GetChildFileWithName(m.FilePath))
        .Where(f => f.Exists)
        .ToArray();
}
// Unpacks the gzipped object for a modified file into the working directory,
// reporting progress through the file's Status entry.
void ProcessModified(IDictionary <string, Status> statusDic, FileObjectMapping fcm, Action <double, long?> act) {
    var status = statusDic[fcm.FilePath];
    status.Progress = 0;
    status.Action = RepoStatus.Unpacking;
    var target = WorkingPath.GetChildFileWithName(fcm.FilePath);
    var source = Repository.GetObjectPath(fcm.Checksum);
    target.ParentDirectoryPath.MakeSurePathExists();
    Tools.Compression.Gzip.UnpackSingleGzip(source, target, new StatusWrapper(status, act));
    status.EndOutput();
}
// Rebuilds the metadata's file list from the working directory and recomputes the
// size totals. Content type is only persisted when one is actually set.
PackageMetaData UpdateMetaData(bool downCase, PackageMetaData metaData) {
    metaData.Files = Repository.Commit(WorkingPath, downCase);
    var seenObjectPaths = new List <string>();
    GetMetaDataFilesOrderedBySize(metaData)
        .ForEach(m => UpdateFileMetaData(metaData, m, seenObjectPaths));
    if (string.IsNullOrWhiteSpace(metaData.ContentType)) {
        return metaData;
    }
    Repository.SetContentType(metaData.Name, metaData.ContentType);
    Repository.Save(); // TODO: This should be done in one go, now its done twice once at Repository.Commit and once here :S
    return metaData;
}
// Checks whether another package's metadata references the same file path; if that
// package's object exists locally, records it as a candidate source for 'missing'.
void ProcessMissingObjects(IDictionary <string, PackageMetaData> cache, string package, Package.ObjectMap missing) {
    var metadata = RetrieveMetaData(cache, package);
    if ((metadata == null) || !metadata.Files.ContainsKey(missing.FO.FilePath)) {
        return;
    }
    var candidate = metadata.Files[missing.FO.FilePath];
    if (Repository.GetObjectPath(candidate).Exists) {
        missing.ExistingObject = candidate;
    }
}
/// <summary>
///     Creates a package bound to an already-loaded metadata instance.
/// </summary>
/// <param name="workingDirectory">Directory the package's files live in.</param>
/// <param name="metaData">Pre-loaded package metadata.</param>
/// <param name="repository">Backing object repository.</param>
/// <exception cref="ArgumentNullException">Any argument is null.</exception>
public Package(IAbsoluteDirectoryPath workingDirectory, PackageMetaData metaData, Repository repository) {
    if (workingDirectory == null) {
        throw new ArgumentNullException(nameof(workingDirectory));
    }
    // FIX: metaData was previously stored without validation while the sibling
    // arguments were null-checked; a null would only surface later as an NRE.
    if (metaData == null) {
        throw new ArgumentNullException(nameof(metaData));
    }
    if (repository == null) {
        throw new ArgumentNullException(nameof(repository));
    }
    WorkingPath = workingDirectory;
    Repository = repository;
    ConfirmPathValidity();
    MetaData = metaData;
    StatusRepo = new StatusRepo();
}
/// <summary>
///     Creates a package by loading its metadata from the repository by name.
/// </summary>
/// <param name="workingDirectory">Directory the package's files live in.</param>
/// <param name="packageName">Name used to locate the metadata in the repository.</param>
/// <param name="repository">Backing object repository.</param>
/// <exception cref="ArgumentNullException">Any argument is null.</exception>
public Package(IAbsoluteDirectoryPath workingDirectory, string packageName, Repository repository) {
    if (workingDirectory == null) {
        throw new ArgumentNullException(nameof(workingDirectory));
    }
    // FIX: packageName was not validated while the sibling arguments were; a null
    // would only surface deep inside GetMetaDataPath.
    if (packageName == null) {
        throw new ArgumentNullException(nameof(packageName));
    }
    if (repository == null) {
        throw new ArgumentNullException(nameof(repository));
    }
    WorkingPath = workingDirectory;
    Repository = repository;
    ConfirmPathValidity();
    MetaData = Load(Repository.GetMetaDataPath(packageName));
    ConfirmPackageValidity(packageName);
    StatusRepo = new StatusRepo();
}
/// <summary>
///     Tries to resolve missing objects without downloading: first from sibling
///     packages' metadata (preferring packages that share this package's name prefix),
///     then by recompressing matching files already in the working directory.
///     Resolved entries are removed from <paramref name="missingObjects"/>.
/// </summary>
void HandleMissingObjects(List <Package.ObjectMap> missingObjects) {
    var currentPackage = MetaData.GetFullName();
    var packages = Repository.GetPackagesList()
        .Where(x => !x.Equals(currentPackage))
        .OrderByDescending(x => x.StartsWith(MetaData.Name)).ToArray();
    if (packages.Any()) {
        ProcessMissingObjects(missingObjects, packages);
    }
    var resolvableObjects = missingObjects.Where(x => x.ExistingObject != null).ToArray();
    StatusRepo.Reset(RepoStatus.Copying, resolvableObjects.Length);
    foreach (var o in resolvableObjects) {
        // FIX: the format string only contained {0}; the existing-object and checksum
        // arguments were passed but silently dropped from the log output.
        this.Logger()
            .Info("Found local previous version match for {0} ({1} => {2})", o.FO.FilePath,
                o.ExistingObject, o.FO.Checksum);
        missingObjects.Remove(o);
    }
    StatusRepo.Reset(RepoStatus.Packing, missingObjects.Count);
    var resolvedObjects = new List <Package.ObjectMap>();
    foreach (var o in missingObjects) {
        ProcessMissingObject(o, resolvedObjects);
    }
    Repository.ReAddObject(resolvedObjects.Select(x => x.ExistingObject).ToArray());
    foreach (var o in resolvedObjects) {
        missingObjects.Remove(o);
    }
    // Totals: missingObjects has already had both resolved groups removed, so the
    // original count is reconstructed by summing all three.
    Repository.Log(
        "\nFound {0} missing objects, resolved {1} candidates from other packages and {2} from uncompressed files",
        missingObjects.Count + resolvedObjects.Count + resolvableObjects.Length,
        resolvableObjects.Length, resolvedObjects.Count);
}
// If the working directory still contains the file for a missing object, compresses
// it back into the object store and marks the object map as resolved.
void ProcessMissingObject(Package.ObjectMap o, ICollection <Package.ObjectMap> resolvedObjects) {
    var workingFile = WorkingPath.GetChildFileWithName(o.FO.FilePath);
    if (!workingFile.Exists) {
        return;
    }
    var status = new Status(o.FO.FilePath, StatusRepo) {
        Action = RepoStatus.Packing,
        RealObject = GetObjectPathFromChecksum(o.FO)
    };
    var checksum = Repository.GetChecksum(workingFile);
    this.Logger().Info("Found local previous version file for {0}. Compressing to {1}",
        o.FO.FilePath, checksum);
    Repository.CompressObject(workingFile, checksum);
    o.ExistingObject = checksum;
    resolvedObjects.Add(o);
    status.EndOutput();
}
// Lazily yields the object maps whose packed object is absent from the repository.
// (Equivalent to the previous Select-anonymous/Where/Select chain, without the
// intermediate anonymous type.)
IEnumerable <Package.ObjectMap> GetMissingObjectMapping(IEnumerable <Package.ObjectMap> objects)
    => objects.Where(o => !Repository.GetObjectPath(o.FO).Exists);
/// <summary>
///     Loads package metadata from disk; unlike <c>TryLoad</c>, failures propagate
///     to the caller as exceptions.
/// </summary>
public static PackageMetaData Load(IAbsoluteFilePath metaDataPath) {
    return Repository.Load <PackageMetaDataDto, PackageMetaData>(metaDataPath);
}
// Persists this package through the repository, asynchronously.
Task SaveAsync() {
    return Repository.SavePackageAsync(this);
}
// Persists this package through the repository, synchronously.
void Save() => Repository.SavePackage(this);
// Yields the object sub-paths for metadata entries NOT in the requested set —
// i.e. the objects we already have and don't need to fetch.
IEnumerable <string> GetExistingObjects(IEnumerable <FileObjectMapping> objects) {
    var requested = objects.Select(x => x.Checksum);
    return GetMetaDataFilesOrderedBySize()
        .Select(x => x.Checksum)
        .Except(requested)
        .Select(x => Repository.GetObjectSubPath(x));
}