/// <summary>
/// Check that changes grouped by entity id belong to the website.
/// </summary>
/// <param name="groupedChanges">Changes grouped by entity id.</param>
/// <param name="context">Crm DB context.</param>
/// <param name="websiteId">Website id.</param>
/// <returns>True when the grouped changes belong to the given website.</returns>
private bool ChangesBelongsToWebsite(IGrouping<Guid, IChangedItem> groupedChanges, CrmDbContext context, Guid websiteId)
{
	var key = groupedChanges.Key;
	var name = this.GetEntityNameFromChangedItem(groupedChanges.First());

	// The website entity itself belongs to the website iff the ids match.
	if (string.Equals("adx_website", name, StringComparison.OrdinalIgnoreCase))
	{
		return websiteId == key;
	}

	// if entity hasn't relationship with website or entity was deleted -> mark as `belongs to website`
	EntityTrackingInfo trackingInfo;
	var anyDeleted = groupedChanges.Any(change => change.Type == ChangeType.RemoveOrDeleted);
	if (anyDeleted || !entityInfoList.TryGetValue(name, out trackingInfo) || trackingInfo.WebsiteLookupAttribute == null)
	{
		return true;
	}

	// trying to get website's id from changed items
	var candidate = groupedChanges
		.OfType<NewOrUpdatedItem>()
		.FirstOrDefault(item => item.NewOrUpdatedEntity.Contains(trackingInfo.WebsiteLookupAttribute));

	// if all changes doesn't contain website lookup attribute but we know that entity should have it then try to get value from service context
	var entity = candidate != null
		? candidate.NewOrUpdatedEntity
		: context.Service.RetrieveSingle(new EntityReference(name, key), new ColumnSet(trackingInfo.WebsiteLookupAttribute));

	return entity?.GetAttributeValue<EntityReference>(trackingInfo.WebsiteLookupAttribute)?.Id == websiteId;
}
/// <summary>
/// Lazily yields the leaf accessors produced for a group of UI leafs,
/// dispatching on the group's node type.
/// </summary>
/// <param name="group">Leafs grouped by their node type.</param>
/// <param name="conversion">Optional conversion flag forwarded to the group builders.</param>
/// <param name="compression">Optional compression flag (archive leafs only).</param>
/// <returns>The accessors for the given group.</returns>
/// <exception cref="NotImplementedException">Thrown for an unsupported node type.</exception>
public IEnumerable<IUiLeafsAccessor> AcessToLeafs(IGrouping<UiNodeType, IUiLeaf> group, bool? conversion, bool? compression)
{
	switch (group.Key)
	{
		case UiNodeType.ArchiveLeaf:
			foreach (UiArciveLeafsAccessor archiveAccessor in GroupArchiveLeafs(group.OfType<UiArchiveLeaf>(), conversion, compression))
				yield return archiveAccessor;
			break;

		case UiNodeType.FileTableLeaf:
			// TODO: SEDBLeafs
			foreach (UiWpdLeafsAccessor wpdAccessor in GroupWpdLeafs(group.OfType<UiWpdTableLeaf>(), conversion))
				yield return wpdAccessor;
			break;

		case UiNodeType.DataTableLeaf:
			foreach (UiWdbMovieLeafsAccessor wdbAccessor in GroupWdbLeafs(group.OfType<UiWdbMovieLeaf>(), conversion))
				yield return wdbAccessor;
			break;

		default:
			throw new NotImplementedException(group.Key.ToString());
	}
}
/// <summary>
/// Stages this archive's files through the VFS, copies them into the output
/// folder, then applies binary patches for any <see cref="PatchedFromArchive"/>
/// directives in the group.
/// </summary>
/// <param name="archive">Archive whose files are being installed.</param>
/// <param name="absolutePath">Absolute path of the archive on disk (staging itself goes through the VFS).</param>
/// <param name="grouping">Install directives sourced from this archive, keyed by archive hash.</param>
private void InstallArchive(Archive archive, string absolutePath, IGrouping<string, FromArchive> grouping)
{
	Status($"Extracting {archive.Name}");
	var vfiles = grouping.Select(g =>
	{
		var file = VFS.FileForArchiveHashPath(g.ArchiveHashPath);
		g.FromFile = file;
		return g;
	}).ToList();

	var on_finish = VFS.Stage(vfiles.Select(f => f.FromFile).Distinct());

	Status("Copying files for {0}", archive.Name);
	vfiles.DoIndexed((idx, file) =>
	{
		Utils.Status($"Installing files", idx * 100 / vfiles.Count);
		// overwrite: true so reinstalling over an existing output folder doesn't throw
		File.Copy(file.FromFile.StagedPath, Path.Combine(Outputfolder, file.To), true);
	});

	Status("Unstaging files");
	on_finish();

	// Now patch all the files from this archive
	foreach (var to_patch in grouping.OfType<PatchedFromArchive>())
	{
		Status("Patching {0}", Path.GetFileName(to_patch.To));

		// Read in the patch data
		var patch_data = to_patch.Patch.FromBase64();
		var to_file = Path.Combine(Outputfolder, to_patch.To);
		using (var old_data = new MemoryStream(File.ReadAllBytes(to_file)))
		{
			// Remove the file we're about to patch
			File.Delete(to_file);

			// Patch it
			using (var out_stream = File.OpenWrite(to_file))
			{
				BSDiff.Apply(old_data, () => new MemoryStream(patch_data), out_stream);
			}
		}
	}
}
/// <summary>
/// Stages this archive's files through the VFS, moves/copies them into the
/// output folder in parallel on the work queue, then applies and hash-verifies
/// binary patches for <see cref="PatchedFromArchive"/> directives.
/// </summary>
/// <param name="queue">Work queue used for parallel copy and patch operations.</param>
/// <param name="archive">Archive whose files are being installed.</param>
/// <param name="absolutePath">Absolute path of the archive on disk (staging itself goes through the VFS).</param>
/// <param name="grouping">Install directives sourced from this archive, keyed by archive hash.</param>
/// <exception cref="InvalidDataException">A patched file's hash does not match the directive's expected hash.</exception>
private async Task InstallArchive(WorkQueue queue, Archive archive, AbsolutePath absolutePath, IGrouping<Hash, FromArchive> grouping)
{
	Status($"Extracting {archive.Name}");
	List<FromArchive> vFiles = grouping.Select(g =>
	{
		var file = VFS.Index.FileForArchiveHashPath(g.ArchiveHashPath);
		g.FromFile = file;
		return g;
	}).ToList();

	var onFinish = await VFS.Stage(vFiles.Select(f => f.FromFile).Distinct());

	Status($"Copying files for {archive.Name}");

	// Clears read-only flags on both ends, replaces the destination, and
	// refreshes the timestamp after the move/copy.
	async ValueTask CopyFile(AbsolutePath from, AbsolutePath to, bool useMove)
	{
		if (to.Exists)
		{
			if (to.IsReadOnly)
				to.IsReadOnly = false;
			to.Delete();
		}

		if (from.Exists)
		{
			if (from.IsReadOnly)
				from.IsReadOnly = false;
		}

		if (useMove)
			from.MoveTo(to);
		else
			from.CopyTo(to);

		// If we don't do this, the file will use the last-modified date of the file when it was compressed
		// into an archive, which isn't really what we want in the case of files installed archives
		to.LastModified = DateTime.Now;
	}

	await vFiles.GroupBy(f => f.FromFile)
		.PDoIndexed(queue, async (idx, group) =>
		{
			Utils.Status("Installing files", Percent.FactoryPutInRange(idx, vFiles.Count));
			if (group.Key == null)
			{
				throw new ArgumentNullException("FromFile was null");
			}

			// Move the staged file to its first destination, then duplicate
			// it for any additional destinations that share the same source.
			var firstDest = OutputFolder.Combine(group.First().To);
			await CopyFile(group.Key.StagedPath, firstDest, true);

			foreach (var copy in group.Skip(1))
			{
				await CopyFile(firstDest, OutputFolder.Combine(copy.To), false);
			}
		});

	Status("Unstaging files");
	await onFinish();

	// Now patch all the files from this archive
	await grouping.OfType<PatchedFromArchive>()
		.PMap(queue, async toPatch =>
		{
			Status($"Patching {toPatch.To.FileName}");

			// Read in the patch data
			byte[] patchData = await LoadBytesFromPath(toPatch.PatchID);

			var toFile = OutputFolder.Combine(toPatch.To);
			var oldData = new MemoryStream(await toFile.ReadAllBytesAsync());

			// Remove the file we're about to patch
			toFile.Delete();

			// Patch it
			await using (var outStream = toFile.Create())
			{
				Utils.ApplyPatch(oldData, () => new MemoryStream(patchData), outStream);
			}

			Status($"Verifying Patch {toPatch.To.FileName}");
			var resultSha = await toFile.FileHashAsync();
			if (resultSha != toPatch.Hash)
			{
				throw new InvalidDataException($"Invalid Hash for {toPatch.To} after patching");
			}
		});
}
/// <summary>
/// Stages this archive's files through the VFS, moves/copies them into the
/// output folder in parallel on the work queue, then applies and hash-verifies
/// binary patches for <see cref="PatchedFromArchive"/> directives.
/// </summary>
/// <param name="queue">Work queue used for the parallel copy phase.</param>
/// <param name="archive">Archive whose files are being installed.</param>
/// <param name="absolutePath">Absolute path of the archive on disk (staging itself goes through the VFS).</param>
/// <param name="grouping">Install directives sourced from this archive, keyed by archive hash.</param>
/// <exception cref="InvalidDataException">A patched file's hash does not match the directive's expected hash.</exception>
private async Task InstallArchive(WorkQueue queue, Archive archive, string absolutePath, IGrouping<string, FromArchive> grouping)
{
	Status($"Extracting {archive.Name}");
	List<FromArchive> vFiles = grouping.Select(g =>
	{
		var file = VFS.Index.FileForArchiveHashPath(g.ArchiveHashPath);
		g.FromFile = file;
		return g;
	}).ToList();

	var onFinish = await VFS.Stage(vFiles.Select(f => f.FromFile).Distinct());

	Status($"Copying files for {archive.Name}");

	// Clears read-only flags on both ends, replaces the destination, and
	// refreshes the timestamp after the move/copy.
	void CopyFile(string from, string to, bool useMove)
	{
		if (File.Exists(to))
		{
			var fi = new FileInfo(to);
			if (fi.IsReadOnly)
				fi.IsReadOnly = false;
			File.Delete(to);
		}

		if (File.Exists(from))
		{
			var fi = new FileInfo(from);
			if (fi.IsReadOnly)
				fi.IsReadOnly = false;
		}

		if (useMove)
			File.Move(from, to);
		else
			File.Copy(from, to);

		// If we don't do this, the file will use the last-modified date of the file when it was compressed
		// into an archive, which isn't really what we want in the case of files installed archives
		File.SetLastWriteTime(to, DateTime.Now);
	}

	await vFiles.GroupBy(f => f.FromFile)
		.PDoIndexed(queue, (idx, group) =>
		{
			Utils.Status("Installing files", idx * 100 / vFiles.Count);

			// Move the staged file to its first destination, then duplicate
			// it for any additional destinations that share the same source.
			var firstDest = Path.Combine(OutputFolder, group.First().To);
			CopyFile(group.Key.StagedPath, firstDest, true);

			foreach (var copy in group.Skip(1))
			{
				var nextDest = Path.Combine(OutputFolder, copy.To);
				CopyFile(firstDest, nextDest, false);
			}
		});

	Status("Unstaging files");
	onFinish();

	// Now patch all the files from this archive
	foreach (var toPatch in grouping.OfType<PatchedFromArchive>())
	{
		Status($"Patching {Path.GetFileName(toPatch.To)}");

		// Read in the patch data
		byte[] patchData = LoadBytesFromPath(toPatch.PatchID);

		var toFile = Path.Combine(OutputFolder, toPatch.To);
		using (var oldData = new MemoryStream(File.ReadAllBytes(toFile)))
		{
			// Remove the file we're about to patch
			File.Delete(toFile);

			// Patch it
			using (var outStream = File.Open(toFile, FileMode.Create))
			{
				BSDiff.Apply(oldData, () => new MemoryStream(patchData), outStream);
			}
		}

		Status($"Verifying Patch {Path.GetFileName(toPatch.To)}");
		var resultSha = toFile.FileHash();
		if (resultSha != toPatch.Hash)
		{
			throw new InvalidDataException($"Invalid Hash for {toPatch.To} after patching");
		}
	}
}
/// <summary>
/// Extracts this archive directly to the output folder via a streaming
/// callback, duplicates files that map to multiple destinations, then applies
/// binary patches for any <see cref="PatchedFromArchive"/> directives.
/// </summary>
/// <param name="archive">Archive whose files are being installed.</param>
/// <param name="absolutePath">Absolute path of the archive file to open.</param>
/// <param name="grouping">Install directives sourced from this archive, keyed by archive hash.</param>
private void InstallArchive(Archive archive, string absolutePath, IGrouping<string, FromArchive> grouping)
{
	Status("Extracting {0}", archive.Name);
	var files = grouping.GroupBy(e => e.From)
		.ToDictionary(e => e.Key);

	using (var a = new ArchiveFile(absolutePath))
	{
		a.Extract(entry =>
		{
			// Only extract entries we have directives for; stream straight to
			// the first destination (duplicates are handled afterwards).
			if (files.TryGetValue(entry.FileName, out var directives))
			{
				var directive = directives.First();
				var absolute = Path.Combine(Outputfolder, directive.To);
				if (absolute.FileExists())
				{
					File.Delete(absolute);
				}
				return File.OpenWrite(absolute);
			}
			return null;
		});
	}

	Status("Copying duplicated files for {0}", archive.Name);
	foreach (var dups in files.Where(e => e.Value.Count() > 1).Select(v => v.Value))
	{
		var ffrom = dups.First();
		var from_path = Path.Combine(Outputfolder, ffrom.To);
		foreach (var to in dups.Skip(1))
		{
			var to_path = Path.Combine(Outputfolder, to.To);
			if (to_path.FileExists())
			{
				File.Delete(to_path);
			}
			File.Copy(from_path, to_path);
		}
	}

	// Now patch all the files from this archive
	foreach (var to_patch in grouping.OfType<PatchedFromArchive>())
	{
		Status("Patching {0}", Path.GetFileName(to_patch.To));

		// Read in the patch data
		var patch_data = to_patch.Patch.FromBase64();
		var to_file = Path.Combine(Outputfolder, to_patch.To);
		using (var old_data = new MemoryStream(File.ReadAllBytes(to_file)))
		{
			// Remove the file we're about to patch
			File.Delete(to_file);

			// Patch it
			using (var out_stream = File.OpenWrite(to_file))
			{
				BSDiff.Apply(old_data, () => new MemoryStream(patch_data), out_stream);
			}
		}
	}
}