// Compare two .msapp files by whole-file checksum. On a mismatch, DEBUG builds
// run a per-file comparison (via Test) to pinpoint exactly which entries differ.
// Returns true when the archives are equivalent.
public static bool Compare(string pathToZip1, string pathToZip2, TextWriter log)
{
    var checksum1 = ChecksumMaker.GetChecksum(pathToZip1);
    var checksum2 = ChecksumMaker.GetChecksum(pathToZip2);
    if (checksum1.wholeChecksum == checksum2.wholeChecksum)
    {
        return true;
    }

    // If there's a checksum mismatch, do a more intensive comparison to find the difference.
#if DEBUG
    // Provide a comparison that can be very specific about what the difference is.
    var pendingFiles = new Dictionary<string, string>();

    var hashFirst = Test(pathToZip1, log, pendingFiles, true);
    var hashSecond = Test(pathToZip2, log, pendingFiles, false);

    // Anything still pending was present in the first archive but absent from the second.
    foreach (var kv in pendingFiles)
    {
        Console.WriteLine("FAIL: 2nd is missing " + kv.Key);
    }

    if (hashFirst == hashSecond)
    {
        log.WriteLine("Same!");
        return true;
    }
    Console.WriteLine("FAIL!!");
#endif
    return false;
}
// Computes the checksum of the files just written to the zip, warns (or, on
// validation, throws) when it disagrees with the client checksum recorded at
// unpack time, then writes the checksum entry into the archive.
//
// app          - document being repacked; supplies the previously-stamped checksums.
// checksum     - accumulator that has already seen every file written to `z`.
// z            - archive being written; receives the new checksum entry.
// errors       - sink for mismatch warnings/errors.
// isValidation - true when this save is the post-unpack validation repack; a
//                mismatch is then fatal (release builds only).
private static void ComputeAndWriteChecksum(CanvasDocument app, ChecksumMaker checksum, ZipArchive z, ErrorContainer errors, bool isValidation)
{
    var hash = checksum.GetChecksum();

    // Only compare when a prior checksum exists; mismatch means sources were edited after unpack.
    if (app._checksum != null && hash.wholeChecksum != app._checksum.ClientStampedChecksum)
    {
        // These warnings are Debug only. Throwing a bunch of warning messages at the customer could lead to them ignoring real errors.
#if DEBUG
        if (app._checksum.ClientPerFileChecksums != null)
        {
            // Report each file whose checksum changed or that disappeared since unpack.
            foreach (var file in app._checksum.ClientPerFileChecksums)
            {
                if (!hash.perFileChecksum.TryGetValue(file.Key, out var fileChecksum))
                {
                    errors.ChecksumMismatch("Missing file " + file.Key);
                }
                else if (fileChecksum != file.Value)
                {
                    errors.ChecksumMismatch($"File {file.Key} checksum does not match on extract");
                }
            }
            // Report files that exist now but were not present at unpack time.
            foreach (var file in hash.perFileChecksum)
            {
                if (!app._checksum.ClientPerFileChecksums.ContainsKey(file.Key))
                {
                    errors.ChecksumMismatch("Extra file " + file.Key);
                }
            }
        }
#endif
#if !DEBUG
        // These are the non-debug warnings, if it's unpack this was a serious error, on -pack it's most likely not
        if (isValidation)
        {
            errors.PostUnpackValidationFailed();
            throw new DocumentException();
        }
#endif
        errors.ChecksumMismatch("Checksum indicates that sources have been edited since they were unpacked. If this was intentional, ignore this warning.");
    }

    // Stamp the freshly-computed client checksums; carry the server checksums
    // through unchanged (they may be null if never set).
    var checksumJson = new ChecksumJson
    {
        ClientStampedChecksum = hash.wholeChecksum,
        ClientPerFileChecksums = hash.perFileChecksum,
        ServerStampedChecksum = app._checksum?.ServerStampedChecksum,
        ServerPerFileChecksums = app._checksum?.ServerPerFileChecksums,
    };
    var entry = ToFile(FileKind.Checksum, checksumJson);
    var e = z.CreateEntry(entry.Name.ToMsAppPath());
    using (var dest = e.Open())
    {
        dest.Write(entry.RawBytes, 0, entry.RawBytes.Length);
    }
}
// Compute the aggregate checksum plus a per-file checksum map for every entry
// in the given archive.
public static (string wholeChecksum, Dictionary<string, string> perFileChecksum) GetChecksum(ZipArchive zip)
{
    var maker = new ChecksumMaker();
    foreach (var entry in zip.Entries)
    {
        maker.AddFile(entry.FullName, entry.ToBytes());
    }
    return maker.GetChecksum();
}
// Computes the checksum of the files written to the zip, reports per-file
// differences against the client checksum recorded at unpack time, then writes
// the checksum entry into the archive.
//
// app      - document being repacked; supplies the previously-stamped checksums.
// checksum - accumulator that has already seen every file written to `z`.
// z        - archive being written; receives the new checksum entry.
// errors   - sink for mismatch warnings.
private static void ComputeAndWriteChecksum(CanvasDocument app, ChecksumMaker checksum, ZipArchive z, ErrorContainer errors)
{
    var hash = checksum.GetChecksum();

    if (hash.wholeChecksum != app._checksum.ClientStampedChecksum)
    {
        if (app._checksum.ClientPerFileChecksums != null)
        {
            // We had offline edits!
            errors.ChecksumMismatch("Sources have changed since when they were unpacked.");
            foreach (var file in app._checksum.ClientPerFileChecksums)
            {
                if (!hash.perFileChecksum.TryGetValue(file.Key, out var fileChecksum))
                {
                    errors.ChecksumMismatch("Missing file " + file.Key);
                }
                // BUG FIX: this was previously an independent `if`; after a failed
                // TryGetValue, fileChecksum is null and a spurious "does not match"
                // error was emitted for every missing file. The sibling overload
                // uses `else if` — made consistent here.
                else if (fileChecksum != file.Value)
                {
                    errors.ChecksumMismatch($"File {file.Key} checksum does not match on extract");
                }
            }
            // Report files that exist now but were not present at unpack time.
            foreach (var file in hash.perFileChecksum)
            {
                if (!app._checksum.ClientPerFileChecksums.ContainsKey(file.Key))
                {
                    errors.ChecksumMismatch("Extra file " + file.Key);
                }
            }
        }
    }

    // Stamp the fresh client checksums; carry the server checksums through unchanged.
    var checksumJson = new ChecksumJson
    {
        ClientStampedChecksum = hash.wholeChecksum,
        ClientPerFileChecksums = hash.perFileChecksum,
        ServerStampedChecksum = app._checksum.ServerStampedChecksum,
        ServerPerFileChecksums = app._checksum.ServerPerFileChecksums,
    };
    var entry = ToFile(FileKind.Checksum, checksumJson);
    var e = z.CreateEntry(entry.Name);
    using (var dest = e.Open())
    {
        dest.Write(entry.RawBytes, 0, entry.RawBytes.Length);
    }
}
// Overload with ErrorContainer
// Compare two .msapp files by whole-file checksum; when they differ, run a
// detailed per-entry comparison so specific mismatches land in the error container.
// Returns true when the archives are equivalent.
public static bool Compare(string pathToZip1, string pathToZip2, TextWriter log, ErrorContainer errorContainer)
{
    var first = ChecksumMaker.GetChecksum(pathToZip1);
    var second = ChecksumMaker.GetChecksum(pathToZip2);
    if (first.wholeChecksum == second.wholeChecksum)
    {
        return true;
    }

    // Provide a comparison that can be very specific about what the difference is.
    var normalizedContents = new Dictionary<string, byte[]>();
    CompareChecksums(pathToZip1, log, normalizedContents, true, errorContainer);
    CompareChecksums(pathToZip2, log, normalizedContents, false, errorContainer);

    return false;
}
// Write back out to a msapp file.
// Applies the pre-write transforms, streams every file into a fresh zip while
// accumulating its checksum, appends the checksum entry, then undoes the
// transforms so the in-memory CanvasDocument is left unchanged.
//
// app             - document to serialize.
// fullpathToMsApp - target path; must end in .msapp or .zip. Overwritten if present.
// errors          - sink for serialization warnings/errors.
// isValidation    - forwarded to checksum validation (fatal mismatch on repack-after-unpack).
public static void SaveAsMsApp(CanvasDocument app, string fullpathToMsApp, ErrorContainer errors, bool isValidation = false)
{
    app.ApplyBeforeMsAppWriteTransforms(errors);

    // BUG FIX: the second clause was missing its negation, so a .zip target threw
    // while any other unrecognized extension slipped through. The intent is to
    // reject everything that is neither .msapp nor .zip.
    if (!fullpathToMsApp.EndsWith(".msapp", StringComparison.OrdinalIgnoreCase) &&
        !fullpathToMsApp.EndsWith(".zip", StringComparison.OrdinalIgnoreCase))
    {
        throw new InvalidOperationException("Only works for .msapp files");
    }

    if (File.Exists(fullpathToMsApp)) // Overwrite!
    {
        File.Delete(fullpathToMsApp);
    }

    var checksum = new ChecksumMaker();

    DirectoryWriter.EnsureFileDirExists(fullpathToMsApp);
    using (var z = ZipFile.Open(fullpathToMsApp, ZipArchiveMode.Create))
    {
        foreach (FileEntry entry in app.GetMsAppFiles(errors))
        {
            if (entry != null)
            {
                var e = z.CreateEntry(entry.Name.ToMsAppPath());
                using (var dest = e.Open())
                {
                    dest.Write(entry.RawBytes, 0, entry.RawBytes.Length);
                    checksum.AddFile(entry.Name.ToMsAppPath(), entry.RawBytes);
                }
            }
        }

        ComputeAndWriteChecksum(app, checksum, z, errors, isValidation);
    }

    // Undo BeforeWrite transforms so CanvasDocument representation is unchanged
    app.ApplyAfterMsAppLoadTransforms(errors);
}
// Compare the debug checksums.
// Get a hash for the MsApp file.
// First pass adds file/hash to comp.
// Second pass checks hash equality and removes files from comp.
// After second pass, comp should be 0. Any files in comp were missing from 2nd pass.
//
// pathToZip      - archive to scan on this pass.
// log            - currently unused here; kept for signature parity with callers.
// comp           - shared map of entry name -> normalized contents, carried across both passes.
// first          - true on the first pass (populate), false on the second (compare & drain).
// errorContainer - receives JSONMismatch errors for changed/added/removed JSON properties.
public static void CompareChecksums(string pathToZip, TextWriter log, Dictionary<string, byte[]> comp, bool first, ErrorContainer errorContainer)
{
    // Path to the directory where we are creating the normalized form
    string normFormDir = ".\\diffFiles";

    // Create directory if doesn't exist
    if (!Directory.Exists(normFormDir))
    {
        Directory.CreateDirectory(normFormDir);
    }

    using (var zip = ZipFile.OpenRead(pathToZip))
    {
        // Ordering by name makes the two passes deterministic and comparable.
        foreach (ZipArchiveEntry entry in zip.Entries.OrderBy(x => x.FullName))
        {
            // Normalize the entry to a canonical text form; null means "skip this entry".
            var newContents = ChecksumMaker.ChecksumFile<DebugTextHashMaker>(entry.FullName, entry.ToBytes());
            if (newContents == null)
            {
                continue;
            }

            // Do easy diffs
            {
                if (first)
                {
                    comp.Add(entry.FullName, newContents);
                }
                else
                {
                    byte[] originalContents;
                    if (comp.TryGetValue(entry.FullName, out originalContents))
                    {
                        bool same = newContents.SequenceEqual(originalContents);

                        if (!same)
                        {
                            var jsonDictionary1 = FlattenJson(originalContents);
                            var jsonDictionary2 = FlattenJson(newContents);

                            // Add JSONMismatch error if JSON property was changed or removed
                            CheckPropertyChangedRemoved(jsonDictionary1, jsonDictionary2, errorContainer, "");

                            // Add JSONMismatch error if JSON property was added
                            CheckPropertyAdded(jsonDictionary1, jsonDictionary2, errorContainer, "");

#if DEBUG
                            DebugMismatch(entry, originalContents, newContents, normFormDir);
#endif
                        }

                        comp.Remove(entry.FullName);
                    }
                    else
                    {
                        // Missing file!
                        Console.WriteLine("FAIL: 2nd has added file: " + entry.FullName);
                    }
                }
            }
        }
    }
}
// Load a CanvasDocument from an .msapp stream: read every zip entry, triage it
// by file kind into the appropriate in-memory structure, verify checksums, and
// apply the load-time transforms that normalize the representation.
//
// streamToMsapp - readable stream over the .msapp zip; must not be null.
// errors        - sink for format errors/warnings.
// Returns the loaded document, or null if the stream could not be opened as a zip.
// Throws DocumentException on unsupported/inconsistent formats.
public static CanvasDocument Load(Stream streamToMsapp, ErrorContainer errors)
{
    if (streamToMsapp == null)
    {
        throw new ArgumentNullException(nameof(streamToMsapp));
    }

    // Read raw files.
    // Apply transforms.
    var app = new CanvasDocument();

    app._checksum = new ChecksumJson(); // default empty. Will get overwritten if the file is present.
    app._templateStore = new EditorState.TemplateStore();
    app._editorStateStore = new EditorState.EditorStateStore();

    // Metadata files that must be processed AFTER all entries are read (they
    // reference templates populated by the source-file cases below).
    ComponentsMetadataJson componentsMetadata = null;
    DataComponentTemplatesJson dctemplate = null;
    DataComponentSourcesJson dcsources = null;

    ChecksumMaker checksumMaker = new ChecksumMaker();

    // key = screen, value = index
    var screenOrder = new Dictionary<string, double>();

    ZipArchive zipOpen;
    try
    {
        zipOpen = new ZipArchive(streamToMsapp, ZipArchiveMode.Read);
    }
    catch (Exception e)
    {
        // Catch cases where stream is corrupted, can't be read, or unavailable.
        errors.MsAppFormatError(e.Message);
        return null;
    }

    using (var z = zipOpen)
    {
        foreach (var entry in z.Entries)
        {
            // Fold every entry into the running checksum before triage.
            checksumMaker.AddFile(entry.FullName, entry.ToBytes());

            var fullName = entry.FullName;
            var kind = FileEntry.TriageKind(FilePath.FromMsAppPath(fullName));

            switch (kind)
            {
                default:
                    // Track any unrecognized files so we can save back.
                    app.AddFile(FileEntry.FromZip(entry));
                    break;

                case FileKind.Resources:
                    app._resourcesJson = ToObject<ResourcesJson>(entry);
                    // Local-file root paths are machine-specific noise; move them to entropy.
                    foreach (var resource in app._resourcesJson.Resources)
                    {
                        if (resource.ResourceKind == "LocalFile")
                        {
                            app._entropy.LocalResourceRootPaths.Add(resource.Name, resource.RootPath);
                            resource.RootPath = null;
                        }
                    }
                    break;

                case FileKind.Asset:
                    // Strip the "Assets\" prefix so asset names are archive-relative.
                    app.AddAssetFile(FileEntry.FromZip(entry, name: fullName.Substring("Assets\\".Length)));
                    break;

                case FileKind.Checksum:
                    app._checksum = ToObject<ChecksumJson>(entry);
                    break;

                case FileKind.OldEntityJSon:
                    errors.FormatNotSupported($"This is using an older v1 msapp format that is not supported.");
                    throw new DocumentException();

                case FileKind.DataComponentTemplates:
                    dctemplate = ToObject<DataComponentTemplatesJson>(entry);
                    break;

                case FileKind.ComponentsMetadata:
                    componentsMetadata = ToObject<ComponentsMetadataJson>(entry);
                    break;

                case FileKind.DataComponentSources:
                    dcsources = ToObject<DataComponentSourcesJson>(entry);
                    break;

                case FileKind.Properties:
                    app._properties = ToObject<DocumentPropertiesJson>(entry);
                    break;

                case FileKind.Themes:
                    app._themes = ToObject<ThemesJson>(entry);
                    break;

                case FileKind.Header:
                    app._header = ToObject<HeaderJson>(entry);
                    // Last-saved time is volatile; stash it in entropy and clear it.
                    app._entropy.SetHeaderLastSaved(app._header.LastSavedDateTimeUTC);
                    app._header.LastSavedDateTimeUTC = null;
                    break;

                case FileKind.PublishInfo:
                    app._publishInfo = ToObject<PublishInfoJson>(entry);
                    break;

                case FileKind.AppCheckerResult:
                    app._appCheckerResultJson = ToObject<AppCheckerResultJson>(entry);
                    break;

                case FileKind.ComponentSrc:
                case FileKind.ControlSrc:
                case FileKind.TestSrc:
                {
                    var control = ToObject<ControlInfoJson>(entry);
                    var sf = SourceFile.New(control);

                    // Add to screen order, only screens have meaningful indices, components may have collisions
                    if (!ExcludeControlFromScreenOrdering(sf))
                    {
                        screenOrder.Add(control.TopParent.Name, control.TopParent.Index);
                    }

                    var flattenedControlTree = sf.Flatten();
                    foreach (var ctrl in flattenedControlTree)
                    {
                        // Add PublishOrderIndex to Entropy so it doesn't affect the editorstate diff.
                        app._entropy.PublishOrderIndices.Add(ctrl.Name, ctrl.PublishOrderIndex);

                        // For component instances, also track their index in Entropy
                        if (ctrl.Index == 0.0 || ctrl.Template.Id == "http://microsoft.com/appmagic/screen")
                        {
                            continue;
                        }
                        app._entropy.ComponentIndexes.Add(ctrl.Name, ctrl.Index);
                    }

                    IRStateHelpers.SplitIRAndState(sf, app._editorStateStore, app._templateStore, app._entropy, out var controlIR);

                    if (kind == FileKind.ComponentSrc)
                    {
                        app._components.Add(sf.ControlName, controlIR);
                    }
                    else
                    {
                        app._screens.Add(sf.ControlName, controlIR);
                    }
                }
                break;

                case FileKind.DataSources:
                {
                    var dataSources = ToObject<DataSourcesJson>(entry);
                    Utilities.EnsureNoExtraData(dataSources.ExtensionData);

                    // Preserve original ordering via the load-order index.
                    int iOrder = 0;
                    foreach (var ds in dataSources.DataSources)
                    {
                        app.AddDataSourceForLoad(ds, iOrder);
                        iOrder++;
                    }
                }
                break;

                case FileKind.Templates:
                {
                    app._templates = ToObject<TemplatesJson>(entry);
                    // Record template ordering in entropy so round-trips are stable.
                    int iOrder = 0;
                    foreach (var template in app._templates.UsedTemplates)
                    {
                        app._entropy.Add(template, iOrder);
                        iOrder++;
                    }
                    iOrder = 0;
                    foreach (var template in app._templates.ComponentTemplates ?? Enumerable.Empty<TemplateMetadataJson>())
                    {
                        app._entropy.AddComponent(template, iOrder);
                        iOrder++;
                    }
                }
                break;
            }
        } // foreach zip entry

        // Copy component-template metadata onto the matching template-store entries
        // (which were populated while reading the source files above).
        foreach (var componentTemplate in app._templates.ComponentTemplates ?? Enumerable.Empty<TemplateMetadataJson>())
        {
            if (!app._templateStore.TryGetTemplate(componentTemplate.Name, out var template))
            {
                continue;
            }
            template.TemplateOriginalName = componentTemplate.OriginalName;
            template.IsComponentLocked = componentTemplate.IsComponentLocked;
            template.ComponentChangedSinceFileImport = componentTemplate.ComponentChangedSinceFileImport;
            template.ComponentAllowCustomization = componentTemplate.ComponentAllowCustomization;
            template.ComponentExtraMetadata = componentTemplate.ExtensionData;

            if (template.Version != componentTemplate.Version)
            {
                app._entropy.SetTemplateVersion(template.Name, componentTemplate.Version);
            }
        }

        app._screenOrder = screenOrder.OrderBy(kvp => kvp.Value).Select(kvp => kvp.Key).ToList();

        // Checksums?
        var currentChecksum = checksumMaker.GetChecksum();

        // This is debug only. The server checksum is out of date with the client checksum
        // The main checksum validation that matters is the repack after unpack
#if DEBUG
        if (app._checksum.ServerStampedChecksum != null && app._checksum.ServerStampedChecksum != currentChecksum.wholeChecksum)
        {
            // The server checksum doesn't match the actual contents.
            // likely has been tampered.
            errors.ChecksumMismatch("Checksum doesn't match on extract.");

            if (app._checksum.ServerPerFileChecksums != null)
            {
                foreach (var file in app._checksum.ServerPerFileChecksums)
                {
                    if (!currentChecksum.perFileChecksum.TryGetValue(file.Key, out var fileChecksum))
                    {
                        errors.ChecksumMismatch("Missing file " + file.Key);
                    }
                    // BUG FIX: this was previously an independent `if`; after a failed
                    // TryGetValue, fileChecksum is null and a spurious "does not match"
                    // error was reported for every missing file.
                    else if (fileChecksum != file.Value)
                    {
                        errors.ChecksumMismatch($"File {file.Key} checksum does not match on extract");
                    }
                }
                foreach (var file in currentChecksum.perFileChecksum)
                {
                    if (!app._checksum.ServerPerFileChecksums.ContainsKey(file.Key))
                    {
                        errors.ChecksumMismatch("Extra file " + file.Key);
                    }
                }
            }
        }
#endif

        // Stamp the client checksums computed from what we actually read.
        app._checksum.ClientStampedChecksum = currentChecksum.wholeChecksum;
        app._checksum.ClientPerFileChecksums = currentChecksum.perFileChecksum;

        // Normalize logo filename.
        app.TranformLogoOnLoad();

        // The following property blobs are JSON strings embedded in Properties;
        // parse them into structured form and clear the raw strings.
        if (app._properties.LibraryDependencies != null)
        {
            var refs = Utilities.JsonParse<ComponentDependencyInfo[]>(app._properties.LibraryDependencies);
            app._libraryReferences = refs;
            app._properties.LibraryDependencies = null;
        }

        if (app._properties.LocalConnectionReferences != null)
        {
            var cxs = Utilities.JsonParse<IDictionary<String, ConnectionJson>>(app._properties.LocalConnectionReferences);
            app._connections = cxs;
            app._properties.LocalConnectionReferences = null;
        }

        if (!string.IsNullOrEmpty(app._properties.LocalDatabaseReferences))
        {
            var dsrs = Utilities.JsonParse<IDictionary<String, LocalDatabaseReferenceJson>>(app._properties.LocalDatabaseReferences);
            app._dataSourceReferences = dsrs;
            app._properties.LocalDatabaseReferences = null;
            app._entropy.LocalDatabaseReferencesAsEmpty = false;
        }
        else
        {
            // Remember whether the blob was empty vs. absent so repack is faithful.
            app._entropy.LocalDatabaseReferencesAsEmpty = true;
        }

        if (componentsMetadata?.Components != null)
        {
            int order = 0;
            foreach (var x in componentsMetadata.Components)
            {
                var manifest = ComponentManifest.Create(x);
                if (!app._templateStore.TryGetTemplate(x.TemplateName, out var templateState))
                {
                    errors.FormatNotSupported("Component Metadata contains template not present in the app");
                    throw new DocumentException();
                }
                templateState.ComponentManifest = manifest;
                app._entropy.Add(x, order);
                order++;
            }
        }

        // Only for data-components.
        if (dctemplate?.ComponentTemplates != null)
        {
            int order = 0;
            foreach (var x in dctemplate.ComponentTemplates)
            {
                if (x.ComponentType == null)
                {
                    errors.FormatNotSupported($"Data component {x.Name} is using an outdated format");
                    throw new DocumentException();
                }
                if (!app._templateStore.TryGetTemplate(x.Name, out var templateState))
                {
                    errors.FormatNotSupported("Component Metadata contains template not present in the app");
                    throw new DocumentException();
                }
                ComponentManifest manifest = templateState.ComponentManifest; // Should already exist
                app._entropy.SetTemplateVersion(x.Name, x.Version);
                app._entropy.Add(x, order);
                manifest.Apply(x);
                order++;
            }
        }

        if (dcsources?.DataSources != null)
        {
            // Component Data sources only appear if the data component is actually
            // used as a data source in this app.
            foreach (var x in dcsources.DataSources)
            {
                if (x.Type != DataComponentSourcesJson.NativeCDSDataSourceInfo)
                {
                    throw new NotImplementedException(x.Type);
                }

                var ds = new DataSourceEntry
                {
                    Name = x.Name,
                    DataComponentDetails = x, // pass in all details for full-fidelity
                    Type = DataSourceModel.DataComponentType
                };

                app.AddDataSourceForLoad(ds);
            }
        }
    }

    app.ApplyAfterMsAppLoadTransforms(errors);
    app.OnLoadComplete(errors);

    return app;
}
// Compare the debug checksums.
// Get a hash for the MsApp file.
// First pass adds file/hash to comp.
// Second pass checks hash equality and removes files from comp.
// After second pass, comp should be 0. any files in comp were missing from 2nd pass.
//
// pathToZip - archive to scan on this pass.
// log       - unused here; kept for signature parity with Compare.
// comp      - shared map of entry name -> normalized contents across both passes.
// first     - true on the first pass (populate), false on the second (compare & drain).
public static void DebugChecksum(string pathToZip, TextWriter log, Dictionary<string, byte[]> comp, bool first)
{
    using (var z = ZipFile.OpenRead(pathToZip))
    {
        // Ordering by name makes the two passes deterministic and comparable.
        foreach (ZipArchiveEntry e in z.Entries.OrderBy(x => x.FullName))
        {
            // Normalize the entry to canonical text form; null means "skip this entry".
            var key = ChecksumMaker.ChecksumFile<DebugTextHashMaker>(e.FullName, e.ToBytes());
            if (key == null)
            {
                continue;
            }

            // Do easy diffs
            {
                if (first)
                {
                    comp.Add(e.FullName, key);
                }
                else
                {
                    byte[] otherContents;
                    if (comp.TryGetValue(e.FullName, out otherContents))
                    {
                        bool same = key.SequenceEqual(otherContents);

                        if (!same)
                        {
                            // Fail! Mismatch
                            Console.WriteLine("FAIL: hash mismatch: " + e.FullName);

                            // Write out normalized form. Easier to spot the diff.
                            // NOTE(review): hard-coded temp paths are dev-machine only.
                            File.WriteAllBytes(@"c:\temp\a1.json", otherContents);
                            File.WriteAllBytes(@"c:\temp\b1.json", key);

                            // For debugging. Help find exactly where the difference is.
                            for (int i = 0; i < otherContents.Length; i++)
                            {
                                if (i >= key.Length)
                                {
                                    break;
                                }
                                if (otherContents[i] != key[i])
                                {
                                    // Intentionally empty: breakpoint anchor for the
                                    // first differing byte offset.
                                }
                            }
                        }
                        else
                        {
                            // success
                        }
                        comp.Remove(e.FullName);
                    }
                    else
                    {
                        // Missing file!
                        Console.WriteLine("FAIL: 2nd has added file: " + e.FullName);
                    }
                }
            }
        }
    }
}