public void WhenCalculatingBundleHash_HashingBeginsAtFirstObject()
{
    // Arrange: one bundle whose single serialized object begins 100 bytes into the file,
    // so bytes [0, 100) are header data that should not participate in the bundle hash.
    ArchiveAndCompressBundles.TaskInput input = GetDefaultInput();
    WriteResult writeResult = AddSimpleBundle(input, "mybundle", "internalName");

    var location = new SerializedLocation();
    location.SetFileName(writeResult.resourceFiles[0].fileAlias);
    location.SetOffset(100);
    var serializedInfo = new ObjectSerializedInfo();
    serializedInfo.SetHeader(location);
    writeResult.SetSerializedObjects(new ObjectSerializedInfo[] { serializedInfo });

    ResourceFile file = writeResult.resourceFiles[0];
    file.SetSerializedFile(true);
    writeResult.SetResourceFiles(new ResourceFile[] { file });
    input.InternalFilenameToWriteResults["internalName"] = writeResult;
    string sourcePath = input.InternalFilenameToWriteResults["internalName"].resourceFiles[0].fileName;

    // Act: baseline hash.
    ArchiveAndCompressBundles.Run(input, out ArchiveAndCompressBundles.TaskOutput baseline);

    // Act: rewrite only the first 100 bytes — entirely before the serialized object.
    WriteRandomData(sourcePath, 100, 1);
    ArchiveAndCompressBundles.Run(input, out ArchiveAndCompressBundles.TaskOutput afterHeaderChange);

    // Act: rewrite the first 104 bytes — this crosses the object's offset and must affect the hash.
    WriteRandomData(sourcePath, 104, 2);
    ArchiveAndCompressBundles.Run(input, out ArchiveAndCompressBundles.TaskOutput afterObjectChange);

    // Assert: pre-object bytes are ignored by the hash; bytes at/after the offset are not.
    Assert.AreEqual(baseline.BundleDetails["mybundle"].Hash, afterHeaderChange.BundleDetails["mybundle"].Hash);
    Assert.AreNotEqual(baseline.BundleDetails["mybundle"].Hash, afterObjectChange.BundleDetails["mybundle"].Hash);
}
// Builds a populated ObjectSerializedInfo. On 2019.4+ the internal fields are
// visible (internals access), so they are assigned directly; on 2018.4 the
// same fields must be written via reflection.
static ObjectSerializedInfo CreateObjectSerializedInfo(ObjectIdentifier serializedObject, SerializedLocation header, SerializedLocation rawData)
{
    var info = new ObjectSerializedInfo();
#if UNITY_2019_4_OR_NEWER
    info.m_SerializedObject = serializedObject;
    info.m_Header = header;
    info.m_RawData = rawData;
#else
    SetFieldValue(info, "m_SerializedObject", serializedObject);
    SetFieldValue(info, "m_Header", header);
    SetFieldValue(info, "m_RawData", rawData);
#endif
    return info;
}
// Builds a WriteResult filled with pseudo-random synthetic content.
// NOTE: the rnd.Next() call order below matches the original flat construction
// (C# evaluates arguments left-to-right), so the generated sequence is unchanged.
static WriteResult CreateSyntheticWriteResult(System.Random rnd)
{
    // One serialized-object entry with randomized header/raw-data locations.
    ObjectSerializedInfo NextObjectInfo() =>
        CreateObjectSerializedInfo(
            CreateObjectIdentifier(rnd),
            CreateSerializedLocation("Header_" + rnd.Next(), (uint)rnd.Next(), (uint)rnd.Next()),
            CreateSerializedLocation("RawData_" + rnd.Next(), (uint)rnd.Next(), (uint)rnd.Next()));

    // One resource file with randomized name/alias and a random serialized-file flag.
    ResourceFile NextResourceFile() =>
        CreateResourceFile("Filename_" + rnd.Next(), "FileAlias_" + rnd.Next(), (rnd.Next() & 1) != 0);

    ObjectSerializedInfo[] serializedObjects = { NextObjectInfo(), NextObjectInfo() };
    ResourceFile[] resourceFiles = { NextResourceFile(), NextResourceFile() };
    Type[] includedTypes = { typeof(ScriptableObject), typeof(Vector2) };
    String[] includedSerializeReferenceFQNs =
    {
        "IncludedSerializeReferenceFQN_" + rnd.Next(),
        "IncludedSerializeReferenceFQN_" + rnd.Next()
    };

    return CreateWriteResult(serializedObjects, resourceFiles, includedTypes, includedSerializeReferenceFQNs);
}
// For every serialized resource file, records the offset of its first serialized
// object (keyed by file name). Mirrors the offset collection done by the
// ArchiveAndCompressBundles task itself.
static Dictionary<string, ulong> CalculateHashFileOffsets(TaskInput input)
{
    var offsets = new Dictionary<string, ulong>();
    foreach (WriteResult writeResult in input.InternalFilenameToWriteResults.Values)
    {
        foreach (ResourceFile file in writeResult.resourceFiles)
        {
            if (file.serializedFile)
            {
                // First() intentionally throws if no serialized object maps to this file,
                // matching the production code's behavior.
                ObjectSerializedInfo firstObject = writeResult.serializedObjects.First(o => o.header.fileName == file.fileAlias);
                offsets[file.fileName] = firstObject.header.offset;
            }
        }
    }
    return offsets;
}
/// <summary>
/// Archives and compresses each bundle's resource files, producing per-bundle
/// BundleDetails (file name, CRC, hash, dependencies). Results are served from
/// the build cache when available; newly built bundles are written back to it.
/// </summary>
/// <returns>ReturnCode.Success, or ReturnCode.Canceled if the progress tracker aborts.</returns>
public ReturnCode Run()
{
    // fileName -> offset of the first serialized object in that file.
    // Used later by CalculateHashVersion; the unit test for this task shows the
    // bundle hash ignores bytes before this offset.
    Dictionary<string, ulong> fileOffsets = new Dictionary<string, ulong>();
    List<KeyValuePair<string, List<ResourceFile>>> bundleResources;
    {
        // Group every write result's resource files by the bundle that owns them,
        // collecting first-object offsets for serialized files along the way.
        Dictionary<string, List<ResourceFile>> bundleToResources = new Dictionary<string, List<ResourceFile>>();
        foreach (var pair in m_Results.WriteResults)
        {
            string bundle = m_WriteData.FileToBundle[pair.Key];
            List<ResourceFile> resourceFiles;
            bundleToResources.GetOrAdd(bundle, out resourceFiles);
            resourceFiles.AddRange(pair.Value.resourceFiles);
            foreach (ResourceFile serializedFile in pair.Value.resourceFiles)
            {
                if (!serializedFile.serializedFile)
                {
                    continue;
                }
                // First() throws if no serialized object references this file's alias —
                // presumably guaranteed by the write step; TODO confirm.
                ObjectSerializedInfo firstObject = pair.Value.serializedObjects.First(x => x.header.fileName == serializedFile.fileAlias);
                fileOffsets[serializedFile.fileName] = firstObject.header.offset;
            }
        }
        bundleResources = bundleToResources.ToList();
    }

    // bundle -> set of other bundles its assets' files pull in (self excluded).
    Dictionary<string, HashSet<string>> bundleDependencies = new Dictionary<string, HashSet<string>>();
    foreach (var files in m_WriteData.AssetToFiles.Values)
    {
        if (files.IsNullOrEmpty())
        {
            continue;
        }
        // The first file's bundle is treated as the asset's owning bundle.
        string bundle = m_WriteData.FileToBundle[files.First()];
        HashSet<string> dependencies;
        bundleDependencies.GetOrAdd(bundle, out dependencies);
        dependencies.UnionWith(files.Select(x => m_WriteData.FileToBundle[x]));
        dependencies.Remove(bundle);
    }

    // One cache entry per bundle; compression settings participate in the cache key.
    IList<CacheEntry> entries = bundleResources.Select(x => GetCacheEntry(x.Key, x.Value, m_Parameters.GetCompressionForIdentifier(x.Key))).ToList();
    IList<CachedInfo> cachedInfo = null;
    IList<CachedInfo> uncachedInfo = null;
    if (m_Parameters.UseCache && m_Cache != null)
    {
        m_Cache.LoadCachedData(entries, out cachedInfo);
        uncachedInfo = new List<CachedInfo>();
    }

    for (int i = 0; i < bundleResources.Count; i++)
    {
        string bundleName = bundleResources[i].Key;
        ResourceFile[] resourceFiles = bundleResources[i].Value.ToArray();
        BuildCompression compression = m_Parameters.GetCompressionForIdentifier(bundleName);
        string writePath;
        BundleDetails details;
        if (cachedInfo != null && cachedInfo[i] != null)
        {
            // Cache hit: reuse the stored BundleDetails and the cached artifact on disk.
            if (!m_Tracker.UpdateInfoUnchecked(string.Format("{0} (Cached)", bundleName)))
            {
                return(ReturnCode.Canceled);
            }
            details = (BundleDetails)cachedInfo[i].Data[0];
            // FileName/Dependencies are recomputed rather than trusted from the cache,
            // since output folder and dependency graph can differ between builds.
            details.FileName = string.Format("{0}/{1}", m_Parameters.OutputFolder, bundleName);
            HashSet<string> dependencies;
            if (bundleDependencies.TryGetValue(bundleName, out dependencies))
            {
                details.Dependencies = dependencies.ToArray();
            }
            else
            {
                details.Dependencies = new string[0];
            }
            writePath = string.Format("{0}/{1}", m_Cache.GetCachedArtifactsDirectory(entries[i]), bundleName);
        }
        else
        {
            // Cache miss (or caching disabled): build the archive now.
            if (!m_Tracker.UpdateInfoUnchecked(bundleName))
            {
                return(ReturnCode.Canceled);
            }
            details = new BundleDetails();
            writePath = string.Format("{0}/{1}", m_Parameters.TempOutputFolder, bundleName);
            if (m_Parameters.UseCache && m_Cache != null)
            {
                // When caching, build directly into the cache's artifact directory.
                writePath = string.Format("{0}/{1}", m_Cache.GetCachedArtifactsDirectory(entries[i]), bundleName);
            }
            Directory.CreateDirectory(Path.GetDirectoryName(writePath));
            details.FileName = string.Format("{0}/{1}", m_Parameters.OutputFolder, bundleName);
            details.Crc = ContentBuildInterface.ArchiveAndCompress(resourceFiles, writePath, compression);
            // Hash is computed from file contents starting at each serialized file's
            // first-object offset (see fileOffsets above).
            details.Hash = CalculateHashVersion(fileOffsets, resourceFiles);
            HashSet<string> dependencies;
            if (bundleDependencies.TryGetValue(bundleName, out dependencies))
            {
                details.Dependencies = dependencies.ToArray();
            }
            else
            {
                details.Dependencies = new string[0];
            }
            if (uncachedInfo != null)
            {
                // Queue the freshly built bundle's info for a single batched cache save below.
                uncachedInfo.Add(GetCachedInfo(m_Cache, entries[i], resourceFiles, details));
            }
        }
        SetOutputInformation(writePath, details.FileName, bundleName, details);
    }

    if (m_Parameters.UseCache && m_Cache != null)
    {
        m_Cache.SaveCachedData(uncachedInfo);
    }
    return(ReturnCode.Success);
}