/// <summary>
/// Gets the storage container.
/// </summary>
/// <param name="storageHeader">The storage header.</param>
/// <returns>Content Storage container.</returns>
public ContentStorage GetStorage(ref ContentStorageHeader storageHeader)
{
    ContentStorage result;

    lock (containers)
    {
        if (!containers.TryGetValue(storageHeader.DataUrl, out result))
        {
            result = new ContentStorage(this);
            containers.Add(storageHeader.DataUrl, result);
        }

        result.Init(ref storageHeader);
    }

    Debug.Assert(result != null && result.Url == storageHeader.DataUrl);
    return result;
}
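// Hypothetical usage sketch (illustration only, not part of the source above):
// `streamingManager` stands in for the object that owns the `containers` dictionary
// and exposes GetStorage, and `header` is a ContentStorageHeader obtained elsewhere.
// Because containers are cached by DataUrl, repeated requests for the same URL are
// expected to return the same ContentStorage instance, re-initialized from the latest header.
var storageA = streamingManager.GetStorage(ref header);
var storageB = streamingManager.GetStorage(ref header);
Debug.Assert(ReferenceEquals(storageA, storageB));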
internal void Init(ref ContentStorageHeader header)
{
    Url = header.DataUrl;
    chunks = new ContentChunk[header.ChunksCount];
    for (int i = 0; i < chunks.Length; i++)
    {
        var e = header.Chunks[i];
        chunks[i] = new ContentChunk(this, e.Location, e.Size);
    }
    PackageTime = header.PackageTime;

    // Validate hash code
    if (GetHashCode() != header.HashCode)
    {
        throw new ContentStreamingException("Invalid hash code.", this);
    }
}
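// A minimal sketch of what the matching GetHashCode override could look like, mirroring
// the hash computed in Create() further below (package time ticks combined with the chunk
// count and each chunk size via the 397 multiplier). This is an assumption for illustration:
// the actual override in ContentStorage may differ, and it assumes ContentChunk exposes its
// Size, but Init() requires it to produce the same value as the header's HashCode.
public override int GetHashCode()
{
    int hashCode = (int)PackageTime.Ticks;
    hashCode = (hashCode * 397) ^ chunks.Length;
    for (int i = 0; i < chunks.Length; i++)
    {
        hashCode = (hashCode * 397) ^ chunks[i].Size;
    }
    return hashCode;
}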
/// <summary>
/// Reads a header instance from a stream.
/// </summary>
/// <param name="stream">The source stream.</param>
/// <param name="result">The result data.</param>
public static void Read(SerializationStream stream, out ContentStorageHeader result)
{
    result = new ContentStorageHeader();

    var version = stream.ReadInt32();
    if (version == 1)
    {
        result.DataUrl = stream.ReadString();
        result.PackageTime = new DateTime(stream.ReadInt64());

        int chunksCount = stream.ReadInt32();
        result.Chunks = new ChunkEntry[chunksCount];
        for (int i = 0; i < chunksCount; i++)
        {
            result.Chunks[i].Location = stream.ReadInt32();
            result.Chunks[i].Size = stream.ReadInt32();
        }

        result.HashCode = stream.ReadInt32();
        return;
    }

    throw new SerializationException($"Invalid {nameof(ContentStorageHeader)} version.");
}
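// Hypothetical Write counterpart (not shown in the source): it mirrors the version-1 layout
// read above — version, DataUrl, PackageTime ticks, chunk count, then (Location, Size) per
// chunk, and finally HashCode. The Write(...) overloads on SerializationStream are assumed
// to exist symmetrically to the Read* calls used by Read.
public void Write(SerializationStream stream)
{
    stream.Write(1); // format version
    stream.Write(DataUrl);
    stream.Write(PackageTime.Ticks);
    stream.Write(Chunks.Length);
    for (int i = 0; i < Chunks.Length; i++)
    {
        stream.Write(Chunks[i].Location);
        stream.Write(Chunks[i].Size);
    }
    stream.Write(HashCode);
}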
/// <summary>
/// Creates the new storage container at the specified location and generates the header for it.
/// </summary>
/// <param name="dataUrl">The file url.</param>
/// <param name="chunksData">The chunks data.</param>
/// <param name="header">The header data.</param>
public static void Create(string dataUrl, List<byte[]> chunksData, out ContentStorageHeader header)
{
    if (chunksData == null || chunksData.Count == 0 || chunksData.Any(x => x == null || x.Length == 0))
    {
        throw new ArgumentException(nameof(chunksData));
    }

    var packageTime = DateTime.UtcNow;

    // Sort chunks (smaller ones go first)
    int chunksCount = chunksData.Count;
    List<int> chunksOrder = new List<int>(chunksCount);
    for (int i = 0; i < chunksCount; i++)
    {
        chunksOrder.Add(i);
    }
    chunksOrder.Sort((a, b) => chunksData[a].Length - chunksData[b].Length);

    // Calculate header hash code (used to provide simple data verification during loading)
    // Note: this must match ContentStorage.GetHashCode()
    int hashCode = (int)packageTime.Ticks;
    hashCode = (hashCode * 397) ^ chunksCount;
    for (int i = 0; i < chunksCount; i++)
    {
        hashCode = (hashCode * 397) ^ chunksData[i].Length;
    }

    // Create header
    header = new ContentStorageHeader
    {
        DataUrl = dataUrl,
        PackageTime = packageTime,
        HashCode = hashCode,
        Chunks = new ContentStorageHeader.ChunkEntry[chunksCount],
    };

    // Calculate chunks locations in the file
    int offset = 0;
    for (int i = 0; i < chunksCount; i++)
    {
        int chunkIndex = chunksOrder[i];
        int size = chunksData[chunkIndex].Length;
        header.Chunks[chunkIndex].Location = offset;
        header.Chunks[chunkIndex].Size = size;
        offset += size;
    }

    // Create the file with the raw data
    using (var outputStream = ContentManager.FileProvider.OpenStream(dataUrl, VirtualFileMode.Create, VirtualFileAccess.Write, VirtualFileShare.Read, StreamFlags.Seekable))
    using (var stream = new BinaryWriter(outputStream))
    {
        // Write chunks (one after another)
        for (int i = 0; i < chunksCount; i++)
        {
            stream.Write(chunksData[chunksOrder[i]]);
        }

        // Validate calculated offset
        if (offset != outputStream.Position)
        {
            throw new ContentStreamingException("Invalid storage offset.");
        }
    }
}
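// Hypothetical usage sketch (illustration only): pack two small chunks into a new storage
// file and keep the generated header. The URL and chunk payloads are placeholders, and
// Create is assumed to be exposed on ContentStorage.
var chunksData = new List<byte[]>
{
    new byte[] { 1, 2, 3 },
    new byte[] { 4, 5, 6, 7, 8 },
};
ContentStorage.Create("/data/streaming/MyTexture", chunksData, out ContentStorageHeader header);
// The header records where each chunk landed in the file; smaller chunks are written first,
// but Chunks[] keeps the original chunk indexing, so the hash stays index-order dependent.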