/// <summary>
/// Write the state provider metadata to the output stream.
/// </summary>
/// <param name="outputstream">Output stream.</param>
/// <param name="metadataList">The state providers' metadata.</param>
/// <param name="cancellationToken">Token used to signal cancellation.</param>
/// <returns>Task that represents the asynchronous operation.</returns>
private async Task<StateManagerFileBlocks> WriteMetadataAsync(
    Stream outputstream,
    IEnumerable<SerializableMetadata> metadataList,
    CancellationToken cancellationToken)
{
    // Sizes of each flushed block, recorded so readers can later consume the
    // metadata section with large sequential reads.
    var blockSizes = new List<int>();
    var bufferedByteCount = 0;

    using (var memoryStream = new MemoryStream(DesiredBlockSize * 2))
    using (var memoryWriter = new InMemoryBinaryWriter(memoryStream))
    {
        foreach (var metadata in metadataList)
        {
            // Serialize this state provider's metadata into the in-memory buffer.
            var recordStart = checked((int)memoryStream.Position);
            var serializedSize = WriteMetadata(memoryWriter, metadata);

            // Checksum the serialized record (the record size field is included
            // in the checksummed range) and append the checksum right after it.
            var checksum = CRC64.ToCRC64(memoryStream.GetBuffer(), recordStart, serializedSize);
            memoryWriter.Write(checksum);

            // Once enough bytes have accumulated in memory, flush them to the
            // output stream as one block and rewind the buffer.
            bufferedByteCount = checked((int)memoryStream.Position);
            if (bufferedByteCount >= DesiredBlockSize)
            {
                await outputstream.WriteAsync(memoryStream.GetBuffer(), 0, bufferedByteCount, cancellationToken).ConfigureAwait(false);
                blockSizes.Add(bufferedByteCount);
                memoryStream.Position = 0;
            }

            // Keep the file-level counters in sync with what was written.
            this.Properties.StateProviderCount++;
            if (metadata.ParentStateProviderId == DynamicStateManager.EmptyStateProviderId)
            {
                this.Properties.RootStateProviderCount++;
            }
        }

        // Flush whatever remains buffered as the final (possibly partial) block.
        bufferedByteCount = checked((int)memoryStream.Position);
        if (bufferedByteCount > 0)
        {
            await outputstream.WriteAsync(memoryStream.GetBuffer(), 0, bufferedByteCount, cancellationToken).ConfigureAwait(false);
            blockSizes.Add(bufferedByteCount);
        }
    }

    // Return the record block sizes.
    return new StateManagerFileBlocks(blockSizes.ToArray());
}
/// <summary>
/// Deserialize a <see cref="StateManagerFileBlocks"/> from the given stream.
/// </summary>
/// <param name="reader">Stream to deserialize from.</param>
/// <param name="handle">Starting offset and size within the stream for the table index.</param>
/// <returns>The deserialized <see cref="StateManagerFileBlocks"/>, or null when the section is empty.</returns>
public static StateManagerFileBlocks Read(BinaryReader reader, BlockHandle handle)
{
    // An empty section means there are no blocks to deserialize.
    if (handle.Size == 0)
    {
        return null;
    }

    var result = new StateManagerFileBlocks();

    // Seek to the start of the section and read the record block sizes.
    reader.BaseStream.Position = handle.Offset;
    result.RecordBlockSizes = ReadArray(reader);

    // The reader must land exactly on the section's end offset;
    // anything else indicates the section is corrupt.
    if (reader.BaseStream.Position != handle.EndOffset)
    {
        throw new InvalidDataException(SR.Error_SMBlocksCorrupt);
    }

    return result;
}
/// <summary>
/// Read the list of state providers' metadata from the <see cref="StateManagerFile"/>.
/// </summary>
/// <param name="stream">Stream to read from.</param>
/// <param name="traceType">Tracing type information.</param>
/// <param name="deserializeTypes">Should the types be deserialized.</param>
/// <param name="cancellationToken">Token used to signal cancellation.</param>
/// <returns>The list of state providers' metadata read.</returns>
/// <exception cref="InvalidDataException">The metadata section is missing, truncated, or fails checksum validation.</exception>
private async Task<List<SerializableMetadata>> ReadMetadataAsync(
    Stream stream,
    string traceType,
    bool deserializeTypes,
    CancellationToken cancellationToken)
{
    var metadataList = new List<SerializableMetadata>((int)this.properties.StateProviderCount);

    // Read the block sizes, which let us consume the metadata section with
    // large sequential reads instead of one small read per record.
    var blocks = await FileBlock.ReadBlockAsync(
        stream,
        this.properties.BlocksHandle,
        (sectionReader, sectionHandle) => StateManagerFileBlocks.Read(sectionReader, sectionHandle)).ConfigureAwait(false);

    // Currently, we expect the block sizes to always be present.
    if (blocks == null || blocks.RecordBlockSizes == null)
    {
        throw new InvalidDataException(string.Format(CultureInfo.CurrentCulture, SR.Error_SMFile_Corrupt_BlockSizesMissing));
    }

    var recordBlockSizes = blocks.RecordBlockSizes;

    // Read blocks from the file. Each state provider metadata is checksummed individually.
    using (var itemStream = new MemoryStream(DesiredBlockSize * 2))
    using (var itemReader = new InMemoryBinaryReader(itemStream))
    {
        stream.Position = this.properties.MetadataHandle.Offset;
        var endOffset = this.properties.MetadataHandle.EndOffset;

        // Each block has one or more state provider metadata records.
        foreach (var recordBlockSize in recordBlockSizes)
        {
            if (stream.Position + recordBlockSize > endOffset)
            {
                throw new InvalidDataException(
                    string.Format(CultureInfo.CurrentCulture, SR.Error_SMFile_Corrupt_BlockExtendPastFile));
            }

            // Read the block into memory.
            itemStream.Position = 0;
            itemStream.SetLength(recordBlockSize);

            // BUGFIX: Stream.ReadAsync may return fewer bytes than requested, so a
            // single call could leave stale/zero bytes in the buffer and cause a
            // spurious checksum failure (or worse, silent misreads). Loop until the
            // entire block is buffered; a zero-byte read means the stream ended
            // prematurely, which we treat as corruption.
            var bytesRead = 0;
            while (bytesRead < recordBlockSize)
            {
                var count = await stream.ReadAsync(
                    itemStream.GetBuffer(),
                    bytesRead,
                    recordBlockSize - bytesRead,
                    cancellationToken).ConfigureAwait(false);
                if (count == 0)
                {
                    throw new InvalidDataException(
                        string.Format(CultureInfo.CurrentCulture, SR.Error_SMFile_Corrupt_BlockExtendPastFile));
                }

                bytesRead += count;
            }

            // Read to the end of the metadata section.
            var endBlockOffset = itemStream.Length;
            while (itemStream.Position < endBlockOffset)
            {
                var position = itemStream.Position;

                // Read the record size and validate it is not obviously corrupted.
                if (position + sizeof(int) > endBlockOffset)
                {
                    throw new InvalidDataException(
                        string.Format(CultureInfo.CurrentCulture, SR.Error_SMFile_Corrupt_MissingRecordSize));
                }

                var recordSize = itemReader.ReadInt32();
                var recordSizeWithChecksum = recordSize + sizeof(ulong);

                // We need to do extra validation on the recordSize, because we haven't validated the bits
                // against the checksum and we need the recordSize to locate the checksum.
                if (recordSize < 0)
                {
                    throw new InvalidDataException(
                        string.Format(CultureInfo.CurrentCulture, SR.Error_SMFile_Corrupt_NegativeRecordSize));
                }

                if (position + recordSize > endBlockOffset)
                {
                    throw new InvalidDataException(
                        string.Format(CultureInfo.CurrentCulture, SR.Error_SMFile_Corrupt_RecordExtendsPastFile));
                }

                if (position + recordSizeWithChecksum > endBlockOffset)
                {
                    throw new InvalidDataException(
                        string.Format(CultureInfo.CurrentCulture, SR.Error_SMFile_Corrupt_MissingChecksum));
                }

                // Compute the checksum over the record bytes (including the record size field).
                var computedChecksum = CRC64.ToCRC64(
                    itemStream.GetBuffer(),
                    checked((int)position),
                    recordSize);

                // Read the checksum (checksum is stored immediately after the record bytes).
                itemStream.Position = position + recordSize;
                var checksum = itemReader.ReadUInt64();

                // Verify the checksum before trusting any of the record's contents.
                if (checksum != computedChecksum)
                {
                    throw new InvalidDataException(
                        string.Format(CultureInfo.CurrentCulture, SR.Error_SMFile_Corrupt_MismatchedChecksum));
                }

                // Read and re-create the state provider metadata, now that the checksum is validated.
                itemStream.Position = position;
                var metadata = ReadMetadata(itemReader, traceType, deserializeTypes);
                metadataList.Add(metadata);

                // Advance past the record and its trailing checksum to the next record.
                itemStream.Position = position + recordSizeWithChecksum;
            }
        }
    }

    // Post-validation: the number of records read must match the file header's count.
    if (metadataList.Count != (int)this.Properties.StateProviderCount)
    {
        throw new InvalidDataException(
            string.Format(CultureInfo.CurrentCulture, SR.Error_SMFile_Corrupt_MetadataCountMismatch));
    }

    return metadataList;
}