Example #1
        /// <summary>
        /// Deserializes the FileMetadata from the stream.
        /// </summary>
        /// <param name="reader"></param>
        /// <param name="tracer"></param>
        /// <returns></returns>
        /// <remarks>
        /// Name                    || Size
        ///
        /// TotalNumberOfEntries       8
        /// NumberOfValidEntries       8
        /// NumberOfDeletedEntries     8
        /// TimeStamp                  8
        ///
        /// FileId                     4
        /// CanBeDeleted               1
        /// RESERVED                   3
        ///
        /// FileName                   N
        /// PADDING                    (N % 8 == 0) ? 0 : 8 - (N % 8)
        ///
        /// RESERVED: Fixed padding that can be used to add fields in the future.
        /// PADDING:  Due to its dynamic size, it cannot be used for adding fields.
        ///
        /// </remarks>
        public static FileMetadata Read(InMemoryBinaryReader reader, string tracer)
        {
            Utility.Assert(reader.IsAligned(), "must be aligned");

            var totalNumberOfEntries   = reader.ReadInt64();
            var numberOfValidEntries   = reader.ReadInt64();
            var numberOfDeletedEntries = reader.ReadInt64();
            var timeStamp = reader.ReadInt64();

            var fileId = reader.ReadUInt32();

            reader.ReadPaddingUntilAligned(true);

            var fileName = reader.ReadString();

            reader.ReadPaddingUntilAligned(false);

            Utility.Assert(reader.IsAligned(), "must be aligned");

            return(new FileMetadata(tracer, fileId,
                                    fileName, totalNumberOfEntries,
                                    numberOfValidEntries, timeStamp,
                                    numberOfDeletedEntries, false,
                                    FileMetadata.InvalidTimeStamp,
                                    FileMetadata.InvalidTimeStamp));
        }
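        // Illustrative sketch (not part of the original source): how the PADDING row in the
        // remarks table above is computed for the variable-length FileName field. The helper
        // name is hypothetical; it only demonstrates the (N % 8 == 0) ? 0 : 8 - (N % 8) rule.
        private static int ComputePaddingToEightByteBoundary(int n)
        {
            // For example, a 13-byte name needs 3 bytes of padding to reach the next
            // 8-byte boundary (13 + 3 = 16).
            return (n % 8 == 0) ? 0 : 8 - (n % 8);
        }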
        /// <summary>
        /// Read key from file for merge.
        /// </summary>
        /// <remarks>
        /// The data written is 8-byte aligned.
        ///
        /// Name                    Type        Size
        ///
        /// KeySize                 int         4
        /// Kind                    byte        1
        /// RESERVED                            3
        /// VersionSequenceNumber   long        8
        ///
        /// (DeletedVersion)
        /// TimeStamp               long        8
        ///
        /// (Inserted || Updated)
        /// Offset                  long        8
        /// ValueChecksum           ulong       8
        /// ValueSize               int         4
        /// RESERVED                            4
        ///
        /// Key                     TKey        N
        /// PADDING                             (N % 8 == 0) ? 0 : 8 - (N % 8)
        ///
        /// RESERVED: Fixed padding that can be used to add fields in the future.
        /// PADDING:  Due to its dynamic size, it cannot be used for adding fields.
        ///
        /// Note: The largest key size supported is int.MaxValue bytes.
        /// </remarks>
        public KeyData <TKey, TValue> ReadKey <TKey, TValue>(InMemoryBinaryReader memoryBuffer, IStateSerializer <TKey> keySerializer)
        {
            memoryBuffer.ThrowIfNotAligned();

            // This mirrors WriteKey().
            var keySize = memoryBuffer.ReadInt32();
            var kind    = (RecordKind)memoryBuffer.ReadByte();

            memoryBuffer.ReadPaddingUntilAligned(true);

            var lsn = memoryBuffer.ReadInt64();

            long  valueOffset   = 0;
            var   valueSize     = 0;
            ulong valueChecksum = 0;
            long  timeStamp     = 0;

            if (kind == RecordKind.DeletedVersion)
            {
                timeStamp = memoryBuffer.ReadInt64();
            }
            else
            {
                valueOffset   = memoryBuffer.ReadInt64();
                valueChecksum = memoryBuffer.ReadUInt64();
                valueSize     = memoryBuffer.ReadInt32();
                memoryBuffer.ReadPaddingUntilAligned(true);
            }

            // Protection in case the user's key serializer doesn't leave the stream at the correct end point.
            var keyPosition = memoryBuffer.BaseStream.Position;
            var key         = keySerializer.Read(memoryBuffer);

            memoryBuffer.BaseStream.Position = keyPosition + keySize;
            memoryBuffer.ReadPaddingUntilAligned(false);

            TVersionedItem <TValue> value = null;

            switch (kind)
            {
            case RecordKind.DeletedVersion:
                value = new TDeletedItem <TValue>(lsn, this.FileId);
                break;

            case RecordKind.InsertedVersion:
                value = new TInsertedItem <TValue>(lsn, this.FileId, valueOffset, valueSize, valueChecksum);
                break;

            case RecordKind.UpdatedVersion:
                value = new TUpdatedItem <TValue>(lsn, this.FileId, valueOffset, valueSize, valueChecksum);
                break;

            default:
                throw new InvalidDataException(string.Format(CultureInfo.CurrentCulture, SR.Error_KeyCheckpointFile_RecordKind, (byte)kind));
            }

            return(new KeyData <TKey, TValue>(key, value, timeStamp));
        }
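        // Illustrative sketch (not part of the original source): the fixed portion of each key
        // record implied by the remarks table above. A deleted record has a 24-byte fixed part
        // (4 + 1 + 3 + 8 + 8); inserted and updated records have 40 bytes
        // (4 + 1 + 3 + 8 + 8 + 8 + 4 + 4). Both are followed by the serialized key and its
        // alignment padding. The constant names are hypothetical.
        private const int DeletedKeyRecordFixedSize  = sizeof(int) + sizeof(byte) + 3 + sizeof(long) + sizeof(long);                                   // 24
        private const int InsertedKeyRecordFixedSize = sizeof(int) + sizeof(byte) + 3 + sizeof(long) + sizeof(long) + sizeof(ulong) + sizeof(int) + 4; // 40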
        private static async Task <ConditionalValue <CheckpointManager <T> > > TryReadCheckpointFile(
            string directory,
            string fileName,
            IStateSerializer <T> valueSerializer,
            string traceType)
        {
            var filePath = Path.Combine(directory, fileName);

            if (!FabricFile.Exists(filePath))
            {
                return(new ConditionalValue <CheckpointManager <T> >(false, default(CheckpointManager <T>)));
            }

            using (var stream = FabricFile.Open(filePath, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, FileOptions.SequentialScan))
            {
                var intSegment = new ArraySegment <byte>(new byte[sizeof(int)]);

                var versionRead = await SerializationHelper.ReadIntAsync(intSegment, stream).ConfigureAwait(false);

                if (versionRead != FileVersion)
                {
                    throw new InvalidDataException(string.Format("versionRead '{0}' != FileVersion '{1}'", versionRead, FileVersion));
                }

                var nameLength = await SerializationHelper.ReadIntAsync(intSegment, stream).ConfigureAwait(false);

                if (nameLength < 0)
                {
                    throw new InvalidDataException(string.Format("nameLength '{0}' < 0", nameLength));
                }

                if (nameLength == 0)
                {
                    return(new ConditionalValue <CheckpointManager <T> >(true, new CheckpointManager <T>(null, directory, fileName, valueSerializer, traceType)));
                }

                var nameSegment = new ArraySegment <byte>(new byte[nameLength]);
                await SerializationHelper.ReadBytesAsync(nameSegment, nameLength, stream).ConfigureAwait(false);

                string name;

                using (var reader = new InMemoryBinaryReader(new MemoryStream(nameSegment.Array)))
                {
                    name = reader.ReadString();
                }

                var path = Path.Combine(directory, name);
                if (!FabricFile.Exists(path))
                {
                    throw new InvalidDataException(string.Format("Current checkpoint file does not exist: {0}", path));
                }

                return(new ConditionalValue <CheckpointManager <T> >(
                           true,
                           new CheckpointManager <T>(new Checkpoint <T>(directory, name, valueSerializer, traceType), directory, fileName, valueSerializer, traceType)));
            }
        }
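        // Illustrative sketch (not part of the original source): a hypothetical write-side
        // counterpart to TryReadCheckpointFile, showing the header layout the read path expects:
        // [FileVersion : int][nameLength : int][name bytes]. It assumes the name bytes are the
        // length-prefixed format produced by BinaryWriter.Write(string), to match
        // InMemoryBinaryReader.ReadString() on the read side, and uses plain .NET types rather
        // than the library's own writer.
        private static void WriteCheckpointFileHeaderSketch(Stream stream, int fileVersion, string currentCheckpointName)
        {
            using (var memory = new MemoryStream())
            using (var writer = new BinaryWriter(memory))
            {
                // Serialize the checkpoint file name with its length prefix.
                writer.Write(currentCheckpointName);
                writer.Flush();

                var nameBytes = memory.ToArray();

                // Version, byte count of the serialized name, then the name bytes themselves.
                stream.Write(BitConverter.GetBytes(fileVersion), 0, sizeof(int));
                stream.Write(BitConverter.GetBytes(nameBytes.Length), 0, sizeof(int));
                stream.Write(nameBytes, 0, nameBytes.Length);
            }
        }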
        public static PropertyChunkMetadata Read(InMemoryBinaryReader reader)
        {
            reader.ThrowIfNotAligned();

            var currentBlockSize = reader.ReadInt32();

            reader.ReadPaddingUntilAligned(true);

            reader.ThrowIfNotAligned();

            return(new PropertyChunkMetadata(currentBlockSize));
        }
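        // Illustrative note (not part of the original source): based on the reads above, a
        // PropertyChunkMetadata occupies one 8-byte-aligned slot on disk -- a 4-byte block size
        // followed by 4 bytes of alignment padding -- which is presumably what
        // PropertyChunkMetadata.Size refers to in ReadDiskMetadataAsync below. The constant
        // name here is hypothetical.
        private const int SerializedSizeSketch = sizeof(int) + 4; // 8 bytes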
Example #5
        public static async Task <IEnumerable <IListElement <T> > > ReadAsync(Stream stream, IStateSerializer <T> stateSerializer, string traceType)
        {
            var listElementsCountSegment = new ArraySegment <byte>(new byte[sizeof(int)]);
            var listElementsBytesSegment = new ArraySegment <byte>(new byte[sizeof(int)]);

            var listElementsCount = await SerializationHelper.ReadIntAsync(listElementsCountSegment, stream).ConfigureAwait(false);

            if (listElementsCount < 0)
            {
                throw new InvalidDataException(string.Format("Unexpected listElementsCount: {0}", listElementsCount));
            }

            var listElementsBytes = await SerializationHelper.ReadIntAsync(listElementsBytesSegment, stream).ConfigureAwait(false);

            if (listElementsBytes < 0)
            {
                throw new InvalidDataException(string.Format("Unexpected listElementsBytes: {0}", listElementsBytes));
            }

            using (var reader = new InMemoryBinaryReader(new MemoryStream()))
            {
                reader.BaseStream.SetLength(listElementsBytes + sizeof(ulong));
                await SerializationHelper.ReadBytesAsync(
                    new ArraySegment <byte>(reader.BaseStream.GetBuffer()),
                    listElementsBytes + sizeof(ulong),
                    stream).ConfigureAwait(false);

                var listElements = new IListElement <T> [listElementsCount];
                for (var i = 0; i < listElementsCount; i++)
                {
                    var id    = reader.ReadInt64();
                    var value = stateSerializer.Read(reader); // if this tries to read beyond the end of the stream, listElementsBytes was incorrect (too small)
                    listElements[i] = DataStore <T> .CreateQueueListElement(id, value, traceType, ListElementState.EnqueueApplied);
                }

                var readCRC = reader.ReadUInt64();
                var calcCRC =
                    CRC64.ToCRC64(new[] { listElementsCountSegment, listElementsBytesSegment, new ArraySegment <byte>(reader.BaseStream.GetBuffer(), 0, listElementsBytes), });
                if (readCRC != calcCRC)
                {
                    throw new InvalidDataException(string.Format("CRC mismatch.  Read: {0} Calculated: {1}", readCRC, calcCRC));
                }

                return(listElements);
            }
        }
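        // Illustrative sketch (not part of the original source): a hypothetical write-side
        // counterpart to ReadAsync above, showing the framing the read path expects:
        // [listElementsCount : int][listElementsBytes : int][serialized elements][CRC64 : ulong],
        // where the CRC covers the two length fields plus the element bytes. It uses plain
        // stream writes instead of SerializationHelper, so treat it as a format sketch only.
        private static async Task WriteListElementsSketchAsync(Stream stream, byte[] serializedElements, int listElementsCount)
        {
            var countBytes = BitConverter.GetBytes(listElementsCount);
            var sizeBytes  = BitConverter.GetBytes(serializedElements.Length);

            // Checksum over the count, the byte-size field, and the serialized elements,
            // mirroring the segments passed to CRC64.ToCRC64 in ReadAsync.
            var crc = CRC64.ToCRC64(new[]
            {
                new ArraySegment <byte>(countBytes),
                new ArraySegment <byte>(sizeBytes),
                new ArraySegment <byte>(serializedElements),
            });

            await stream.WriteAsync(countBytes, 0, countBytes.Length).ConfigureAwait(false);
            await stream.WriteAsync(sizeBytes, 0, sizeBytes.Length).ConfigureAwait(false);
            await stream.WriteAsync(serializedElements, 0, serializedElements.Length).ConfigureAwait(false);
            await stream.WriteAsync(BitConverter.GetBytes(crc), 0, sizeof(ulong)).ConfigureAwait(false);
        }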
        public void Dispose()
        {
            // Dispose the reader and memory stream, and return the file stream to the reader pool.

            if (this.reader != null)
            {
                this.reader.Dispose();
                this.reader = null;
            }

            if (this.memoryStream != null)
            {
                this.memoryStream.Dispose();
                this.memoryStream = null;
            }

            this.keyCheckpointFile.ReaderPool.ReleaseStream(this.fileStream, true);
            this.fileStream = null;
        }
Example #7
        /// <summary>
        /// Read all file metadata from the metadata file.
        /// </summary>
        /// <param name="metadataTable">The metadata table.</param>
        /// <param name="filestream">The file stream to read from.</param>
        /// <param name="properties">The metadata manager file properties.</param>
        /// <param name="traceType">Tracing information.</param>
        /// <returns>A task that represents the asynchronous read operation.</returns>
        private static async Task <int> ReadDiskMetadataAsync(
            Dictionary <uint, FileMetadata> metadataTable, Stream filestream, MetadataManagerFileProperties properties, string traceType)
        {
            var startOffset   = properties.MetadataHandle.Offset;
            var endOffset     = properties.MetadataHandle.EndOffset;
            var metadataCount = 0;
            var fileId        = 0;

            // No metadata to read (there are no metadata chunks).
            if (startOffset + sizeof(int) >= endOffset)
            {
                return(fileId);
            }

            filestream.Position = startOffset;

            using (var metadataStream = new MemoryStream(capacity: 64 * 1024))
                using (var metadataReader = new InMemoryBinaryReader(metadataStream))
                {
                    // Read the first key chunk size into memory.
                    metadataStream.SetLength(64 * 1024);
                    await filestream.ReadAsync(metadataStream.GetBuffer(), 0, PropertyChunkMetadata.Size).ConfigureAwait(false);

                    var propertyChunkMetadata = PropertyChunkMetadata.Read(metadataReader);
                    var chunkSize             = propertyChunkMetadata.BlockSize;
                    filestream.Position -= PropertyChunkMetadata.Size;

                    while (filestream.Position + chunkSize + sizeof(ulong) <= endOffset)
                    {
                        // Consistency checks.
                        if (chunkSize < 0)
                        {
                            throw new InvalidDataException(string.Format(CultureInfo.CurrentCulture, SR.Error_Metadata_Corrupt_NegativeSize_OneArgs, chunkSize));
                        }

                        // Read the entire chunk (plus the checksum and next chunk size) into memory.
                        metadataStream.SetLength(chunkSize + sizeof(ulong) + sizeof(int));
                        await filestream.ReadAsync(metadataStream.GetBuffer(), 0, chunkSize + sizeof(ulong) + sizeof(int)).ConfigureAwait(false);

                        // Read the checksum.
                        metadataStream.Position = chunkSize;
                        var checksum = metadataReader.ReadUInt64();

                        // Re-compute the checksum.
                        var expectedChecksum = CRC64.ToCRC64(metadataStream.GetBuffer(), 0, chunkSize);
                        if (checksum != expectedChecksum)
                        {
                            throw new InvalidDataException(string.Format(CultureInfo.CurrentCulture, SR.Error_Metadata_Corrupt_ChecksumMismatch_TwoArgs, checksum, expectedChecksum));
                        }

                        // Deserialize the value into memory.
                        metadataStream.Position = sizeof(int);
                        metadataReader.ReadPaddingUntilAligned(true);
                        while (metadataStream.Position < chunkSize)
                        {
                            var fileMetadata = FileMetadata.Read(metadataReader, traceType);
                            if (metadataTable.ContainsKey(fileMetadata.FileId))
                            {
                                throw new InvalidDataException(string.Format(CultureInfo.CurrentCulture, SR.Error_DuplicateFileId_Found_OneArgs, fileMetadata.FileId));
                            }

                            metadataTable.Add(fileMetadata.FileId, fileMetadata);
                            metadataCount++;
                        }

                        // Read the next chunk size.
                        chunkSize            = BitConverter.ToInt32(metadataStream.GetBuffer(), chunkSize + sizeof(ulong));
                        filestream.Position -= sizeof(int);
                    }

                    // Consistency checks.
                    if (filestream.Position != endOffset)
                    {
                        throw new InvalidDataException(SR.Error_Metadata_Corrupt_IncorrectSize);
                    }

                    if (metadataCount != properties.FileCount)
                    {
                        throw new InvalidDataException(string.Format(CultureInfo.CurrentCulture, SR.Error_Metadata_Corrupt_FileCountMismatch_TwoArgs, metadataCount, properties.FileCount));
                    }

                    return(fileId);
                }
        }
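        // Illustrative sketch (not part of the original source): the per-chunk framing that
        // ReadDiskMetadataAsync walks, reconstructed from the reads above.
        //
        // Name                    Size
        //
        // ChunkSize               4
        // RESERVED                4
        // FileMetadata entries    ChunkSize - 8 (in total)
        // Checksum (CRC64)        8
        //
        // The checksum covers the first ChunkSize bytes (the ChunkSize field, the RESERVED
        // padding, and the entries). The 4 bytes after the checksum are the next chunk's
        // ChunkSize, which is re-read on the next iteration (hence filestream.Position -= sizeof(int)).
        // A hypothetical helper for the bytes actually consumed per iteration:
        private static long BytesConsumedPerChunkSketch(int chunkSize)
        {
            // Chunk body plus checksum; the trailing next-chunk-size int is rewound and
            // re-read, so it is not consumed by the current iteration.
            return chunkSize + sizeof(ulong);
        }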
        public async Task <bool> MoveNextAsync(CancellationToken cancellationToken)
        {
            // Starting from state zero.
            if (this.stateZero)
            {
                // Assert that endOffset - startOffset is a multiple of 4k.

                this.stateZero   = false;
                this.itemsBuffer = new List <KeyData <TKey, TValue> >();
                this.index       = 0;

                // Call read keys and populate list.
                this.memoryStream = new MemoryStream(capacity: ChunkSize);
                this.reader       = new InMemoryBinaryReader(this.memoryStream);

                // Assert that the file stream is null.
                Diagnostics.Assert(this.fileStream == null, this.traceType, "fileStream should be null to start with.");
                this.fileStream = this.keyCheckpointFile.ReaderPool.AcquireStream();

                var snapFileStream = this.fileStream as FileStream;
                Diagnostics.Assert(snapFileStream != null, this.traceType, "fileStream must be a FileStream");
                Microsoft.ServiceFabric.Replicator.Utility.SetIoPriorityHint(snapFileStream.SafeFileHandle, this.priorityHint);

                this.fileStream.Position = this.startOffset;

                var result = await this.ReadChunkAsync().ConfigureAwait(false);

                if (result)
                {
                    this.Current = this.itemsBuffer[this.index];
                    return(true);
                }
                else
                {
                    Diagnostics.Assert(this.keyCount == this.keyCheckpointFile.Properties.KeyCount, this.traceType, "Failed to read the expected number of keys.");
                    return(false);
                }
            }
            else
            {
                this.index++;

                // Check if it is in the buffer.
                if (this.index < this.itemsBuffer.Count)
                {
                    this.Current = this.itemsBuffer[this.index];
                    return(true);
                }
                else
                {
                    // Read the next block.
                    var result = await this.ReadChunkAsync().ConfigureAwait(false);

                    if (result)
                    {
                        this.Current = this.itemsBuffer[this.index];
                        return(true);
                    }
                    else
                    {
                        Diagnostics.Assert(this.keyCount == this.keyCheckpointFile.Properties.KeyCount, this.traceType, "Failed to read the expected number of keys.");
                        return(false);
                    }
                }
            }
        }
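        // Illustrative usage sketch (not part of the original source): MoveNextAsync follows the
        // usual async-enumerator pattern, so a caller drains the keys roughly like this (the
        // 'enumerator' variable and its construction are hypothetical):
        //
        //     while (await enumerator.MoveNextAsync(cancellationToken).ConfigureAwait(false))
        //     {
        //         KeyData <TKey, TValue> item = enumerator.Current;
        //         // process the key and its versioned value...
        //     }
        //
        //     enumerator.Dispose(); // releases the pooled file stream (see Dispose above)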