/// <summary>
/// Reads the sample-to-chunk table from this atom and returns its entries.
/// </summary>
/// <param name="byteStreamDataReader">a byte stream reader to read the sample-to-chunk table from.</param>
/// <returns>the sample-to-chunk table; empty when this atom is not valid</returns>
internal IList<SampleToChunkTableEntry> GetSampleToChunkTable(ByteStreamDataReader byteStreamDataReader)
{
	var table = new List<SampleToChunkTableEntry>();
	if (!Valid)
	{
		return table;
	}

	// The entry count sits 12 bytes into the atom (8-byte header + 4 bytes version/flags)
	byteStreamDataReader.Position = Offset + 12;
	uint entryCount = (uint)byteStreamDataReader.GetInt();

	for (int index = 0; index < entryCount; index++)
	{
		// Each table entry consists of three consecutive 32-bit fields
		uint firstChunk = (uint)byteStreamDataReader.GetInt();
		uint samplesPerChunk = (uint)byteStreamDataReader.GetInt();
		uint sampleDescriptionId = (uint)byteStreamDataReader.GetInt();
		table.Add(new SampleToChunkTableEntry(firstChunk, samplesPerChunk, sampleDescriptionId));
	}
	return table;
}
/// <summary>
/// Reads the chunk offset table from this atom. Entries are 32-bit or
/// 64-bit depending on the atom type; any other atom type yields an
/// empty table (the loop body adds nothing).
/// </summary>
/// <param name="byteStreamDataReader">a byte stream reader to read the chunk offset table from.</param>
/// <returns>the chunk offsets; empty when this atom is not valid</returns>
internal IList<long> GetOffsetTable(ByteStreamDataReader byteStreamDataReader)
{
	var offsets = new List<long>();
	if (!Valid)
	{
		return offsets;
	}

	// The entry count sits 12 bytes into the atom (8-byte header + 4 bytes version/flags)
	byteStreamDataReader.Position = Offset + 12;
	uint entryCount = (uint)byteStreamDataReader.GetInt();

	for (int index = 0; index < entryCount; index++)
	{
		if (_atomType == AtomType.ChunkOffset32Bit)
		{
			// 32-bit offsets are widened to long via an unsigned read
			offsets.Add((uint)byteStreamDataReader.GetInt());
		}
		else if (_atomType == AtomType.ChunkOffset64Bit)
		{
			offsets.Add(byteStreamDataReader.GetLong());
		}
	}
	return offsets;
}
/// <summary>
/// Clears every fixture field created by <code>SetUp()</code> so each
/// test starts from a clean state and the mocks can be collected.
/// </summary>
public void TearDown()
{
	_inputFile = null;
	_dataReader = null;
	_byteStreamDataReader = null;
	_byteStreamParser = null;
	_header = null;
	_childHeader = null;
	_grandChildHeader = null;
	_mockAttributeHeader = null;
}
/// <summary>
/// Builds the fixture: a stubbed input file, a reader/parser chain over
/// the mock data, and a four-level mock header hierarchy.
/// </summary>
public void SetUp()
{
	// Stubbed input file reporting the test name and length
	var inputFile = MockRepository.GenerateStub<IInputFile>();
	inputFile.Stub(x => x.Name).Return(TestFileName);
	inputFile.Stub(x => x.Length).Return(TestFileLength);
	_inputFile = inputFile;

	// Reader/parser chain over the mock data
	_dataReader = new MockDataReader(MockData, _inputFile);
	_byteStreamDataReader = new ByteStreamDataReader(_dataReader);
	_byteStreamParser = new MockByteStreamParser(_byteStreamDataReader);

	// Header hierarchy: root -> child -> grandchild -> attribute header
	_header = new MockHeader(Enumerable.Repeat(_detector, 1), MockHeaderName.Root);
	_childHeader = new MockHeader(_header, MockHeaderName.MockHeaderTypeTwo);
	_grandChildHeader = new MockSubHeader(_childHeader);
	_mockAttributeHeader = new MockAttributeHeader(_grandChildHeader);
}
/// <summary>
/// Determines whether the given GUID bytes identify a data packet by
/// matching the leading/trailing GUID pattern and probing the next two
/// bytes of the stream (without consuming them) for a replicated data
/// length of 8.
/// </summary>
/// <param name="guidArray">the GUID bytes to test; indices 0, 1, 10 and 11 are inspected</param>
/// <param name="dataReader">the stream positioned at the packet; its position is restored</param>
/// <returns>true if the GUID and stream contents match a data packet</returns>
public static bool IsDataPacket(this byte[] guidArray, ByteStreamDataReader dataReader)
{
	// Guard: the pattern below reads indices 0, 1, 10 and 11; a short or
	// missing array previously threw IndexOutOfRangeException here.
	if (guidArray == null || guidArray.Length < 12)
	{
		return false;
	}

	if (guidArray[0] == 0x11 && guidArray[1] == 0x5D && (guidArray[10] & 0xc0) == 0x80 && ((guidArray[11] & 0x7F) == 1 || (guidArray[11] & 0x7F) == 2))
	{
		// NOTE(review): '<' requires one spare byte beyond the two reads
		// below; '<=' would be sufficient — confirm whether the stricter
		// bound is intentional.
		if (dataReader.Position + 2 < dataReader.Length)
		{
			dataReader.GetByte(); // Ignore byte
			byte replicatedDataLength = dataReader.GetByte();
			dataReader.Position -= 2; // rewind: this probe must not consume input
			if (replicatedDataLength == 8)
			{
				return true;
			}
		}
	}
	return false;
}
/// <summary>
/// Dispatches detection based on the first stream byte (mod 51). The
/// byte is only peeked: the reader position is restored before any
/// delegate detector runs.
/// </summary>
/// <param name="dataReader">the data to scan</param>
/// <param name="dataBlockBuilder">receives the detected block boundaries</param>
/// <param name="context">the scan context</param>
/// <returns>the detected data block, or null when no detector matches</returns>
public override IDataBlock DetectData(IDataReader dataReader, IDataBlockBuilder dataBlockBuilder, IScanContext context)
{
	var byteStreamDataReader = new ByteStreamDataReader(dataReader);

	// Peek the selector byte without consuming it
	int selector = byteStreamDataReader.GetByte() % 51;
	byteStreamDataReader.Position--;

	if (selector == 0)
	{
		return DetectData1(byteStreamDataReader, dataBlockBuilder, context);
	}
	if (selector == 4)
	{
		return DetectData2(byteStreamDataReader, dataBlockBuilder, context);
	}
	if (selector == 7)
	{
		return DetectData3(byteStreamDataReader, dataBlockBuilder, context);
	}

	// No detector matched
	dataBlockBuilder.IsFullFile = false;
	return null;
}
/// <summary>
/// Reads the sample size table. The table contains one 32-bit size entry
/// per sample, indexed by sample number (first entry = first sample).
/// It is only present when the fixed sample size field is zero; when
/// that field is non-zero, every sample shares the single size returned
/// by <code>GetSampleSizeValue()</code> and this table is empty.
/// </summary>
/// <param name="byteStreamDataReader">a byte stream reader to read the sample size table from.</param>
/// <returns>The sample size table; empty when the atom is invalid or a fixed sample size applies.</returns>
internal IList<uint> GetSampleSizeTable(ByteStreamDataReader byteStreamDataReader)
{
	var sizes = new List<uint>();
	if (!Valid)
	{
		return sizes;
	}
	// A non-zero fixed sample size means there is no per-sample table
	if (GetSampleSizeValue(byteStreamDataReader) != 0)
	{
		return sizes;
	}

	// The entry count sits 16 bytes into the atom; the 32-bit entries follow
	byteStreamDataReader.Position = Offset + 16;
	uint entryCount = (uint)byteStreamDataReader.GetInt();
	for (int index = 0; index < entryCount; index++)
	{
		sizes.Add((uint)byteStreamDataReader.GetInt());
	}
	return sizes;
}
/// <summary>
/// Reads the 32-bit fixed sample size field. If all samples are the same
/// size, this field contains that size value. If this field is 0, the
/// samples have different sizes, stored in the sample size table; get
/// that table using method <code>GetSampleSizeTable()</code>.
/// </summary>
/// <param name="byteStreamDataReader">a byte stream reader to read the sample size value from.</param>
/// <returns>The sample size value.</returns>
internal uint GetSampleSizeValue(ByteStreamDataReader byteStreamDataReader)
{
	// The fixed sample size field sits 12 bytes into the atom
	// (8-byte header + 4 bytes version/flags)
	byteStreamDataReader.Position = Offset + 12;
	uint sampleSize = (uint)byteStreamDataReader.GetInt();
	return sampleSize;
}
/// <summary>
/// Mock detector: builds a fixed result tree by probing specific byte
/// positions, then derives the data block boundaries from the first
/// child and the last descendant of the tree.
/// </summary>
/// <param name="dataReader">the data to scan</param>
/// <param name="dataBlockBuilder">receives the detected block boundaries</param>
/// <param name="context">receives the result tree via <code>Results</code></param>
/// <returns>the built data block, or null when neither branch applies</returns>
override public IDataBlock DetectData(IDataReader dataReader, IDataBlockBuilder dataBlockBuilder, IScanContext context)
{
	IResultNode root = new MockResult(this);
	ByteStreamDataReader byteStreamDataReader = new ByteStreamDataReader(dataReader);
	dataReader = byteStreamDataReader;
	// Absolute offset of the reader within the underlying input
	long dataReaderOffset = dataReader.GetDataPacket(0, 1).StartOffset;
	long initialPosition = dataReader.Position + dataReaderOffset;
	// Branch 1: scanning from the very start of an input of at least 268 bytes
	if (initialPosition == 0 && dataReader.Length >= 268)
	{
		dataReader.Position = 0;
		long offset = 0L;
		long length;
		IResultNode parent = root;
		// Each probe below checks one byte at a fixed position and, on a
		// match, adds a result node; the first two matches also become the
		// parent of subsequent nodes (chained nesting).
		if (byteStreamDataReader.GetByte() == 69)
		{
			offset = 0L;
			length = 1L;
			parent = new MockResult1(parent, dataReader, offset, length);
		}
		// NOTE(review): firstDataBlockOffset is never read afterwards —
		// apparently left over; confirm before removing.
		long firstDataBlockOffset = offset;
		dataReader.Position = 255;
		if (byteStreamDataReader.GetByte() == 255)
		{
			offset = 255L;
			length = 2L;
			parent = new MockResult2(parent, dataReader, offset, length);
		}
		dataReader.Position = 260;
		if (byteStreamDataReader.GetByte() == 78)
		{
			offset = 260L;
			length = 3L;
			// Constructor attaches the node to its parent; instance not kept
			new MockResult1(parent, dataReader, offset, length);
		}
		offset = 264;
		length = 4;
		dataReader.Position = 264;
		if (byteStreamDataReader.GetByte() == 5)
		{
			new MockResult2(parent, dataReader, offset, length);
		}
		context.Results = root;
		// NOTE(review): assumes at least one probe matched — Children[0]
		// throws on an empty tree; confirm the mock data guarantees a match.
		var firstChild = ((MockResult)root.Children[0]);
		dataBlockBuilder.StartOffset = firstChild.Offset;
		var lastChild = ((MockResult)root.GetLastDescendant());
		dataBlockBuilder.EndOffset = (lastChild.Offset + lastChild.Length);
		dataBlockBuilder.IsFullFile = false;
		return(dataBlockBuilder.Build());
	}
	// Branch 2: the scanned range covers absolute offset 517
	if (initialPosition <= 517 && (initialPosition + dataReader.Length) >= 518)
	{
		long offset = 517L - dataReaderOffset;
		const long length = 1L;
		dataReader.Position = offset;
		if (byteStreamDataReader.GetByte() == 2)
		{
			new MockResult1(root, dataReader, offset, length);
		}
		context.Results = root;
		// NOTE(review): same Children[0] assumption as branch 1 — throws
		// when the probe above did not match.
		var firstChild = ((MockResult)root.Children[0]);
		dataBlockBuilder.StartOffset = firstChild.Offset;
		var lastChild = ((MockResult)root.GetLastDescendant());
		dataBlockBuilder.EndOffset = (lastChild.Offset + lastChild.Length);
		dataBlockBuilder.IsFullFile = (root.Children.Count == 1);
		return(dataBlockBuilder.Build());
	}
	return(null);
}
/// <summary>
/// Creates a reader over the given byte stream.
/// </summary>
/// <param name="dataReader">the byte stream to read from</param>
public ExampleReader(ByteStreamDataReader dataReader)
{
	_dataReader = dataReader;
}
/// <summary>
/// Creates an ASF parser over the given byte stream; the base class
/// stores the reader.
/// </summary>
/// <param name="dataReader">the byte stream to parse</param>
public AsfParser(ByteStreamDataReader dataReader)
	: base(dataReader)
{
}
/// <summary>
/// Creates an AVI parser over the given byte stream; the base class
/// stores the reader.
/// </summary>
/// <param name="dataReader">the byte stream to parse</param>
public AviParser(ByteStreamDataReader dataReader)
	: base(dataReader)
{
}
/// <summary>
/// Creates a mock byte stream parser over the given byte stream; the
/// base class stores the reader.
/// </summary>
/// <param name="dataReader">the byte stream to parse</param>
public MockByteStreamParser(ByteStreamDataReader dataReader)
	: base(dataReader)
{
}
/// <summary>
/// Reads the chunk layout tables for this track and populates
/// <code>_chunks</code> with every chunk (or partial chunk) that can be
/// validated against the data reader. Stops at the first chunk or
/// sample that fails validation.
/// </summary>
/// <param name="dataReader">the data reader covering the media data</param>
public void Read(IDataReader dataReader)
{
	_extraData = GetExtraData(dataReader);

	if (_requiredAtomsMissing)
	{
		// TODO: perhaps we have an 'esd' or 'avcC' atom!?
		return; // Required atoms missing for chunks, but 'mdat' may still be valid.
	}

	var byteStreamDataReader = new ByteStreamDataReader(dataReader);
	var chunkOffsetTable = _chunkOffset.GetOffsetTable(byteStreamDataReader);
	var sampleSizeTable = _sampleSize.GetSampleSizeTable(byteStreamDataReader);
	var sampleToChunkTable = _sampleToChunk.GetSampleToChunkTable(byteStreamDataReader);
	// A fixed sample size only applies when there is no per-sample table
	var sampleSizeValue = ((sampleSizeTable == null) || (sampleSizeTable.Count == 0)) ? _sampleSize.GetSampleSizeValue(byteStreamDataReader) : 0;

	// Add chunks that are contained within the 'mdat' atom
	int sampleToChunkTableIndex = 0;
	int sampleSizeTableIndex = 0;
	for (int chunkIndex = 0; chunkIndex < chunkOffsetTable.Count; chunkIndex++)
	{
		int chunkNumber = (chunkIndex + 1); // chunk numbers are 1-based
		// NOTE(review): assumes sampleToChunkTable is non-empty whenever
		// chunkOffsetTable is — confirm FindSampleToChunkTableIndex and the
		// indexing below tolerate an empty table.
		sampleToChunkTableIndex = FindSampleToChunkTableIndex(sampleToChunkTable, sampleToChunkTableIndex, chunkNumber);

		long offset = chunkOffsetTable[chunkIndex];
		uint samplesPerChunk = sampleToChunkTable[sampleToChunkTableIndex].SamplesPerChunk;

		if ((sampleSizeTable == null) || (sampleSizeTable.Count == 0))
		{
			// Fixed sample size: widen before multiplying — uint * uint
			// wraps silently in unchecked code for large chunks.
			long size = ((long)samplesPerChunk * sampleSizeValue);
			if (!CheckSampleOrChunk(dataReader, offset, size))
			{
				return;
			}
			_chunks.Add(new Chunk(offset, size));
		}
		else if ((sampleSizeTableIndex + samplesPerChunk) <= sampleSizeTable.Count)
		{
			// Per-sample sizes: accumulate and validate sample-by-sample
			long totalSize = 0;
			for (int j = 0; j < samplesPerChunk; j++)
			{
				uint size = sampleSizeTable[sampleSizeTableIndex++];
				if (!CheckSampleOrChunk(dataReader, (offset + totalSize), size))
				{
					if (totalSize > 0)
					{
						// Add partially valid chunk
						_chunks.Add(new Chunk(offset, totalSize));
					}
					return;
				}
				totalSize += size;
			}
			if (totalSize > 0)
			{
				_chunks.Add(new Chunk(offset, totalSize));
			}
		}
	}
}