/// <summary>
/// Handles an incoming audio (AUDF) block: remembers its header and feeds the
/// audio payload — the bytes after <c>header.frameSpace</c> — into the wave
/// provider. Blocks the calling thread until the provider has room, which
/// throttles file reading to playback speed.
/// </summary>
/// <param name="type">Four-character block type string (unused here; presumably always "AUDF" — confirm against caller).</param>
/// <param name="header">Parsed AUDF block header; <c>frameSpace</c> gives the payload offset within the block data.</param>
/// <param name="rawData">Buffer containing the raw block bytes.</param>
/// <param name="rawPos">Offset of this block's data within <paramref name="rawData"/>.</param>
/// <param name="rawLength">Length of the block data including the <c>frameSpace</c> padding.</param>
public void HandleBlock(string type, MLVTypes.mlv_audf_hdr_t header, byte[] rawData, int rawPos, int rawLength)
{
    AudfHeader = header;

    if (WaveProvider == null)
    {
        return;
    }

    /* the audio samples start frameSpace bytes into the block data */
    int sampleOffset = (int)(rawPos + header.frameSpace);
    int sampleCount = (int)(rawLength - header.frameSpace);

    /* back-pressure: poll until the provider's buffer can accept the whole payload.
       NOTE(review): this spins forever if playback stops draining the buffer — confirm a consumer always runs. */
    while (WaveProvider.BufferedBytes + sampleCount > WaveProvider.BufferLength)
    {
        Thread.Sleep(100);
    }

    WaveProvider.AddSamples(rawData, sampleOffset, sampleCount);
}
/// <summary>
/// Walks every entry in <c>BlockIndex</c> and builds cross-reference tables that map
/// frame numbers to block-index positions: <c>VidfXrefList</c> for video (VIDF) blocks
/// and <c>AudfXrefList</c> for audio (AUDF) blocks. Each xref entry also captures the
/// block timestamp and the metadata state accumulated from all non-frame blocks seen
/// so far. Updates <c>TotalVideoFrameCount</c>, <c>HighestVideoFrameNumber</c>,
/// <c>FrameRedundantErrors</c> (duplicate frame numbers) and
/// <c>FrameMissingErrors</c> (gaps in the video frame-number sequence).
/// Returns without doing anything when <c>Reader</c> is null.
/// </summary>
public virtual void BuildFrameIndex()
{
    if (Reader == null)
    {
        return;
    }

    HighestVideoFrameNumber = 0;
    TotalVideoFrameCount = 0;

    Dictionary <uint, frameXrefEntry> vidfXrefList = new Dictionary <uint, frameXrefEntry>();
    Dictionary <uint, frameXrefEntry> audfXrefList = new Dictionary <uint, frameXrefEntry>();

    /* accumulates the most recent value of every metadata block type seen while scanning */
    MetadataContainer metadataContainer = new MetadataContainer();
    uint highestFrameNumber = 0;

    for (int blockIndexPos = 0; blockIndexPos < BlockIndex.Length; blockIndexPos++)
    {
        var block = BlockIndex[blockIndexPos];

        /* seek to the block's recorded position in its spanning file */
        Reader[block.fileNumber].BaseStream.Position = block.position;

        /* 16 bytes are enough for size, type and timestamp, but we try to read all blocks up to 1k */
        byte[] buf = new byte[1024];

        /* read MLV block header; a short read means we ran off the end of the file */
        if (Reader[block.fileNumber].Read(buf, 0, 16) != 16)
        {
            break;
        }

        /* MLV block header layout: bytes 0-3 four-char type, 4-7 block size, 8-15 timestamp */
        uint size = BitConverter.ToUInt32(buf, 4);
        string type = Encoding.UTF8.GetString(buf, 0, 4);
        UInt64 timestamp = BitConverter.ToUInt64(buf, 8);

        /* read that block, up to 256 byte */
        Reader[block.fileNumber].BaseStream.Position = block.position;

        /* re-read from the block start so the struct parser sees the header too */
        int readSize = (int)Math.Min(size, 256);
        if (Reader[block.fileNumber].Read(buf, 0, readSize) != readSize)
        {
            break;
        }

        /* parse the raw bytes into the block-type-specific header struct */
        object blockData = MLVTypes.ToStruct(buf);

        switch (type)
        {
            case "NULL":
                /* padding block — nothing to index */
                continue;

            case "VIDF":
                {
                    MLVTypes.mlv_vidf_hdr_t header = (MLVTypes.mlv_vidf_hdr_t)blockData;

                    /* first occurrence wins; later duplicates are counted as redundancy errors */
                    if (!vidfXrefList.ContainsKey(header.frameNumber))
                    {
                        frameXrefEntry entry = new frameXrefEntry();

                        entry.blockIndexPos = blockIndexPos;
                        /* snapshot of the metadata valid at this frame.
                           NOTE(review): assumes MetadataContainer.Metadata returns a per-call
                           snapshot; if it returns a live reference, every entry would share
                           the final state — confirm in MetadataContainer. */
                        entry.metadata = metadataContainer.Metadata;
                        entry.timestamp = timestamp;

                        vidfXrefList.Add(header.frameNumber, entry);
                    }
                    else
                    {
                        FrameRedundantErrors++;
                    }
                    highestFrameNumber = Math.Max(highestFrameNumber, header.frameNumber);
                }
                break;

            case "AUDF":
                {
                    MLVTypes.mlv_audf_hdr_t header = (MLVTypes.mlv_audf_hdr_t)blockData;

                    /* same first-occurrence-wins policy as VIDF above */
                    if (!audfXrefList.ContainsKey(header.frameNumber))
                    {
                        frameXrefEntry entry = new frameXrefEntry();

                        entry.blockIndexPos = blockIndexPos;
                        entry.metadata = metadataContainer.Metadata;
                        entry.timestamp = timestamp;

                        audfXrefList.Add(header.frameNumber, entry);
                    }
                    else
                    {
                        FrameRedundantErrors++;
                    }
                }
                break;

            default:
                /* any other block type is metadata — fold it into the running state */
                metadataContainer.Update(type, blockData);
                break;
        }
    }

    /* count the number of missing video frames.
       Note: each gap (run of consecutive missing frame numbers) counts as ONE error,
       not one per missing frame — curFrame jumps ahead to the found key. */
    uint curFrame = 0;
    foreach (var elem in vidfXrefList.OrderBy(elem => elem.Key))
    {
        if (elem.Key != curFrame)
        {
            curFrame = elem.Key;
            FrameMissingErrors++;
        }
        curFrame++;
    }

    VidfXrefList = vidfXrefList;
    AudfXrefList = audfXrefList;
    TotalVideoFrameCount = (uint)vidfXrefList.Count;
    HighestVideoFrameNumber = highestFrameNumber;
}