/// <summary>
/// Returns the slice (sample) described by SampleInfo.
/// When ADTS support is compiled in and PES/ADTS headers are present, the returned
/// SliceBytes is the concatenation of those headers and the raw payload bytes;
/// otherwise the payload bytes are passed through unchanged.
/// </summary>
/// <param name="SampleInfo">Block info for the sample; must be an ADTSDataBlockInfo
/// (the cast below throws InvalidCastException otherwise).</param>
/// <returns>A Slice whose SliceBytes holds the sample payload (plus headers under ADTS).</returns>
public override Slice GetSample(StreamDataBlockInfo SampleInfo)
{
  ADTSDataBlockInfo adtsInfo = (ADTSDataBlockInfo)SampleInfo;
  Slice ans = new Slice();
  ans.Copy(SampleInfo);
#if ADTS
  if (adtsInfo.PESandADTSHeaders == null)
  {
    // ADTS header may be absent, in which case we use the normal base.GetSample.
    return base.GetSample(SampleInfo);
  }

  int headerCount = adtsInfo.PESandADTSHeaders.Length;

  // SliceSize has already been incremented by the length of the PES + ADTS headers,
  // so the output buffer holds the headers followed by the raw payload.
  ans.SliceBytes = new byte[adtsInfo.SliceSize];
  adtsInfo.PESandADTSHeaders.CopyTo(ans.SliceBytes, 0);

  // The payload is read from the start of adtsInfo.SliceBytes (presumably the
  // header-less payload bytes -- TODO confirm against the producer of SliceBytes).
  // Dispose the reader, and loop until the requested count is satisfied:
  // Stream.Read may return fewer bytes than asked for, which the previous
  // single un-checked call silently ignored.
  using (BinaryReader reader = new BinaryReader(new MemoryStream(adtsInfo.SliceBytes)))
  {
    int offset = headerCount;
    int remaining = adtsInfo.SliceSize - headerCount;
    while (remaining > 0)
    {
      int bytesRead = reader.Read(ans.SliceBytes, offset, remaining);
      if (bytesRead <= 0)
      {
        break; // source exhausted; accept the short read rather than loop forever
      }
      offset += bytesRead;
      remaining -= bytesRead;
    }
  }
#else
  ans.SliceBytes = adtsInfo.SliceBytes;
#endif
  return ans;
}
/// <summary>
/// PrepareSampleReading
/// In MP4, reading of box headers is separate from reading of the H264 and audio bits. This is because the bits are stored
/// in a different place in the file (or may in fact be in a separate file). In a QBox file, however, both headers and bits
/// are stored in the qbox. It makes no sense to separate the two. Therefore, in this implementation of PrepareSampleReading,
/// we actually read the bits together with the headers. The routine WriteSamples doesn't do much.
///
/// There are two signatures for this method: one that accepts qbox indices (this one), and another that accepts ulong start
/// and end times.
///
/// We don't keep the qboxes. QBoxes already processed are disposed of as a last step. If we run out of qboxes, we read-in
/// more.
/// </summary>
/// <param name="inStartSampleIndex">int index to first qbox to be processed</param>
/// <param name="inEndSampleIndex">int index to last qbox to be processed</param>
/// <param name="dummy">not used</param>
/// <returns>One StreamDataBlockInfo per usable qbox, with payload bytes already attached.</returns>
public override List <StreamDataBlockInfo> PrepareSampleReading(int inStartSampleIndex, int inEndSampleIndex, ref ulong dummy)
{
  List <StreamDataBlockInfo> retList = new List <StreamDataBlockInfo>();

  if (_qBoxes.Count == 0)
  {
    return(retList); // no buffered qboxes: nothing to prepare
  }

  // Factor converting stream time units to .NET ticks.
  // NOTE(review): if TimeScale is an integral type, the division happens as an
  // integer BEFORE the float assignment and loses fractional precision -- confirm
  // TimeScale's declared type.
  float scaleFactor = TimeSpan.FromSeconds(1.0).Ticks / this.TimeScale;
  bool foundFirstSlice = false;
  int boxCount = 0; // leading _qBoxes entries consumed this pass; removed after the loop

  // we traverse the _qBoxes list from the beginning;
  // can't use foreach because _qBoxes can change;
  // box.mIndex is NOT the same as index i.
  // we use a for loop only because we are adding qboxes to _qBoxes as part of the loop
  for (int i = 0; i < _qBoxes.Count; i++)
  {
    QBox box = _qBoxes[i];
    boxCount++;

    // reject qboxes with sample size zero (no data)
    if (box.mSampleSize == 0)
    {
      continue;
    }

    // we shouldn't be searching for the first box of interest, because it should always be the first one
    // it should always be the first one because we threw away all boxes already processed
    if (((ulong)inStartSampleIndex > (box.mFrameCounter - 1)) || ((!foundFirstSlice) && (((uint)box.mSampleFlags & QBox.QBOX_SAMPLE_FLAGS_SYNC_POINT) == 0)))
    {
      continue; // skip: before the requested start, or still waiting for a sync point
    }
    else if ((ulong)inStartSampleIndex == (box.mFrameCounter - 1))
    {
      foundFirstSlice = true;
    }
    else if (!foundFirstSlice)
    {
      // Overshot the start without finding the first slice: refill the buffer
      // from the requested index and restart the scan from scratch.
      _qBoxes.Clear();
      base.GetNextBatch(0, inStartSampleIndex);
      // throw new Exception("First IFrame not found");
      i = -1; // this gets incremented to zero
      boxCount = 0; // start all over
      continue;
    }

    // Choose the concrete block type from the qbox stream type.
    StreamDataBlockInfo datBlock = new Slice();
    switch (box.SampleStreamTypeString())
    {
      case "AAC":
        datBlock = new ADTSDataBlockInfo();
        datBlock.SliceType = SliceType.AAC;
        break;
      case "Q711":
      case "PCM":
        datBlock.SliceType = SliceType.Unknown; // FIXME: add sample type for PCM
        break;
      case "MP2A":
        datBlock.SliceType = SliceType.MP4A;
        break;
      case "Q722": // ADPCM
      case "Q726":
      case "Q728":
        datBlock.SliceType = SliceType.Unknown; // FIXME: add sample type for ADPCM
        break;
      case "H264":
      case "H264_SLICE":
        datBlock = new NaluDelimiterBlockInfo();
        // The sync-point flag distinguishes IFrames from dependent frames.
        if (((uint)box.mSampleFlags & QBox.QBOX_SAMPLE_FLAGS_SYNC_POINT) == 0)
        {
          datBlock.SliceType = SliceType.DFrame;
        }
        else
        {
          datBlock.SliceType = SliceType.IFrame;
        }
        if ((box.mSample != null) && (box.mSample.v != null))
        {
          NaluDelimiterBlockInfo blockInfo = datBlock as NaluDelimiterBlockInfo;
          blockInfo.AccessUnitDelimiter = box.mSample.v.aud;
        }
        break;
      case "JPEG":
        datBlock.SliceType = SliceType.JPEG;
        break;
      case "MPEG2_ELEMENTARY":
        datBlock.SliceType = SliceType.Unknown; // FIXME: add sample type for MPEG2
        break;
      case "VIN_STATS_GLOBAL":
      case "VIN_STATS_MB":
      case "USER_METADATA":
      case "DEBUG":
      default:
        System.Diagnostics.Debug.WriteLine("Unknown QBox: {0}", box.SampleStreamTypeString());
        break;
    }

    // Composition time relative to the start of this sample's duration window.
    datBlock.CTS = (ulong)((box.mSampleCTS - (box.mStreamDuration - box.mSampleDuration)) * scaleFactor);
    datBlock.SliceDuration = (uint)(scaleFactor * box.mSampleDuration);
    if (box.mFrameCounter == 0 && box.mStreamDuration == 0)
    {
      datBlock.TimeStampNew = 0; // very first sample of the stream
    }
    else if (box.mStreamDuration == 0)
    {
      datBlock.TimeStampNew = null; // timestamp unknown for this sample
    }
    else
    {
      datBlock.TimeStampNew = (ulong)(scaleFactor * (box.mStreamDuration - box.mSampleDuration));
    }
    datBlock.SliceSize = box.mSampleSize;
    datBlock.index = (int)box.mFrameCounter - 1; // boxCount;

    // NOTE! For qbox, StreamOffset has a different meaning than in MP4.
    // Here, StreamOffset is the offset to the qbox itself; whereas in
    // MP4, StreamOffset is the offset to the H264 payload.
    // In GenericMediaTrack.GetSample, StreamOffset is used as in MP4, but
    // this method is overriden by another in QBoxVideoTrack that does not use StreamOffset.
    // For flashback to work for both MP4 and qbox files, the caching mechanism
    // is different in MP4 from than in qbox.
    datBlock.StreamOffset = (ulong)box.mHeaderPosition; // needed for flashback to work

    // set payload
    Slice slice = datBlock as Slice;
    slice.SliceBytes = box.mSample.mPayload;

#if ADTS
    // For AAC under ADTS, prepend the (PES+)ADTS headers and grow the sample size to match.
    if (box.mSampleStreamType == QBox.QBOX_SAMPLE_TYPE_AAC)
    {
      QMed.QMedAAC qmedaac = (QMed.QMedAAC)box.mSample.qmed;
#if PES
      datBlock.PESandADTSHeaders = new byte[qmedaac.pesHeader.Length + qmedaac.adtsHeader.Length];
      qmedaac.pesHeader.CopyTo(datBlock.PESandADTSHeaders, 0);
      qmedaac.adtsHeader.CopyTo(datBlock.PESandADTSHeaders, qmedaac.pesHeader.Length);
#else
      datBlock.PESandADTSHeaders = new byte[qmedaac.adtsHeader.Length];
      qmedaac.adtsHeader.CopyTo(datBlock.PESandADTSHeaders, 0);
#endif
      datBlock.SampleSize += datBlock.PESandADTSHeaders.Length;
    }
#endif

    if (datBlock.SliceDuration == 0)
    {
      datBlock.SliceDuration = (uint)(scaleFactor * box.mSampleDuration); // any non-zero duration is better
    }

    // Stop at the first IFrame at or past the requested end index; decrement
    // boxCount so that IFrame stays in _qBoxes for the next call.
    if ((((uint)box.mSampleFlags & QBox.QBOX_SAMPLE_FLAGS_SYNC_POINT) != 0) && ((box.mFrameCounter - 1) >= (ulong)inEndSampleIndex))
    {
      boxCount--;
      break; // don't put last IFrame box in return list
    }

    retList.Add(datBlock);

    // Refill the buffer when we are about to run off the end of it.
    if (box == _qBoxes.Last())
    {
      base.GetNextBatch(GenericMediaStream.MAX_BOXES_TO_READ, 0); // callee should set end FIXME: is box.mCurrentPosition being set?
    }
  } // end of for loop

  // Discard the qboxes consumed on this pass.
  _qBoxes.RemoveRange(0, boxCount);
  return(retList);
}
/// <summary>
/// PrepareSampleReading
/// In MP4, reading of box headers is separate from reading of the H264 and audio bits. This is because the bits are stored
/// in a different place in the file (or may in fact be in a separate file). In a QBox file, however, both headers and bits
/// are stored in the qbox. It makes no sense to separate the two. Therefore, in this implementation of PrepareSampleReading,
/// we actually read the bits together with the headers. The routine WriteSamples doesn't do much.
///
/// There are two signatures for this method: one that accepts qbox indices (this one), and another that accepts ulong start
/// and end times.
///
/// We don't keep the qboxes. QBoxes already processed are disposed of as a last step. If we run out of qboxes, we read-in
/// more.
/// </summary>
/// <param name="inStartSampleIndex">int index to first qbox to be processed</param>
/// <param name="inEndSampleIndex">int index to last qbox to be processed</param>
/// <param name="dummy">not used</param>
/// <returns>One StreamDataBlockInfo per usable qbox, with payload bytes already attached.</returns>
public override List<StreamDataBlockInfo> PrepareSampleReading(int inStartSampleIndex, int inEndSampleIndex, ref ulong dummy)
{
  List<StreamDataBlockInfo> retList = new List<StreamDataBlockInfo>();

  // No buffered qboxes: nothing to prepare.
  if (_qBoxes.Count == 0)
    return retList;

  // Factor converting stream time units to .NET ticks.
  // NOTE(review): if TimeScale is an integral type, the division happens as an
  // integer BEFORE the float assignment and loses fractional precision -- confirm
  // TimeScale's declared type.
  float scaleFactor = TimeSpan.FromSeconds(1.0).Ticks/this.TimeScale;
  bool foundFirstSlice = false;
  int boxCount = 0; // leading _qBoxes entries consumed this pass; removed after the loop

  // we traverse the _qBoxes list from the beginning;
  // can't use foreach because _qBoxes can change;
  // box.mIndex is NOT the same as index i.
  // we use a for loop only because we are adding qboxes to _qBoxes as part of the loop
  for (int i = 0; i < _qBoxes.Count; i++)
  {
    QBox box = _qBoxes[i];
    boxCount++;

    // reject qboxes with sample size zero (no data)
    if (box.mSampleSize == 0)
    {
      continue;
    }

    // we shouldn't be searching for the first box of interest, because it should always be the first one
    // it should always be the first one because we threw away all boxes already processed
    if (((ulong)inStartSampleIndex > (box.mFrameCounter - 1)) || ((!foundFirstSlice) && (((uint) box.mSampleFlags & QBox.QBOX_SAMPLE_FLAGS_SYNC_POINT) == 0)))
    {
      continue; // skip: before the requested start, or still waiting for a sync point
    }
    else if ((ulong)inStartSampleIndex == (box.mFrameCounter - 1))
    {
      foundFirstSlice = true;
    }
    else if (!foundFirstSlice)
    {
      // Overshot the start without finding the first slice: refill the buffer
      // from the requested index and restart the scan from scratch.
      _qBoxes.Clear();
      base.GetNextBatch(0, inStartSampleIndex);
      // throw new Exception("First IFrame not found");
      i = -1; // this gets incremented to zero
      boxCount = 0; // start all over
      continue;
    }

    // Choose the concrete block type from the qbox stream type.
    StreamDataBlockInfo datBlock = new Slice();
    switch (box.SampleStreamTypeString())
    {
      case "AAC":
        datBlock = new ADTSDataBlockInfo();
        datBlock.SliceType = SliceType.AAC;
        break;
      case "Q711":
      case "PCM":
        datBlock.SliceType = SliceType.Unknown; // FIXME: add sample type for PCM
        break;
      case "MP2A":
        datBlock.SliceType = SliceType.MP4A;
        break;
      case "Q722": // ADPCM
      case "Q726":
      case "Q728":
        datBlock.SliceType = SliceType.Unknown; // FIXME: add sample type for ADPCM
        break;
      case "H264":
      case "H264_SLICE":
        datBlock = new NaluDelimiterBlockInfo();
        // The sync-point flag distinguishes IFrames from dependent frames.
        if (((uint) box.mSampleFlags & QBox.QBOX_SAMPLE_FLAGS_SYNC_POINT) == 0)
          datBlock.SliceType = SliceType.DFrame;
        else
        {
          datBlock.SliceType = SliceType.IFrame;
        }
        if ((box.mSample != null) && (box.mSample.v != null))
        {
          NaluDelimiterBlockInfo blockInfo = datBlock as NaluDelimiterBlockInfo;
          blockInfo.AccessUnitDelimiter = box.mSample.v.aud;
        }
        break;
      case "JPEG":
        datBlock.SliceType = SliceType.JPEG;
        break;
      case "MPEG2_ELEMENTARY":
        datBlock.SliceType = SliceType.Unknown; // FIXME: add sample type for MPEG2
        break;
      case "VIN_STATS_GLOBAL":
      case "VIN_STATS_MB":
      case "USER_METADATA":
      case "DEBUG":
      default:
        System.Diagnostics.Debug.WriteLine("Unknown QBox: {0}", box.SampleStreamTypeString());
        break;
    }

    // Composition time relative to the start of this sample's duration window.
    datBlock.CTS = (ulong)((box.mSampleCTS - (box.mStreamDuration - box.mSampleDuration)) * scaleFactor);
    datBlock.SliceDuration = (uint)(scaleFactor * box.mSampleDuration);
    if (box.mFrameCounter == 0 && box.mStreamDuration == 0)
    {
      datBlock.TimeStampNew = 0; // very first sample of the stream
    }
    else if (box.mStreamDuration == 0)
    {
      datBlock.TimeStampNew = null; // timestamp unknown for this sample
    }
    else
    {
      datBlock.TimeStampNew = (ulong) (scaleFactor*(box.mStreamDuration - box.mSampleDuration));
    }
    datBlock.SliceSize = box.mSampleSize;
    datBlock.index = (int)box.mFrameCounter - 1; // boxCount;

    // NOTE! For qbox, StreamOffset has a different meaning than in MP4.
    // Here, StreamOffset is the offset to the qbox itself; whereas in
    // MP4, StreamOffset is the offset to the H264 payload.
    // In GenericMediaTrack.GetSample, StreamOffset is used as in MP4, but
    // this method is overriden by another in QBoxVideoTrack that does not use StreamOffset.
    // For flashback to work for both MP4 and qbox files, the caching mechanism
    // is different in MP4 from than in qbox.
    datBlock.StreamOffset = (ulong) box.mHeaderPosition; // needed for flashback to work

    // set payload
    Slice slice = datBlock as Slice;
    slice.SliceBytes = box.mSample.mPayload;

#if ADTS
    // For AAC under ADTS, prepend the (PES+)ADTS headers and grow the sample size to match.
    if (box.mSampleStreamType == QBox.QBOX_SAMPLE_TYPE_AAC)
    {
      QMed.QMedAAC qmedaac = (QMed.QMedAAC)box.mSample.qmed;
#if PES
      datBlock.PESandADTSHeaders = new byte[qmedaac.pesHeader.Length + qmedaac.adtsHeader.Length];
      qmedaac.pesHeader.CopyTo(datBlock.PESandADTSHeaders, 0);
      qmedaac.adtsHeader.CopyTo(datBlock.PESandADTSHeaders, qmedaac.pesHeader.Length);
#else
      datBlock.PESandADTSHeaders = new byte[qmedaac.adtsHeader.Length];
      qmedaac.adtsHeader.CopyTo(datBlock.PESandADTSHeaders, 0);
#endif
      datBlock.SampleSize += datBlock.PESandADTSHeaders.Length;
    }
#endif

    if (datBlock.SliceDuration == 0)
    {
      datBlock.SliceDuration = (uint) (scaleFactor*box.mSampleDuration); // any non-zero duration is better
    }

    // Stop at the first IFrame at or past the requested end index; decrement
    // boxCount so that IFrame stays in _qBoxes for the next call.
    if ((((uint) box.mSampleFlags & QBox.QBOX_SAMPLE_FLAGS_SYNC_POINT) != 0) && ((box.mFrameCounter - 1) >= (ulong)inEndSampleIndex))
    {
      boxCount--;
      break; // don't put last IFrame box in return list
    }

    retList.Add(datBlock);

    // Refill the buffer when we are about to run off the end of it.
    if (box == _qBoxes.Last())
    {
      base.GetNextBatch(GenericMediaStream.MAX_BOXES_TO_READ, 0); // callee should set end FIXME: is box.mCurrentPosition being set?
    }
  } // end of for loop

  // Discard the qboxes consumed on this pass.
  _qBoxes.RemoveRange(0, boxCount);
  return retList;
}