/// <summary>
/// AddOneSample
/// Append one sample record to this box, deriving its dependency flags from the
/// slice type (field semantics per the ISO 14496-12 'sdtp' box):
/// audio and D-frames vs. I-frames get different depends-on / is-depended-on values.
/// </summary>
/// <param name="data">Slice metadata whose SliceType selects the dependency flags.</param>
/// <exception cref="InvalidOperationException">Thrown for any other slice type
/// (note: SliceType.BFrame lands here too — presumably B-frames are never added
/// to this box; confirm against callers).</exception>
public void AddOneSample(StreamDataBlockInfo data)
{
    IndependentAndDisposableSample sample = new IndependentAndDisposableSample();
    sample.SampleHasRedundancy = 0; // no redundant coding
    switch (data.SliceType)
    {
        case SliceType.WMA: // audio
        case SliceType.MP4A:
        case SliceType.AAC:
            sample.SampleDependsOn = 2;    // does not depend on other samples
            sample.SampleIsDependedOn = 2; // no other sample depends on it (disposable)
            break;
        case SliceType.IFrame:
            sample.SampleIsDependedOn = 1; // other samples depend on this one
            sample.SampleDependsOn = 2;    // depends on no other sample
            break;
        case SliceType.DFrame:
            sample.SampleIsDependedOn = 1;
            sample.SampleDependsOn = 1;    // depends on other samples
            break;
        default:
            // was: throw new Exception(...) — use a specific exception type;
            // InvalidOperationException is still caught by existing catch (Exception) handlers
            throw new InvalidOperationException("Invalid sample type, cannot add to independent and disposable sample");
    }
    this.Samples.Add(sample);
}
/// <summary>
/// GetSliceTypeAndFrameNumFromH264Payload
/// Read the raw H.264 payload of one frame from _reader and parse it to fill in the
/// frame's CTS (frame number) and SliceType (B/I/D).
/// </summary>
/// <param name="oneFrameInfo">Frame record; StreamOffset and SliceSize locate the payload.</param>
private void GetSliceTypeAndFrameNumFromH264Payload(StreamDataBlockInfo oneFrameInfo)
{
    int count = oneFrameInfo.SliceSize;
    _reader.BaseStream.Position = (long)oneFrameInfo.StreamOffset;
    // (removed: an unused local BinaryReader that wrapped _reader.BaseStream and was never read from)
    byte[] buf = _reader.ReadBytes(count);
    H264Sample sample = new H264Sample(_sps, _pps, count);
    sample.ParseSample(buf);
    oneFrameInfo.CTS = (ulong)sample.FrameNum;
    if (sample.SliceType == SliceTypes.B)
    {
        oneFrameInfo.SliceType = SliceType.BFrame;
    }
    else if (sample.SliceType == SliceTypes.I)
    {
        oneFrameInfo.SliceType = SliceType.IFrame;
    }
    else
    {
        // everything that is neither B nor I is treated as a dependent (D) frame
        oneFrameInfo.SliceType = SliceType.DFrame;
    }
}
// IEnumerable<Slice> accessor; assumes this fragment has already been read in.
// The index is track-relative (starts at _startIndex), not an index into this fragment.
public override Slice this[int index]
{
    get
    {
        // nothing to return for out-of-range indices or before the fragment is loaded
        if ((index < _startIndex) || (_listOfSampleInfo == null))
        {
            return (null);
        }
        StreamDataBlockInfo info = _listOfSampleInfo[index - _startIndex];
        Slice result = new Slice();
        StreamDataBlockInfo asBase = result; // upcast so the base-class Copy is used, as before
        asBase.Copy(info);
        _reader.BaseStream.Position = (long)info.StreamOffset;
        result.SliceBytes = _reader.ReadBytes(info.SliceSize); // the actual bits from MDAT
        result.SliceSize = result.SliceBytes.Length;
        return (result);
    }
    set // this can work only with ODT fragments because each fragment is its own file
    {
        // if the original fragment has not been read in yet, do nothing
        if (_listOfSampleInfo != null)
        {
            FixupBoxesAndMDAT(index, value);
        }
    }
}
/// <summary>
/// AddOneSample
/// Append one TrackFragmentRunSample built from a slice record. The slice duration
/// (in ticks) is rescaled into the track time scale, and the running mdat offset is
/// advanced by this sample's size.
/// </summary>
/// <param name="data">Slice metadata supplying duration and size.</param>
/// <param name="timeScale">Track time scale (units per second).</param>
/// <param name="defSize">Default sample size from tfhd; 0 means per-sample sizes are used.</param>
/// <param name="defFlags">Default sample flags from tfhd; 0 means per-sample flags are used.</param>
/// <param name="currMdatOffset">Running offset into mdat, advanced by this sample's size.</param>
public void AddOneSample(StreamDataBlockInfo data, uint timeScale, uint defSize, uint defFlags, ref ulong currMdatOffset)
{
    // at this point the samples List should have been created
    TrackFragmentRunSample sample = new TrackFragmentRunSample();
    sample.SampleCompositionTimeOffset = 0; // FIXME: let's see whether we can get by without setting this composition time offset
    // careful with overflow: multiply in ulong before dividing
    ulong product = ((ulong)data.SliceDuration) * ((ulong)timeScale);
    // was: TimeSpan.FromSeconds(1.0).Ticks — TicksPerSecond is the same value without
    // constructing a TimeSpan, and matches the usage elsewhere in this file
    sample.SampleDuration = (uint)(product / (ulong)TimeSpan.TicksPerSecond);
    this.Duration += sample.SampleDuration;
    if (defFlags == 0)
    {
        sample.SampleFlags = 0; // FIXME: we are not setting the sample flags at all
    }
    if (defSize == 0)
    {
        sample.SampleSize = (uint)data.SliceSize;
        currMdatOffset += sample.SampleSize;
    }
    else
    {
        currMdatOffset += defSize;
    }
    this.Samples.Add(sample);
}
/// <summary>
/// GetSliceTypeAndFrameNum
/// Dispatch on the fragment's track type: video tracks get their slice type and frame
/// number parsed from the H.264 payload; audio tracks get a fixed slice type.
/// Unrecognized track types leave oneFrameInfo untouched.
/// </summary>
/// <param name="oneFrameInfo">Frame record to classify.</param>
/// <exception cref="InvalidOperationException">When the track type has not been determined.</exception>
private void GetSliceTypeAndFrameNum(StreamDataBlockInfo oneFrameInfo)
{
    if (_trackType == null)
    {
        // was: bare Exception followed by a stray empty statement (";") — both removed
        throw new InvalidOperationException("Fragment: track type undetermined");
    }
    switch (_trackType)
    {
        case "avc1":
        case "vc-1":
        case "mp4v":
            GetSliceTypeAndFrameNumFromH264Payload(oneFrameInfo);
            break;
        case "mp4a":
            oneFrameInfo.SliceType = SliceType.MP4A;
            break;
        case "wma ":
            oneFrameInfo.SliceType = SliceType.WMA;
            break;
        //case "avc1":
        //  return SampleType.AVC1;
        //  break;
        default:
            return;
    }
}
/// <summary>
/// GetSampleStream (index-based)
/// Copy the slice records of this fragment whose track index lies in
/// [inStartSampleIndex, inEndSampleIndex] into sampleStream. Collection continues past
/// the end index through trailing D/B frames and stops at the first independent slice.
/// </summary>
/// <param name="sampleStream">Receives copies of the matching slice records.</param>
/// <param name="inStartSampleIndex">First track-relative sample index wanted.</param>
/// <param name="inEndSampleIndex">Last track-relative sample index wanted.</param>
/// <param name="lastEnd">Receives the running end time of the last slice copied.</param>
/// <returns>False when the start index is beyond this fragment; true otherwise.</returns>
public bool GetSampleStream(List <StreamDataBlockInfo> sampleStream, int inStartSampleIndex, int inEndSampleIndex, ref ulong lastEnd)
{
    if (inStartSampleIndex >= (_startIndex + Length))
    {
        return (false); // requested index is beyond this fragment
    }
    _currentFrame = _startIndex;
    _currentTime = (long)_startTime;
    foreach (StreamDataBlockInfo info in _listOfSampleInfo)
    {
        if (info.index >= inStartSampleIndex)
        {
            bool pastEnd = info.index > inEndSampleIndex;
            bool dependentFrame = (info.SliceType == SliceType.DFrame) || (info.SliceType == SliceType.BFrame);
            if (pastEnd && !dependentFrame)
            {
                break; // first independent slice beyond the end index terminates collection
            }
            _currentTime += (long)info.SliceDuration;
            lastEnd = (ulong)_currentTime;
            StreamDataBlockInfo copy = new StreamDataBlockInfo();
            copy.Copy(info);
            sampleStream.Add(copy);
        }
        _currentFrame++;
    }
    return (true);
}
/// <summary>
/// GetSampleStream
/// Assemble "sampleStream": the list of sample records that point to the sample bits
/// in mdat and carry duration data, etc. Only samples belonging to THIS fragment are
/// collected; when the [startTime, endTime] span covers several fragments it is up to
/// the caller to create another Fragment instance and continue there.
/// FIXME: Need to optimize this so that sampleStream is assembled from listOfSampleInfo, instead of reading boxes again.
/// </summary>
/// <param name="sampleStream">List of sample records to be assembled.</param>
/// <param name="timeScale">Sampling rate (samples per second).</param>
/// <param name="trackType">Which track does this fragment belong?</param>
/// <param name="startTime">Start of Iteration time.</param>
/// <param name="endTime">End of Iteration time.</param>
/// <param name="lastEnd">Previous endTime.</param>
/// <returns>False when this fragment cannot satisfy the request; true otherwise.</returns>
public bool GetSampleStream(List <StreamDataBlockInfo> sampleStream, uint timeScale, string trackType, ulong startTime, ulong endTime, ref ulong lastEnd)
{
    if (startTime > (_startTime + Duration))
    {
        return (false); // requested start time is ahead of this whole fragment
    }
    if (_timeScale != timeScale)
    {
        return (false);
    }
    _currentFrame = _startIndex;
    _currentTime = (long)_startTime;
    foreach (StreamDataBlockInfo info in _listOfSampleInfo)
    {
        if (info.TimeStampNew >= startTime)
        {
            bool pastEnd = info.TimeStampNew > endTime;
            bool dependentFrame = (info.SliceType == SliceType.DFrame) || (info.SliceType == SliceType.BFrame);
            if (pastEnd && !dependentFrame)
            {
                break; // first independent slice beyond endTime terminates collection
            }
            _currentTime += (long)info.SliceDuration;
            lastEnd = (ulong)_currentTime;
            StreamDataBlockInfo copy = new StreamDataBlockInfo();
            copy.Copy(info);
            sampleStream.Add(copy);
        }
        _currentFrame++;
    }
    return (true);
}
// Accumulates slices into blocks and flushes a block (PrepareSampleWriting +
// WriteSamples) once it is full. Returns the next value for the caller's slice
// index: 0 while a block is still being accumulated / extended, sliceIndex or
// sliceIndex + 1 after a flush (the +1 distinction depends on whether a trailing
// IFrame was held back — see below).
int track_PrepareMediaHeaders(int sliceIndex)
{
    StreamDataBlockInfo lastData = null;
    if (sliceIndex == 0)
    {
        // hypothesize a BlockSize (we don't know it at this point)
        BlockSize = 8;
        return (0);
    }
    else if (CurrentSliceList.Count >= BlockSize)
    {
        if (BlockSize == 8)
        {
            // first full block: derive the real block size from what we have so far
            CalculateBlockSizeFromInitialSlices();
            if (BlockSize > 8) // if less than or equal to 8, process it
            {
                return (0);
            }
        }
        int i = CurrentSliceList.Count - 1;
        if (CurrentSliceList[i].SliceType == SliceType.IFrame)
        {
            // hold the trailing IFrame back; it is re-added after the flush so it
            // starts the next block
            lastData = CurrentSliceList[i];
            CurrentSliceList.RemoveAt(i);
        }
        else if (CurrentSliceList[i].SliceType == SliceType.DFrame)
        {
            return (0); // extend current block until next IFrame is found
        }
    }
    else
    {
        return (0); // don't set lists (see below) for any other values of sliceIndex
    }
    // flush: assign mdat offsets to the block's slices, then write them out
    ulong localCurrMDatOffset = track.ParentStream.CurrMDatOffset;
    track.TrackFormat.PrepareSampleWriting(CurrentSliceList, ref localCurrMDatOffset);
    track.ParentStream.CurrMDatOffset = localCurrMDatOffset;
    track.ParentStream.WriteSamples(CurrentSliceList.Cast <Slice>(), track.Codec.CodecType); // second param is ineffective (unnecessary)
    // when writing to destination file, we only need and use one cache buffer
    cache[readCache].SampleStreamLocations = new List <StreamDataBlockInfo>();
    // last IFrame should be part of next block
    if (lastData != null)
    {
        CurrentSliceList.Add(lastData);
        return (sliceIndex);
    }
    return (sliceIndex + 1); // we only get here if sliceIndex == 0 or slice is not video
}
/// <summary>
/// GetSample
/// Read the raw sample bits for one slice from the parent stream into a new Slice.
/// </summary>
/// <param name="SampleInfo">Locates the sample via StreamOffset and SliceSize.</param>
/// <returns>A Slice whose metadata is copied from SampleInfo and whose SliceBytes hold the payload.</returns>
public virtual Slice GetSample(StreamDataBlockInfo SampleInfo)
{
    Slice ans = new Slice();
    ans.SliceBytes = new byte[SampleInfo.SliceSize];
    //ParentStream.EnterMutex();
    ParentStream.Stream.Position = (long)SampleInfo.StreamOffset; // if this GetSample call follows another one, file should be in position
    // Stream.Read is not guaranteed to fill the buffer in one call; loop until the
    // whole slice is in (was: a single Read whose return value was ignored)
    int total = 0;
    while (total < SampleInfo.SliceSize)
    {
        int bytesRead = ParentStream.Stream.Read(ans.SliceBytes, total, SampleInfo.SliceSize - total);
        if (bytesRead == 0)
        {
            break; // premature end of stream; keep whatever bytes we could read
        }
        total += bytesRead;
    }
    //ParentStream.LeaveMutex();
    ans.Copy(SampleInfo);
    return (ans);
}
/// <summary>
/// GetSample
/// Read the raw sample bits for one slice from the ISMV box reader's stream.
/// </summary>
/// <param name="SampleInfo">Locates the sample via StreamOffset and SliceSize.</param>
/// <returns>A Slice whose metadata is copied from SampleInfo and whose SliceBytes hold the payload.</returns>
public override Slice GetSample(StreamDataBlockInfo SampleInfo)
{
    Slice ans = new Slice();
    ans.SliceBytes = new byte[SampleInfo.SliceSize];
    //ParentStream.EnterMutex();
    ISMVTrackFormat ismvFormat = TrackFormat as ISMVTrackFormat;
    ismvFormat.boxReader.BaseStream.Position = (long)SampleInfo.StreamOffset; // if this GetSample call follows another one, file should be in position
    // Stream.Read may return fewer bytes than requested; loop until the slice is
    // complete (was: a single Read whose return value was ignored)
    int total = 0;
    while (total < SampleInfo.SliceSize)
    {
        int bytesRead = ismvFormat.boxReader.BaseStream.Read(ans.SliceBytes, total, SampleInfo.SliceSize - total);
        if (bytesRead == 0)
        {
            break; // premature end of stream; keep whatever bytes we could read
        }
        total += bytesRead;
    }
    //ParentStream.LeaveMutex();
    ans.Copy(SampleInfo);
    return (ans);
}
/// <summary>
/// GetSample
/// Read the raw sample bits for one slice from the ISMV box reader's stream.
/// </summary>
/// <param name="SampleInfo">Locates the sample via StreamOffset and SliceSize.</param>
/// <returns>A Slice whose metadata is copied from SampleInfo and whose SliceBytes hold the payload.</returns>
public override Slice GetSample(StreamDataBlockInfo SampleInfo)
{
    Slice ans = new Slice();
    ans.SliceBytes = new byte[SampleInfo.SliceSize];
    //ParentStream.EnterMutex();
    ISMVTrackFormat ismvFormat = TrackFormat as ISMVTrackFormat;
    ismvFormat.boxReader.BaseStream.Position = (long)SampleInfo.StreamOffset; // if this GetSample call follows another one, file should be in position
    // Stream.Read may return fewer bytes than requested; loop until the slice is
    // complete (was: a single Read whose return value was ignored)
    int total = 0;
    while (total < SampleInfo.SliceSize)
    {
        int bytesRead = ismvFormat.boxReader.BaseStream.Read(ans.SliceBytes, total, SampleInfo.SliceSize - total);
        if (bytesRead == 0)
        {
            break; // premature end of stream; keep whatever bytes we could read
        }
        total += bytesRead;
    }
    //ParentStream.LeaveMutex();
    ans.Copy(SampleInfo);
    return (ans);
}
/// <summary>
/// UpdateRelevancyScores (with ulong time parameter)
/// When this method is called, we have already found the requested slice, and
/// readCache is now set to the correct buf. Finds the slice whose time stamp lies
/// within half a slice duration of sliceTime and forwards its index to the
/// index-based overload.
/// </summary>
/// <param name="sliceTime">Requested slice time in ticks; 0 maps directly to index 0.</param>
void UpdateRelevancyScores(ulong sliceTime)
{
    if (sliceTime == 0UL)
    {
        UpdateRelevancyScores(0);
        return;
    }
    // first, look for the slice in order to get its index
    StreamDataBlockInfo sliceInfo = CurrentSliceList[0]; // get any slice
    uint halfOfDuration = (uint)(sliceInfo.SliceDuration / 2); // shift to the right by one
    // guard the subtraction: sliceTime - halfOfDuration wraps around (ulong underflow)
    // for a small sliceTime, which made the predicate below unmatchable and First throw
    ulong lowerBound = (sliceTime > halfOfDuration) ? (sliceTime - halfOfDuration) : 0UL;
    sliceInfo = CurrentSliceList.First(
        s => s.TimeStampNew.HasValue &&
             ((s.TimeStampNew.Value > lowerBound) && (s.TimeStampNew.Value < sliceTime + halfOfDuration)));
    UpdateRelevancyScores(sliceInfo.index);
}
/// <summary>
/// GetSample
/// Build the output slice from the already-read slice bytes, removing empty NALUs
/// (length == 0) and trailing bytes from each NALU.
/// </summary>
/// <param name="SampleInfo">Input record; must actually be a Slice carrying SliceBytes.</param>
/// <returns>Slice with compacted NALU data in SliceBytes.</returns>
public override Slice GetSample(StreamDataBlockInfo SampleInfo)
{
    int delimiterLength = 0;
    Slice ans = new Slice();
    ans.Copy(SampleInfo);
    ans.SliceBytes = new byte[SampleInfo.SliceSize];
#if REMOVE_EXTRA_SPS
    NaluDelimiterBlockInfo blockInfo = SampleInfo as NaluDelimiterBlockInfo;
    if (blockInfo.AccessUnitDelimiter != null)
    {
        delimiterLength = blockInfo.AccessUnitDelimiter.Length + 4; // access unit delimiter length is always 2
        // assume that SliceBytes[0 to 2] are all zeroes, we only need to set LSB
        ans.SliceBytes[3] = (byte)(delimiterLength - 4);
        blockInfo.AccessUnitDelimiter.CopyTo(ans.SliceBytes, 4);
    }
#endif
    //ParentStream.Stream.Position = (long)SampleInfo.StreamOffset;
    // remove empty NALUs (length == 0)
    // also remove trailing bytes, if any, from each NALU
    Slice inSlice = SampleInfo as Slice;
    BinaryReader br = new BinaryReader(new MemoryStream(inSlice.SliceBytes));
    //BinaryReader br = new BinaryReader(ParentStream.Stream);
    int totalSize = SampleInfo.SliceSize - delimiterLength;
    int offset = delimiterLength;
    while (totalSize > 4)
    {
        ulong naluLen = QBox.BE32(br.ReadUInt32());
        if (naluLen > 0UL)
        {
            br.BaseStream.Position -= 4; // back up so the 4-byte length prefix is copied too
            int readLen = (int)naluLen + 4;
            br.Read(ans.SliceBytes, offset, readLen);
            offset += readLen;
            totalSize -= readLen;
        }
        else
        {
            // skip the empty NALU's 4-byte length prefix; previously totalSize was
            // left unchanged here, so the loop spun until the reader ran off the end
            // of the underlying MemoryStream
            totalSize -= 4;
        }
    }
    return (ans);
}
/// <summary>
/// GetSample
/// Read the H.264 payload of one slice into a new Slice for display; when MV_Centerus
/// is defined, also hand the payload off to an H264Sample for asynchronous parsing.
/// </summary>
/// <param name="SampleInfo">Locates the sample via StreamOffset and SliceSize.</param>
/// <returns>Slice with metadata copied from SampleInfo and the payload in SliceBytes.</returns>
public override Slice GetSample(StreamDataBlockInfo SampleInfo)
{
    Slice ans = new Slice();
    ans.Copy(SampleInfo);
    ans.SliceBytes = new byte[SampleInfo.SliceSize];
    // read H264 payload for display and processing --
    // for display:
    ParentStream.Stream.Position = (long)ans.StreamOffset;
    // Stream.Read may return fewer bytes than requested; loop until the slice is
    // complete (was: a single Read whose return value was ignored)
    int total = 0;
    while (total < ans.SliceSize)
    {
        int bytesRead = ParentStream.Stream.Read(ans.SliceBytes, total, ans.SliceSize - total);
        if (bytesRead == 0)
        {
            break; // premature end of stream; keep whatever bytes we could read
        }
        total += bytesRead;
    }
    // for processing:
    // (hand-off the payload processing to a separate thread so this method
    // can return immediately)
#if MV_Centerus
    H264Sample sample = new H264Sample(_sps, _pps, ans.SliceSize);
    sample.SampleDoneEvent += CompletionCallback;
    sample.ParseSample(ans.SliceBytes); // async call
    samples.Add(sample);
#endif
    return (ans);
}
/// <summary>
/// GetSample
/// Read the H.264 payload of one slice into a new Slice for display; when MV_Centerus
/// is defined, also hand the payload off to an H264Sample for asynchronous parsing.
/// </summary>
/// <param name="SampleInfo">Locates the sample via StreamOffset and SliceSize.</param>
/// <returns>Slice with metadata copied from SampleInfo and the payload in SliceBytes.</returns>
public override Slice GetSample(StreamDataBlockInfo SampleInfo)
{
    Slice ans = new Slice();
    ans.Copy(SampleInfo);
    ans.SliceBytes = new byte[SampleInfo.SliceSize];
    // read H264 payload for display and processing --
    // for display:
    ParentStream.Stream.Position = (long)ans.StreamOffset;
    // Stream.Read may return fewer bytes than requested; loop until the slice is
    // complete (was: a single Read whose return value was ignored)
    int total = 0;
    while (total < ans.SliceSize)
    {
        int bytesRead = ParentStream.Stream.Read(ans.SliceBytes, total, ans.SliceSize - total);
        if (bytesRead == 0)
        {
            break; // premature end of stream; keep whatever bytes we could read
        }
        total += bytesRead;
    }
    // for processing:
    // (hand-off the payload processing to a separate thread so this method
    // can return immediately)
#if MV_Centerus
    H264Sample sample = new H264Sample(_sps, _pps, ans.SliceSize);
    sample.SampleDoneEvent += CompletionCallback;
    sample.ParseSample(ans.SliceBytes); // async call
    samples.Add(sample);
#endif
    return (ans);
}
/// <summary>
/// GetSample
/// Build the output slice from the already-read slice bytes, removing empty NALUs
/// (length == 0) and trailing bytes from each NALU.
/// </summary>
/// <param name="SampleInfo">Input record; must actually be a Slice carrying SliceBytes.</param>
/// <returns>Slice with compacted NALU data in SliceBytes.</returns>
public override Slice GetSample(StreamDataBlockInfo SampleInfo)
{
    int delimiterLength = 0;
    Slice ans = new Slice();
    ans.Copy(SampleInfo);
    ans.SliceBytes = new byte[SampleInfo.SliceSize];
#if REMOVE_EXTRA_SPS
    NaluDelimiterBlockInfo blockInfo = SampleInfo as NaluDelimiterBlockInfo;
    if (blockInfo.AccessUnitDelimiter != null)
    {
        delimiterLength = blockInfo.AccessUnitDelimiter.Length + 4; // access unit delimiter length is always 2
        // assume that SliceBytes[0 to 2] are all zeroes, we only need to set LSB
        ans.SliceBytes[3] = (byte)(delimiterLength - 4);
        blockInfo.AccessUnitDelimiter.CopyTo(ans.SliceBytes, 4);
    }
#endif
    //ParentStream.Stream.Position = (long)SampleInfo.StreamOffset;
    // remove empty NALUs (length == 0)
    // also remove trailing bytes, if any, from each NALU
    Slice inSlice = SampleInfo as Slice;
    BinaryReader br = new BinaryReader(new MemoryStream(inSlice.SliceBytes));
    //BinaryReader br = new BinaryReader(ParentStream.Stream);
    int totalSize = SampleInfo.SliceSize - delimiterLength;
    int offset = delimiterLength;
    while (totalSize > 4)
    {
        ulong naluLen = QBox.BE32(br.ReadUInt32());
        if (naluLen > 0UL)
        {
            br.BaseStream.Position -= 4; // back up so the 4-byte length prefix is copied too
            int readLen = (int)naluLen + 4;
            br.Read(ans.SliceBytes, offset, readLen);
            offset += readLen;
            totalSize -= readLen;
        }
        else
        {
            // skip the empty NALU's 4-byte length prefix; previously totalSize was
            // left unchanged here, so the loop spun until the reader ran off the end
            // of the underlying MemoryStream
            totalSize -= 4;
        }
    }
    return (ans);
}
/// <summary>
/// GetSample
/// Assemble one audio slice. Under ADTS, the PES + ADTS header bytes are prepended
/// to the payload; otherwise the slice bytes are passed through unchanged.
/// </summary>
/// <param name="SampleInfo">Input record; must actually be an ADTSDataBlockInfo.</param>
/// <returns>Slice with header (if present) followed by payload in SliceBytes.</returns>
public override Slice GetSample(StreamDataBlockInfo SampleInfo)
{
    ADTSDataBlockInfo adtsInfo = (ADTSDataBlockInfo)SampleInfo;
    Slice ans = new Slice();
    ans.Copy(SampleInfo);
#if ADTS
    if (adtsInfo.PESandADTSHeaders == null) // ADTS header may be absent, in which case we use the normal base.GetSample
    {
        return base.GetSample(SampleInfo);
    }
    int headerCount = adtsInfo.PESandADTSHeaders.Length;
    ans.SliceBytes = new byte[adtsInfo.SliceSize]; // SampleSize has already been incremented by length of PES + ADTS header
    adtsInfo.PESandADTSHeaders.CopyTo(ans.SliceBytes, 0);
    //if (ParentStream.Stream.Position != (long)adtsInfo.StreamOffset)
    //  ParentStream.Stream.Position = (long)adtsInfo.StreamOffset; // this if statement for debugging: just to be able to put a breakpoint here
    // NOTE(review): this copies adtsInfo.SliceBytes[0 .. SliceSize-headerCount-1] into
    // ans.SliceBytes starting AT headerCount — i.e. it assumes adtsInfo.SliceBytes holds
    // the bare payload without the headers; confirm against the producer of SliceBytes
    BinaryReader reader = new BinaryReader(new MemoryStream(adtsInfo.SliceBytes));
    //ParentStream.Stream.Read(ans.SliceBytes, headerCount, adtsInfo.SliceSize - headerCount);
    reader.Read(ans.SliceBytes, headerCount, adtsInfo.SliceSize - headerCount);
#else
    // no ADTS: pass the payload through unchanged (note: shares the byte array, no copy)
    ans.SliceBytes = adtsInfo.SliceBytes;
#endif
    return (ans);
}
/// <summary>
/// GetSample
/// Assemble one audio slice. Under ADTS, the PES + ADTS header bytes are prepended
/// to the payload; otherwise the slice bytes are passed through unchanged.
/// </summary>
/// <param name="SampleInfo">Input record; must actually be an ADTSDataBlockInfo.</param>
/// <returns>Slice with header (if present) followed by payload in SliceBytes.</returns>
public override Slice GetSample(StreamDataBlockInfo SampleInfo)
{
    ADTSDataBlockInfo adtsInfo = (ADTSDataBlockInfo)SampleInfo;
    Slice ans = new Slice();
    ans.Copy(SampleInfo);
#if ADTS
    if (adtsInfo.PESandADTSHeaders == null) // ADTS header may be absent, in which case we use the normal base.GetSample
    {
        return (base.GetSample(SampleInfo));
    }
    int headerCount = adtsInfo.PESandADTSHeaders.Length;
    ans.SliceBytes = new byte[adtsInfo.SliceSize]; // SampleSize has already been incremented by length of PES + ADTS header
    adtsInfo.PESandADTSHeaders.CopyTo(ans.SliceBytes, 0);
    //if (ParentStream.Stream.Position != (long)adtsInfo.StreamOffset)
    //  ParentStream.Stream.Position = (long)adtsInfo.StreamOffset; // this if statement for debugging: just to be able to put a breakpoint here
    // NOTE(review): this copies adtsInfo.SliceBytes[0 .. SliceSize-headerCount-1] into
    // ans.SliceBytes starting AT headerCount — i.e. it assumes adtsInfo.SliceBytes holds
    // the bare payload without the headers; confirm against the producer of SliceBytes
    BinaryReader reader = new BinaryReader(new MemoryStream(adtsInfo.SliceBytes));
    //ParentStream.Stream.Read(ans.SliceBytes, headerCount, adtsInfo.SliceSize - headerCount);
    reader.Read(ans.SliceBytes, headerCount, adtsInfo.SliceSize - headerCount);
#else
    // no ADTS: pass the payload through unchanged (note: shares the byte array, no copy)
    ans.SliceBytes = adtsInfo.SliceBytes;
#endif
    return (ans);
}
/// <summary>
/// InitSampleStreamFromSampleTableBox
/// The idea is to collect information on slices starting from startIndex to endIndex.
/// This is a fairly complex method that traverses all boxes read from this SampleTableBox:
/// stts (durations), stsz (sizes), SampleToChunkBox (file offsets), SyncSampleMapBox
/// (I-frame detection) and CompositionTimeToSample (ctts offsets).
/// NOTE(review): the comparisons use sampleCount + 1, so startIndex/endIndex appear to
/// be 1-based sample indices — confirm against callers.
/// </summary>
/// <param name="sampleTimeScale">uint - sample time scale (media units per second)</param>
/// <param name="startIndex">int - start index</param>
/// <param name="endIndex">int - end index</param>
/// <param name="lastEnd">ref ulong - receives the scaled time at which iteration ended</param>
/// <returns>List of slice records for the requested range</returns>
public List<StreamDataBlockInfo> InitSampleStreamFromSampleTableBox(uint sampleTimeScale, int startIndex, int endIndex, ref ulong lastEnd)
{
    List<StreamDataBlockInfo> SampleStreamLocations = new List<StreamDataBlockInfo>();
    // local vars
    DecodingTimeToSampleBox stts = this.DecodingTimeToSampleBox;
    if (stts == null)
        throw new Exception("SampleTableBox.DecodingTimeToSampleBox missing for track");
    uint sampleCount = 0;   // running 0-based sample counter across all stts entries
    ulong timeT = 0;        // running decode time in media units
    ulong currScaledT = 0;  // timeT rescaled to ticks
    ulong prevScaledT = 0;
    ulong endT = 0;         // set once the end sample has been found
    uint[] counts = stts.SampleCount;
    uint[] deltaTimes = stts.SampleDelta;
    ulong currOffset = 0UL; // running byte offset (tracked but not stored per sample)
    SampleSizeBox stsz = this.SampleSizeBox;
    uint sampleSize = stsz.SampleSize; // nonzero means a constant size for every sample
    uint totalSamples = stsz.SampleCount;
    uint[] sizeArray = stsz.SampleSizeArray;
    int sampleCountInList = 0;
    if ((this.SampleDescriptionsBox == null) || (this.SampleDescriptionsBox.EntryCount != 1))
        throw new Exception("SampleTableBox.SampleDescriptionsBox error");
    BoxType sampleDescriptionBoxType = this.SampleDescriptionsBox.Entries[0].Type;
    // initialize (set) cttsIndex to the value that corresponds to startIndex
    int k = 0;
    int cttsIndex = 0;
    if (CompositionTimeToSample != null && CompositionTimeToSample.EntryCount > 0)
    {
        int sliceIndex = 1;
        while (cttsIndex < CompositionTimeToSample.SampleOffset.Length)
        {
            if (sliceIndex == startIndex)
                break;
            k++;
            if (k == CompositionTimeToSample.SampleCount[cttsIndex])
            {
                k = 0; // begin counting from zero again
                cttsIndex++;
            }
            sliceIndex++;
        }
    }
    for (int i = 0; i < stts.EntryCount; i++)
    {
        for (int j = 0; j < counts[i]; j++)
        {
            currScaledT = (ulong)((timeT * (ulong)TimeSpan.FromSeconds(1.0).Ticks) / sampleTimeScale);
            if ((sampleCount + 1 >= startIndex) && (sampleCount + 1 <= endIndex))
            {
                StreamDataBlockInfo data = new StreamDataBlockInfo();
                data.index = (int)sampleCount;
                data.TimeStampNew = currScaledT;
                // NOTE(review): the trailing + 1 tick presumably avoids zero-length slices — confirm
                data.SliceDuration = (uint)((deltaTimes[i] * TimeSpan.TicksPerSecond) / sampleTimeScale) + 1;
                data.SliceSize = (int)sampleSize;
                if (sampleSize == 0)
                    data.SliceSize = (int)sizeArray[sampleCount];
                data.StreamOffset = (ulong)this.SampleToChunkBox.GetFileOffset((uint)(sampleCount + 1));
                // mp4a tracks are audio; video slices are IFrames when stss lists them (or stss is absent)
                data.SliceType = sampleDescriptionBoxType == BoxTypes.Mp4a ? SliceType.MP4A :
                    ((this.SyncSampleMapBox == null) || this.SyncSampleMapBox.IsIFrame(sampleCount + 1) ? SliceType.IFrame : SliceType.DFrame);
                // if necessary, increment cttsIndex
                if (CompositionTimeToSample != null && CompositionTimeToSample.EntryCount > 0)
                {
                    k++;
                    data.NonQuickTimeCTTS = CompositionTimeToSample.SampleOffset[cttsIndex];
                    if (k == CompositionTimeToSample.SampleCount[cttsIndex])
                    {
                        k = 0; // begin counting from zero again
                        cttsIndex++;
                    }
                }
                SampleStreamLocations.Add(data);
                sampleCountInList++;
            }
            if (sampleCount + 1 > endIndex)
            {
                endT = prevScaledT;
                break;
            } // close of if (currScaledT > endTimeJustBeforeIFrame)
            // keep track of offset
            if (sampleSize > 0)
                currOffset += sampleSize;
            else
            {
                if (sampleCount > totalSamples)
                    throw new Exception("SampleTableBox error: sample count inconsistency bet. stts and stsz");
                currOffset += sizeArray[sampleCount];
            }
            prevScaledT = currScaledT;
            timeT += deltaTimes[i];
            sampleCount++;
        } // end of for j
        if (endT > 0UL) // end sample found
            break;
    } // end of for i
    if (endT == 0UL) // if we did not find end, endTime would not be set
        lastEnd = currScaledT;
    else
        lastEnd = endT;
    return SampleStreamLocations;
}
/// <summary>
/// GetSliceTypeAndFrameNumFromH264Payload
/// Read the raw H.264 payload of one frame from _reader and parse it to fill in the
/// frame's CTS (frame number) and SliceType (B/I/D).
/// </summary>
/// <param name="oneFrameInfo">Frame record; StreamOffset and SliceSize locate the payload.</param>
private void GetSliceTypeAndFrameNumFromH264Payload(StreamDataBlockInfo oneFrameInfo)
{
    int count = oneFrameInfo.SliceSize;
    _reader.BaseStream.Position = (long)oneFrameInfo.StreamOffset;
    // (removed: an unused local BinaryReader that wrapped _reader.BaseStream and was never read from)
    byte[] buf = _reader.ReadBytes(count);
    H264Sample sample = new H264Sample(_sps, _pps, count);
    sample.ParseSample(buf);
    oneFrameInfo.CTS = (ulong)sample.FrameNum;
    if (sample.SliceType == SliceTypes.B)
        oneFrameInfo.SliceType = SliceType.BFrame;
    else if (sample.SliceType == SliceTypes.I)
        oneFrameInfo.SliceType = SliceType.IFrame;
    else
        // everything that is neither B nor I is treated as a dependent (D) frame
        oneFrameInfo.SliceType = SliceType.DFrame;
}
/// <summary>
/// GetSliceTypeAndFrameNum
/// Dispatch on the fragment's track type: video tracks get their slice type and frame
/// number parsed from the H.264 payload; audio tracks get a fixed slice type.
/// Unrecognized track types leave oneFrameInfo untouched.
/// </summary>
/// <param name="oneFrameInfo">Frame record to classify.</param>
/// <exception cref="InvalidOperationException">When the track type has not been determined.</exception>
private void GetSliceTypeAndFrameNum(StreamDataBlockInfo oneFrameInfo)
{
    if (_trackType == null)
    {
        // was: bare Exception followed by a stray empty statement (";") — both removed
        throw new InvalidOperationException("Fragment: track type undetermined");
    }
    switch (_trackType)
    {
        case "avc1":
        case "vc-1":
        case "mp4v":
            GetSliceTypeAndFrameNumFromH264Payload(oneFrameInfo);
            break;
        case "mp4a":
            oneFrameInfo.SliceType = SliceType.MP4A;
            break;
        case "wma ":
            oneFrameInfo.SliceType = SliceType.WMA;
            break;
        //case "avc1":
        //  return SampleType.AVC1;
        //  break;
        default:
            return;
    }
}
/// <summary>
/// GetOneSample
/// Get one frame or sample from the moof structures. There is no stss in a fragment, so we need to determine
/// whether a frame is an IFrame or not by examining the IndependentAndDisposableSamplesBox.
/// Advances _currentOffsetInBytes, _currentTime and _currentFrame as a side effect.
/// </summary>
/// <param name="samples">Per-sample records from the trun box.</param>
/// <param name="tfhd">Fragment header supplying default sample size and duration.</param>
/// <returns>The next frame's slice record, or null when this fragment is exhausted or
/// the remaining file data is too short for the next frame.</returns>
private StreamDataBlockInfo GetOneSample(List<TrackFragmentRunSample> samples, TrackFragmentHeaderBox tfhd)
{
    // done with this fragment?
    if (_currentFrame == samples.Count + _startIndex)
        return null;
    // per-sample size (from trun) wins over the tfhd default
    uint fixedFrameSizeInBytes = tfhd.DefaultSampleSize;
    if (fixedFrameSizeInBytes == 0)
    {
        fixedFrameSizeInBytes = samples[_currentFrame - _startIndex].SampleSize;
    }
    if (fixedFrameSizeInBytes == 0) // if it's still zero, then we have a problem
        throw new Exception("Sample size zero");
    // is there enough data left to read the next frame?
    if (this._baseDataOffset + _currentOffsetInBytes + fixedFrameSizeInBytes > (ulong)_reader.BaseStream.Length)
        return null;
    // currently DRM is not yet supported in this GetFrame routine, unlike the FragmentedMp4ParserImplementation
    // if ((this.m_drmIVOffsets != null) && (this.m_numDrmIVs > this.m_frameIndex))
    // {
    //  length = this.m_drmIVSizes[this.m_frameIndex];
    //  destinationArray = new byte[length];
    //  Array.Copy(this.m_headerBuffer, this.m_drmIVOffsets[this.m_frameIndex], destinationArray, 0, length);
    // }
    // per-sample duration (from trun) wins over the tfhd default
    uint fixedDuration = tfhd.DefaultSampleDuration;
    if (samples[_currentFrame - _startIndex].SampleDuration != 0)
    {
        fixedDuration = samples[_currentFrame - _startIndex].SampleDuration;
    }
    if (_timeScale > 0) // time scale is 1 for ODS assets
    {
        // scale time
        fixedDuration = (uint)TimeArithmetic.ConvertToStandardUnit(_timeScale, fixedDuration);
    }
    StreamDataBlockInfo oneFrameData = new StreamDataBlockInfo();
    //RawFrameData ans = new RawFrameData(CurrentTime, currentOffsetInBytes, fixedFrameSizeInBytes, fixedDuration, destinationArray);
    oneFrameData.SliceDuration = fixedDuration;
    oneFrameData.SliceSize = (int)fixedFrameSizeInBytes;
    oneFrameData.StreamOffset = this._baseDataOffset + _currentOffsetInBytes;
    GetSliceTypeAndFrameNum(oneFrameData); // for ISM, TimeStampNew will always have a value
    oneFrameData.TimeStampNew = (ulong)_currentTime;
    oneFrameData.index = _currentFrame;
    // advance iteration state to the next frame
    _currentOffsetInBytes += fixedFrameSizeInBytes;
    _currentTime += fixedDuration;
    _currentFrame++;
    return oneFrameData;
}
/// <summary>
/// GetSampleStream (index-based)
/// Copy the slice records of this fragment whose track index lies in
/// [inStartSampleIndex, inEndSampleIndex] into sampleStream. Collection continues past
/// the end index through trailing D/B frames and stops at the first independent slice.
/// </summary>
/// <param name="sampleStream">Receives copies of the matching slice records.</param>
/// <param name="inStartSampleIndex">First track-relative sample index wanted.</param>
/// <param name="inEndSampleIndex">Last track-relative sample index wanted.</param>
/// <param name="lastEnd">Receives the running end time of the last slice copied.</param>
/// <returns>False when the start index is beyond this fragment; true otherwise.</returns>
public bool GetSampleStream(List<StreamDataBlockInfo> sampleStream, int inStartSampleIndex, int inEndSampleIndex, ref ulong lastEnd)
{
    if (inStartSampleIndex >= (_startIndex + Length))
    {
        return (false); // requested index is beyond this fragment
    }
    _currentFrame = _startIndex;
    _currentTime = (long)_startTime;
    foreach (StreamDataBlockInfo info in _listOfSampleInfo)
    {
        if (info.index >= inStartSampleIndex)
        {
            bool pastEnd = info.index > inEndSampleIndex;
            bool dependentFrame = (info.SliceType == SliceType.DFrame) || (info.SliceType == SliceType.BFrame);
            if (pastEnd && !dependentFrame)
            {
                break; // first independent slice beyond the end index terminates collection
            }
            _currentTime += (long)info.SliceDuration;
            lastEnd = (ulong)_currentTime;
            StreamDataBlockInfo copy = new StreamDataBlockInfo();
            copy.Copy(info);
            sampleStream.Add(copy);
        }
        _currentFrame++;
    }
    return (true);
}
/// <summary>
/// GetSampleStream
/// Assemble "sampleStream": the list of sample records that point to the sample bits
/// in mdat and carry duration data, etc. Only samples belonging to THIS fragment are
/// collected; when the [startTime, endTime] span covers several fragments it is up to
/// the caller to create another Fragment instance and continue there.
/// FIXME: Need to optimize this so that sampleStream is assembled from listOfSampleInfo, instead of reading boxes again.
/// </summary>
/// <param name="sampleStream">List of sample records to be assembled.</param>
/// <param name="timeScale">Sampling rate (samples per second).</param>
/// <param name="trackType">Which track does this fragment belong?</param>
/// <param name="startTime">Start of Iteration time.</param>
/// <param name="endTime">End of Iteration time.</param>
/// <param name="lastEnd">Previous endTime.</param>
/// <returns>False when this fragment cannot satisfy the request; true otherwise.</returns>
public bool GetSampleStream(List<StreamDataBlockInfo> sampleStream, uint timeScale, string trackType, ulong startTime, ulong endTime, ref ulong lastEnd)
{
    if (startTime > (_startTime + Duration))
    {
        return (false); // requested start time is ahead of this whole fragment
    }
    if (_timeScale != timeScale)
    {
        return (false);
    }
    _currentFrame = _startIndex;
    _currentTime = (long)_startTime;
    foreach (StreamDataBlockInfo info in _listOfSampleInfo)
    {
        if (info.TimeStampNew >= startTime)
        {
            bool pastEnd = info.TimeStampNew > endTime;
            bool dependentFrame = (info.SliceType == SliceType.DFrame) || (info.SliceType == SliceType.BFrame);
            if (pastEnd && !dependentFrame)
            {
                break; // first independent slice beyond endTime terminates collection
            }
            _currentTime += (long)info.SliceDuration;
            lastEnd = (ulong)_currentTime;
            StreamDataBlockInfo copy = new StreamDataBlockInfo();
            copy.Copy(info);
            sampleStream.Add(copy);
        }
        _currentFrame++;
    }
    return (true);
}
/// <summary>
/// GetOneSample
/// Get one frame or sample from the moof structures. There is no stss in a fragment, so we need to determine
/// whether a frame is an IFrame or not by examining the IndependentAndDisposableSamplesBox.
/// Advances _currentOffsetInBytes, _currentTime and _currentFrame as a side effect.
/// </summary>
/// <param name="samples">Per-sample records from the trun box.</param>
/// <param name="tfhd">Fragment header supplying default sample size and duration.</param>
/// <returns>The next frame's slice record, or null when this fragment is exhausted or
/// the remaining file data is too short for the next frame.</returns>
private StreamDataBlockInfo GetOneSample(List <TrackFragmentRunSample> samples, TrackFragmentHeaderBox tfhd)
{
    // done with this fragment?
    if (_currentFrame == samples.Count + _startIndex)
    {
        return (null);
    }
    // per-sample size (from trun) wins over the tfhd default
    uint fixedFrameSizeInBytes = tfhd.DefaultSampleSize;
    if (fixedFrameSizeInBytes == 0)
    {
        fixedFrameSizeInBytes = samples[_currentFrame - _startIndex].SampleSize;
    }
    if (fixedFrameSizeInBytes == 0) // if it's still zero, then we have a problem
    {
        throw new Exception("Sample size zero");
    }
    // is there enough data left to read the next frame?
    if (this._baseDataOffset + _currentOffsetInBytes + fixedFrameSizeInBytes > (ulong)_reader.BaseStream.Length)
    {
        return (null);
    }
    // currently DRM is not yet supported in this GetFrame routine, unlike the FragmentedMp4ParserImplementation
    // if ((this.m_drmIVOffsets != null) && (this.m_numDrmIVs > this.m_frameIndex))
    // {
    //  length = this.m_drmIVSizes[this.m_frameIndex];
    //  destinationArray = new byte[length];
    //  Array.Copy(this.m_headerBuffer, this.m_drmIVOffsets[this.m_frameIndex], destinationArray, 0, length);
    // }
    // per-sample duration (from trun) wins over the tfhd default
    uint fixedDuration = tfhd.DefaultSampleDuration;
    if (samples[_currentFrame - _startIndex].SampleDuration != 0)
    {
        fixedDuration = samples[_currentFrame - _startIndex].SampleDuration;
    }
    if (_timeScale > 0) // time scale is 1 for ODS assets
    {
        // scale time
        fixedDuration = (uint)TimeArithmetic.ConvertToStandardUnit(_timeScale, fixedDuration);
    }
    StreamDataBlockInfo oneFrameData = new StreamDataBlockInfo();
    //RawFrameData ans = new RawFrameData(CurrentTime, currentOffsetInBytes, fixedFrameSizeInBytes, fixedDuration, destinationArray);
    oneFrameData.SliceDuration = fixedDuration;
    oneFrameData.SliceSize = (int)fixedFrameSizeInBytes;
    oneFrameData.StreamOffset = this._baseDataOffset + _currentOffsetInBytes;
    GetSliceTypeAndFrameNum(oneFrameData); // for ISM, TimeStampNew will always have a value
    oneFrameData.TimeStampNew = (ulong)_currentTime;
    oneFrameData.index = _currentFrame;
    // advance iteration state to the next frame
    _currentOffsetInBytes += fixedFrameSizeInBytes;
    _currentTime += fixedDuration;
    _currentFrame++;
    return (oneFrameData);
}
/// <summary>
/// AddOneSample
/// Append one TrackFragmentRunSample built from a slice record. The slice duration
/// (in ticks) is rescaled into the track time scale, and the running mdat offset is
/// advanced by this sample's size.
/// </summary>
/// <param name="data">Slice metadata supplying duration and size.</param>
/// <param name="timeScale">Track time scale (units per second).</param>
/// <param name="defSize">Default sample size from tfhd; 0 means per-sample sizes are used.</param>
/// <param name="defFlags">Default sample flags from tfhd; 0 means per-sample flags are used.</param>
/// <param name="currMdatOffset">Running offset into mdat, advanced by this sample's size.</param>
public void AddOneSample(StreamDataBlockInfo data, uint timeScale, uint defSize, uint defFlags, ref ulong currMdatOffset)
{
    // at this point the samples List should have been created
    TrackFragmentRunSample sample = new TrackFragmentRunSample();
    sample.SampleCompositionTimeOffset = 0; // FIXME: let's see whether we can get by without setting this composition time offset
    // careful with overflow: multiply in ulong before dividing
    ulong product = ((ulong)data.SliceDuration) * ((ulong)timeScale);
    // was: TimeSpan.FromSeconds(1.0).Ticks — TicksPerSecond is the same value without
    // constructing a TimeSpan, and matches the usage elsewhere in this file
    sample.SampleDuration = (uint)(product / (ulong)TimeSpan.TicksPerSecond);
    this.Duration += sample.SampleDuration;
    if (defFlags == 0)
    {
        sample.SampleFlags = 0; // FIXME: we are not setting the sample flags at all
    }
    if (defSize == 0)
    {
        sample.SampleSize = (uint)data.SliceSize;
        currMdatOffset += sample.SampleSize;
    }
    else
    {
        currMdatOffset += defSize;
    }
    this.Samples.Add(sample);
}
/// <summary>
/// InitSampleStreamFromSampleTableBox
/// The idea is to collect information on slices starting from startIndex to endIndex.
/// This is a fairly complex method that traverses all boxes read from this SampleTableBox:
/// stts (durations), stsz (sizes), SampleToChunkBox (file offsets), SyncSampleMapBox
/// (I-frame detection) and CompositionTimeToSample (ctts offsets).
/// NOTE(review): the comparisons use sampleCount + 1, so startIndex/endIndex appear to
/// be 1-based sample indices — confirm against callers.
/// </summary>
/// <param name="sampleTimeScale">uint - sample time scale (media units per second)</param>
/// <param name="startIndex">int - start index</param>
/// <param name="endIndex">int - end index</param>
/// <param name="lastEnd">ref ulong - receives the scaled time at which iteration ended</param>
/// <returns>List of slice records for the requested range</returns>
public List <StreamDataBlockInfo> InitSampleStreamFromSampleTableBox(uint sampleTimeScale, int startIndex, int endIndex, ref ulong lastEnd)
{
    List <StreamDataBlockInfo> SampleStreamLocations = new List <StreamDataBlockInfo>();
    // local vars
    DecodingTimeToSampleBox stts = this.DecodingTimeToSampleBox;
    if (stts == null)
    {
        throw new Exception("SampleTableBox.DecodingTimeToSampleBox missing for track");
    }
    uint sampleCount = 0;   // running 0-based sample counter across all stts entries
    ulong timeT = 0;        // running decode time in media units
    ulong currScaledT = 0;  // timeT rescaled to ticks
    ulong prevScaledT = 0;
    ulong endT = 0;         // set once the end sample has been found
    uint[] counts = stts.SampleCount;
    uint[] deltaTimes = stts.SampleDelta;
    ulong currOffset = 0UL; // running byte offset (tracked but not stored per sample)
    SampleSizeBox stsz = this.SampleSizeBox;
    uint sampleSize = stsz.SampleSize; // nonzero means a constant size for every sample
    uint totalSamples = stsz.SampleCount;
    uint[] sizeArray = stsz.SampleSizeArray;
    int sampleCountInList = 0;
    if ((this.SampleDescriptionsBox == null) || (this.SampleDescriptionsBox.EntryCount != 1))
    {
        throw new Exception("SampleTableBox.SampleDescriptionsBox error");
    }
    BoxType sampleDescriptionBoxType = this.SampleDescriptionsBox.Entries[0].Type;
    // initialize (set) cttsIndex to the value that corresponds to startIndex
    int k = 0;
    int cttsIndex = 0;
    if (CompositionTimeToSample != null && CompositionTimeToSample.EntryCount > 0)
    {
        int sliceIndex = 1;
        while (cttsIndex < CompositionTimeToSample.SampleOffset.Length)
        {
            if (sliceIndex == startIndex)
            {
                break;
            }
            k++;
            if (k == CompositionTimeToSample.SampleCount[cttsIndex])
            {
                k = 0; // begin counting from zero again
                cttsIndex++;
            }
            sliceIndex++;
        }
    }
    for (int i = 0; i < stts.EntryCount; i++)
    {
        for (int j = 0; j < counts[i]; j++)
        {
            currScaledT = (ulong)((timeT * (ulong)TimeSpan.FromSeconds(1.0).Ticks) / sampleTimeScale);
            if ((sampleCount + 1 >= startIndex) && (sampleCount + 1 <= endIndex))
            {
                StreamDataBlockInfo data = new StreamDataBlockInfo();
                data.index = (int)sampleCount;
                data.TimeStampNew = currScaledT;
                // NOTE(review): the trailing + 1 tick presumably avoids zero-length slices — confirm
                data.SliceDuration = (uint)((deltaTimes[i] * TimeSpan.TicksPerSecond) / sampleTimeScale) + 1;
                data.SliceSize = (int)sampleSize;
                if (sampleSize == 0)
                {
                    data.SliceSize = (int)sizeArray[sampleCount];
                }
                data.StreamOffset = (ulong)this.SampleToChunkBox.GetFileOffset((uint)(sampleCount + 1));
                // mp4a tracks are audio; video slices are IFrames when stss lists them (or stss is absent)
                data.SliceType = sampleDescriptionBoxType == BoxTypes.Mp4a ? SliceType.MP4A :
                    ((this.SyncSampleMapBox == null) || this.SyncSampleMapBox.IsIFrame(sampleCount + 1) ? SliceType.IFrame : SliceType.DFrame);
                // if necessary, increment cttsIndex
                if (CompositionTimeToSample != null && CompositionTimeToSample.EntryCount > 0)
                {
                    k++;
                    data.NonQuickTimeCTTS = CompositionTimeToSample.SampleOffset[cttsIndex];
                    if (k == CompositionTimeToSample.SampleCount[cttsIndex])
                    {
                        k = 0; // begin counting from zero again
                        cttsIndex++;
                    }
                }
                SampleStreamLocations.Add(data);
                sampleCountInList++;
            }
            if (sampleCount + 1 > endIndex)
            {
                endT = prevScaledT;
                break;
            } // close of if (currScaledT > endTimeJustBeforeIFrame)
            // keep track of offset
            if (sampleSize > 0)
            {
                currOffset += sampleSize;
            }
            else
            {
                if (sampleCount > totalSamples)
                {
                    throw new Exception("SampleTableBox error: sample count inconsistency bet. stts and stsz");
                }
                currOffset += sizeArray[sampleCount];
            }
            prevScaledT = currScaledT;
            timeT += deltaTimes[i];
            sampleCount++;
        } // end of for j
        if (endT > 0UL) // end sample found
        {
            break;
        }
    } // end of for i
    if (endT == 0UL) // if we did not find end, endTime would not be set
    {
        lastEnd = currScaledT;
    }
    else
    {
        lastEnd = endT;
    }
    return (SampleStreamLocations);
}