Code Example #1
        /// <summary>
        /// NearEnd
        /// For every IFrame qbox, check whether this is the last IFrame before the end of this run.
        /// If it is, don't include this IFrame in the current run; it will be the first QBox in the NEXT run.
        /// NOTE: This is not used anywhere.
        /// </summary>
        /// <param name="boxCount">count of qboxes processed so far (index of the current box, plus one)</param>
        /// <param name="inEndSampleTime">requested end time, in 100-nanosecond units</param>
        /// <param name="lastEnd">end time of the current box, in 100-nanosecond units</param>
        /// <param name="scaleFactor">100-nanosecond ticks per media time unit</param>
        /// <returns>true if inEndSampleTime is reached before the next sync point</returns>
        private bool NearEnd(int boxCount, ulong inEndSampleTime, ulong lastEnd, float scaleFactor)
        {
            if (inEndSampleTime < lastEnd)
            {
                return(true);
            }

            if (boxCount + 1 >= _qBoxes.Count) // it is not near the end, it's AT the end (this also guards the _qBoxes[boxCount + 1] access below)
            {
                return(false);
            }

            int   index     = boxCount + 1;
            ulong blockTime = lastEnd;
            QBox  box       = _qBoxes[index];

            while (((uint)box.mSampleFlags & QBox.QBOX_SAMPLE_FLAGS_SYNC_POINT) == 0)
            {
                string streamType = box.SampleStreamTypeString();
                if (streamType == "H264")
                {
                    blockTime += (ulong)(scaleFactor * box.mSampleDuration);
                }
                else if (streamType == "AAC")
                {
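                    // unlike the H264 branch above, take the CTS as an absolute time rather than accumulating durations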
                    blockTime = (ulong)(scaleFactor * box.mSampleCTS);
                }
                else
                {
                    throw new Exception(string.Format("Unsupported qbox stream type: {0}", streamType));
                }

                if (inEndSampleTime < blockTime)
                {
                    return(true);
                }
                index++;
                if (index == _qBoxes.Count)
                {
                    return(false);
                }
                box = _qBoxes[index];
            }
            return(false);
        }
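
The sync-point test ((uint)box.mSampleFlags & QBox.QBOX_SAMPLE_FLAGS_SYNC_POINT) recurs throughout these examples to distinguish IFrames from dependent frames. As a minimal sketch (not part of the original source), the idiom could be factored into a helper:

        // hypothetical helper, for illustration only: true when the qbox is a
        // sync point (an IFrame, for H264 streams)
        private static bool IsSyncPoint(QBox box)
        {
            return ((uint)box.mSampleFlags & QBox.QBOX_SAMPLE_FLAGS_SYNC_POINT) != 0;
        }

With such a helper, the while condition above reads as while (!IsSyncPoint(box)).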
Code Example #2
        /// <summary>
        /// PrepareSampleReading
        /// In MP4, reading of box headers is separate from reading of the H264 and audio bits. This is because the bits are stored
        /// in a different place in the file (or may in fact be in a separate file). In a QBox file, however, both headers and bits
        /// are stored in the qbox. It makes no sense to separate the two. Therefore, in this implementation of PrepareSampleReading,
        /// we actually read the bits together with the headers. The routine WriteSamples doesn't do much.
        ///
        /// There are two overloads of this method: one that accepts qbox indices (this one), and another that accepts ulong start
        /// and end times.
        ///
        /// We don't keep the qboxes. QBoxes already processed are disposed of as a last step. If we run out of qboxes, we read in
        /// more.
        /// </summary>
        /// <param name="inStartSampleIndex">int index to first qbox to be processed</param>
        /// <param name="inEndSampleIndex">int index to last qbox to be processed</param>
        /// <param name="dummy">not used</param>
        /// <returns>one StreamDataBlockInfo per sample read</returns>
        public override List <StreamDataBlockInfo> PrepareSampleReading(int inStartSampleIndex, int inEndSampleIndex,
                                                                        ref ulong dummy)
        {
            List <StreamDataBlockInfo> retList = new List <StreamDataBlockInfo>();

            if (_qBoxes.Count == 0)
            {
                return(retList);
            }


            float scaleFactor     = (float)TimeSpan.FromSeconds(1.0).Ticks / this.TimeScale; // 100-ns ticks per media time unit; the cast avoids integer division
            bool  foundFirstSlice = false;
            int   boxCount        = 0;

            // we traverse the _qBoxes list from the beginning;
            // we can't use foreach, and use a plain for loop instead, because
            // qboxes are added to _qBoxes as part of the loop;
            // note that box.mIndex is NOT the same as the loop index i.
            for (int i = 0; i < _qBoxes.Count; i++)
            {
                QBox box = _qBoxes[i];
                boxCount++;

                // reject qboxes with sample size zero (no data)
                if (box.mSampleSize == 0)
                {
                    continue;
                }

                // we shouldn't need to search for the first box of interest:
                // it should always be the first one, because all boxes already processed were thrown away
                if (((ulong)inStartSampleIndex > (box.mFrameCounter - 1)) ||
                    ((!foundFirstSlice) && (((uint)box.mSampleFlags & QBox.QBOX_SAMPLE_FLAGS_SYNC_POINT) == 0)))
                {
                    continue; // skip
                }
                else if ((ulong)inStartSampleIndex == (box.mFrameCounter - 1))
                {
                    foundFirstSlice = true;
                }
                else if (!foundFirstSlice)
                {
                    _qBoxes.Clear();
                    base.GetNextBatch(0, inStartSampleIndex); // throw new Exception("First IFrame not found");
                    i        = -1;                            // this gets incremented to zero
                    boxCount = 0;                             // start all over
                    continue;
                }

                StreamDataBlockInfo datBlock = new Slice();

                switch (box.SampleStreamTypeString())
                {
                case "AAC":
                    datBlock           = new ADTSDataBlockInfo();
                    datBlock.SliceType = SliceType.AAC;
                    break;

                case "Q711":
                case "PCM":
                    datBlock.SliceType = SliceType.Unknown; // FIXME: add sample type for PCM
                    break;

                case "MP2A":
                    datBlock.SliceType = SliceType.MP4A;
                    break;

                case "Q722": // ADPCM
                case "Q726":
                case "Q728":
                    datBlock.SliceType = SliceType.Unknown; // FIXME: add sample type for ADPCM
                    break;

                case "H264":
                case "H264_SLICE":
                    datBlock = new NaluDelimiterBlockInfo();
                    if (((uint)box.mSampleFlags & QBox.QBOX_SAMPLE_FLAGS_SYNC_POINT) == 0)
                    {
                        datBlock.SliceType = SliceType.DFrame;
                    }
                    else
                    {
                        datBlock.SliceType = SliceType.IFrame;
                    }
                    if ((box.mSample != null) && (box.mSample.v != null))
                    {
                        NaluDelimiterBlockInfo blockInfo = datBlock as NaluDelimiterBlockInfo;
                        blockInfo.AccessUnitDelimiter = box.mSample.v.aud;
                    }
                    break;

                case "JPEG":
                    datBlock.SliceType = SliceType.JPEG;
                    break;

                case "MPEG2_ELEMENTARY":
                    datBlock.SliceType = SliceType.Unknown; // FIXME: add sample type for MPEG2
                    break;

                case "VIN_STATS_GLOBAL":
                case "VIN_STATS_MB":
                case "USER_METADATA":
                case "DEBUG":
                default:
                    System.Diagnostics.Debug.WriteLine("Unknown QBox: {0}", box.SampleStreamTypeString());
                    break;
                }

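                // composition-time offset of this sample: its CTS minus its start time
                // (the stream duration before this sample), scaled to 100-ns units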
                datBlock.CTS           = (ulong)((box.mSampleCTS - (box.mStreamDuration - box.mSampleDuration)) * scaleFactor);
                datBlock.SliceDuration = (uint)(scaleFactor * box.mSampleDuration);
                if (box.mFrameCounter == 0 && box.mStreamDuration == 0)
                {
                    datBlock.TimeStampNew = 0;
                }
                else if (box.mStreamDuration == 0)
                {
                    datBlock.TimeStampNew = null;
                }
                else
                {
                    datBlock.TimeStampNew = (ulong)(scaleFactor * (box.mStreamDuration - box.mSampleDuration));
                }
                datBlock.SliceSize = box.mSampleSize;
                datBlock.index     = (int)box.mFrameCounter - 1; // boxCount;

                // NOTE! For qbox, StreamOffset has a different meaning than in MP4.
                // Here, StreamOffset is the offset to the qbox itself, whereas in
                // MP4, StreamOffset is the offset to the H264 payload.
                // In GenericMediaTrack.GetSample, StreamOffset is used as in MP4, but
                // that method is overridden in QBoxVideoTrack by one that does not use StreamOffset.
                // For flashback to work for both MP4 and qbox files, the caching mechanism
                // differs between MP4 and qbox.
                datBlock.StreamOffset = (ulong)box.mHeaderPosition; // needed for flashback to work

                // set payload
                Slice slice = datBlock as Slice;
                slice.SliceBytes = box.mSample.mPayload;

#if ADTS
                if (box.mSampleStreamType == QBox.QBOX_SAMPLE_TYPE_AAC)
                {
                    QMed.QMedAAC qmedaac = (QMed.QMedAAC)box.mSample.qmed;
#if PES
                    datBlock.PESandADTSHeaders = new byte[qmedaac.pesHeader.Length + qmedaac.adtsHeader.Length];
                    qmedaac.pesHeader.CopyTo(datBlock.PESandADTSHeaders, 0);
                    qmedaac.adtsHeader.CopyTo(datBlock.PESandADTSHeaders, qmedaac.pesHeader.Length);
#else
                    datBlock.PESandADTSHeaders = new byte[qmedaac.adtsHeader.Length];
                    qmedaac.adtsHeader.CopyTo(datBlock.PESandADTSHeaders, 0);
#endif
                    datBlock.SampleSize += datBlock.PESandADTSHeaders.Length;
                }
#endif
                if (datBlock.SliceDuration == 0)
                {
                    // FIXME: this reassigns the same expression as above, so it cannot produce
                    // a non-zero duration; some other fallback value appears to be intended
                    datBlock.SliceDuration = (uint)(scaleFactor * box.mSampleDuration); // any non-zero duration is better
                }

                if ((((uint)box.mSampleFlags & QBox.QBOX_SAMPLE_FLAGS_SYNC_POINT) != 0) && ((box.mFrameCounter - 1) >= (ulong)inEndSampleIndex))
                {
                    boxCount--;
                    break; // don't put last IFrame box in return list
                }

                retList.Add(datBlock);

                if (box == _qBoxes.Last())
                {
                    base.GetNextBatch(GenericMediaStream.MAX_BOXES_TO_READ, 0);
                    // callee should set end FIXME: is box.mCurrentPosition being set?
                }
            } // end of for loop

            _qBoxes.RemoveRange(0, boxCount);

            return(retList);
        }
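
The time arithmetic above converts media time units to .NET 100-nanosecond ticks: scaleFactor is ticks per second (TimeSpan.FromSeconds(1.0).Ticks, i.e. 10,000,000) divided by TimeScale, the media clock rate in units per second. A self-contained sketch of the conversion, assuming a 90 kHz media clock (the 90000 and the 30 fps frame duration are illustrative values, not taken from the source):

        using System;

        class TimeScaleDemo
        {
            static void Main()
            {
                uint timeScale = 90000; // assumed 90 kHz media clock
                // one .NET tick is 100 ns, so one second is 10,000,000 ticks;
                // the (float) cast avoids integer division for integral time scales
                float scaleFactor = (float)TimeSpan.FromSeconds(1.0).Ticks / timeScale;

                uint sampleDuration = 3000; // one frame at 30 fps, in 90 kHz units
                ulong duration100ns = (ulong)(scaleFactor * sampleDuration);

                Console.WriteLine(scaleFactor);   // ~111.11 ticks per media unit
                Console.WriteLine(duration100ns); // 333333 ticks, i.e. 33.3333 ms per frame
            }
        }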
Code Example #3
File: QBoxStream.cs Project: Genteure/GPUCyclops
//nbl; removed as we shouldn't 'fix' bframe time stamps
        //private Dictionary<ushort, ulong> PrevTimeStamps = new Dictionary<ushort, ulong>();
        //private Dictionary<ushort, int> PrevIndices = new Dictionary<ushort, int>();

        public override void LazyRead(int requestedBoxCount)
        {
            QBox qbox     = null;
            int  boxCount = 0;

            lock (_binaryReader.BaseStream) {
                // clear out all qbox lists
                // we expect the payload buffers to stay intact because these are now referenced in Slices
                _audioBoxes.Clear();
                _videoBoxes.Clear();

                while ((boxCount < requestedBoxCount) && (_binaryReader.BaseStream.Position < _binaryReader.BaseStream.Length))
                {
                    try {
                        qbox = new QBox();
                        qbox.Read(_binaryReader);
                        if (MediaTimes[qbox.mSampleStreamId] == null)
                        {
                            MediaTimes[qbox.mSampleStreamId] = new MediaTimeUtils();
                        }
                        MediaTimes[qbox.mSampleStreamId].SetClockRate(((qbox.mSampleFlags & QBox.QBOX_SAMPLE_FLAGS_120HZ_CLOCK) != 0U));

//nbl; we can't fill in duration for bframes as this doesn't make sense... the CTTS info is presentation time used for mp4 stuff
//qbox.FixTimeStamp(PrevTimeStamps, PrevIndices);  // <---- Kludge! Some qboxes may have mStreamDuration reset, fix it here

                        boxCount++;
                    }
                    // we catch EndOfStreamException separately because there all we do is
                    // break out of the while loop; any other exception is rethrown as-is
                    catch (EndOfStreamException eos) {
                        string msg = eos.Message; // kept only for inspection while debugging
                        break;
                    } catch (Exception) {
                        throw; // bare throw preserves the original stack trace, unlike throw ex
                    }

                    switch (qbox.SampleStreamTypeString())
                    {
                    case "AAC":
                    case "PCM":
                    case "MP2A":
                    case "Q711":
                    case "Q722":
                    case "Q726":
                    case "Q728":
                        _audioBoxes.Add(qbox);
                        break;

                    case "H264":
                    case "H264_SLICE":
                    case "JPEG":
                    case "MPEG2_ELEMENTARY":
                        if (!_videoTrackIDs.Contains(qbox.mSampleStreamId))
                        {
                            _videoTrackIDs.Add(qbox.mSampleStreamId);
                        }

                        _videoBoxes.Add(qbox);
                        break;

                    case "VIN_STATS_GLOBAL":
                    case "VIN_STATS_MB":
                    case "USER_METADATA":
                    case "DEBUG":
                    default:
                        System.Diagnostics.Debug.WriteLine("Unknown QBox: {0}", qbox.SampleStreamTypeString());
                        break;
                    }

                } // end of while
            }

            // define the tracks, if we haven't already
            // note that for qboxes, we really only care about formats (QBoxTrackFormat), and tracks are just generic.
            if (MediaTracks.Count == 0 && qbox != null)
            {
                if (_audioBoxes.Count > 0)
                {
                    ushort          audioTrackID     = _audioBoxes[0].mSampleStreamId;
                    QBoxTrackFormat audioTrackFormat = new QBoxTrackFormat(_audioBoxes, audioTrackID, MediaTimes[audioTrackID]);
                    QBoxAudioTrack  audioTrack       = new QBoxAudioTrack(audioTrackFormat, this);
                    //          audioTrack.NextIndexToRead = tempIndices[audioTrackID];
                    //GenericAudioTrack audioTrack = new GenericAudioTrack(audioTrackFormat, this);
                    //this.Duration = audioTrack.TrackDuration;
                    //this.TimeScale = (uint)audioTrack.SampleRate;
                    base.AddTrack(audioTrack);
                }

                foreach (ushort trackID in _videoTrackIDs)
                {
                    QBoxTrackFormat videoTrackFormat = new QBoxTrackFormat(_videoBoxes, trackID, MediaTimes[trackID]);
                    QBoxVideoTrack  videoTrack       = new QBoxVideoTrack(videoTrackFormat, this);
                    videoTrack.NextIndexToRead = (int)(qbox.mBoxContinuityCounter + 1);
                    if (DurationIn100NanoSecs < videoTrack.TrackDurationIn100NanoSecs)
                    {
                        this.DurationIn100NanoSecs = videoTrack.TrackDurationIn100NanoSecs;
                    }
                    //this.TimeScale = videoTrack.TrackFormat.TimeScale;
                    base.AddTrack(videoTrack);
                }
            }
            else if (_audioBoxes.Count > 0 && _videoBoxes.Count > 0)
            {
                // add qboxes to existing track formats
                foreach (GenericMediaTrack track in this.MediaTracks)
                {
                    QBoxTrackFormat format = track.TrackFormat as QBoxTrackFormat;
                    if (track is GenericAudioTrack)
                    {
                        format.AddMore(_audioBoxes);
                    }
                    else
                    {
                        format.AddMore(_videoBoxes);
                    }
                }
            }

            if (currStreamLength < Stream.Length)
            {
                currStreamLength = Stream.Length;
                // if the duration we're getting from the last audio qbox is shorter than we already have, then don't bother
                ulong liveDuration = (ulong)GetDurationFromLastQBox(); // seek all the way forward and back, just to determine duration
                if (liveDuration > DurationIn100NanoSecs)
                {
                    DurationIn100NanoSecs = liveDuration;
                }
                // might as well set audio and video durations
                foreach (IMediaTrack track in MediaTracks)
                {
                    track.TrackDurationIn100NanoSecs = DurationIn100NanoSecs;
                }
            }
        }
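
The catch-all in LazyRead rethrows with a bare throw so that the original stack trace is preserved; throw ex would reset the trace to the catch site. A minimal standalone demonstration of the difference:

        using System;

        class RethrowDemo
        {
            static void Inner() { throw new InvalidOperationException("boom"); }

            static void Main()
            {
                try
                {
                    try { Inner(); }
                    catch (Exception) { throw; } // bare throw: the trace still points at Inner()
                }
                catch (Exception ex)
                {
                    Console.WriteLine(ex.StackTrace); // includes the Inner() frame
                }
            }
        }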