Example #1
0
        //nbl; removed as we shouldn't 'fix' bframe time stamps
        //private Dictionary<ushort, ulong> PrevTimeStamps = new Dictionary<ushort, ulong>();
        //private Dictionary<ushort, int> PrevIndices = new Dictionary<ushort, int>();
        /// <summary>
        /// Reads up to <paramref name="requestedBoxCount"/> QBoxes from the underlying
        /// binary stream, routes each box into the audio or video box list by its
        /// stream type, then either creates the media tracks (first call) or appends
        /// the new boxes to the existing track formats (subsequent calls).
        /// </summary>
        /// <param name="requestedBoxCount">Maximum number of QBoxes to read in this pass.</param>
        public override void LazyRead(int requestedBoxCount)
        {
            QBox qbox = null;
            int boxCount = 0;

            lock (_binaryReader.BaseStream)
            {
                // Clear out all qbox lists.
                // We expect the payload buffers to stay intact because these are now referenced in Slices.
                _audioBoxes.Clear();
                _videoBoxes.Clear();

                while ((boxCount < requestedBoxCount) &&
                       (_binaryReader.BaseStream.Position < _binaryReader.BaseStream.Length))
                {
                    try
                    {
                        qbox = new QBox();
                        qbox.Read(_binaryReader);
                        if (MediaTimes[qbox.mSampleStreamId] == null)
                        {
                            MediaTimes[qbox.mSampleStreamId] = new MediaTimeUtils();
                        }
                        MediaTimes[qbox.mSampleStreamId].SetClockRate((qbox.mSampleFlags & QBox.QBOX_SAMPLE_FLAGS_120HZ_CLOCK) != 0U);

                        // nbl: we can't fill in duration for b-frames as this doesn't make sense;
                        // the CTTS info is presentation time used for mp4 stuff, so no timestamp "fixing" here.

                        boxCount++;
                    }
                    catch (EndOfStreamException)
                    {
                        // Expected when the stream ends mid-box: stop reading what we have so far.
                        break;
                    }
                    // Any other exception propagates unmodified. (The previous "catch (Exception ex)
                    // { throw ex; }" reset the stack trace; letting it bubble up preserves it.)

                    switch (qbox.SampleStreamTypeString())
                    {
                        case "AAC":
                        case "PCM":
                        case "MP2A":
                        case "Q711":
                        case "Q722":
                        case "Q726":
                        case "Q728":
                            _audioBoxes.Add(qbox);
                            break;

                        case "H264":
                        case "H264_SLICE":
                        case "JPEG":
                        case "MPEG2_ELEMENTARY":
                            if (!_videoTrackIDs.Contains(qbox.mSampleStreamId))
                            {
                                _videoTrackIDs.Add(qbox.mSampleStreamId);
                            }
                            _videoBoxes.Add(qbox);
                            break;

                        case "VIN_STATS_GLOBAL":
                        case "VIN_STATS_MB":
                        case "USER_METADATA":
                        case "DEBUG":
                        default:
                            System.Diagnostics.Debug.WriteLine("Unknown QBox: {0}", qbox.SampleStreamTypeString());
                            break;
                    }
                } // end of while
            }

            // Define the tracks, if we haven't already.
            // Note that for qboxes we really only care about formats (QBoxTrackFormat); tracks are just generic.
            if (MediaTracks.Count == 0 && qbox != null)
            {
                if (_audioBoxes.Count > 0)
                {
                    ushort audioTrackID = _audioBoxes[0].mSampleStreamId;
                    QBoxTrackFormat audioTrackFormat = new QBoxTrackFormat(_audioBoxes, audioTrackID, MediaTimes[audioTrackID]);
                    QBoxAudioTrack audioTrack = new QBoxAudioTrack(audioTrackFormat, this);
                    base.AddTrack(audioTrack);
                }

                foreach (ushort trackID in _videoTrackIDs)
                {
                    QBoxTrackFormat videoTrackFormat = new QBoxTrackFormat(_videoBoxes, trackID, MediaTimes[trackID]);
                    QBoxVideoTrack videoTrack = new QBoxVideoTrack(videoTrackFormat, this);
                    // Resume reading after the last box seen in this pass.
                    videoTrack.NextIndexToRead = (int)(qbox.mBoxContinuityCounter + 1);
                    if (DurationIn100NanoSecs < videoTrack.TrackDurationIn100NanoSecs)
                    {
                        this.DurationIn100NanoSecs = videoTrack.TrackDurationIn100NanoSecs;
                    }
                    base.AddTrack(videoTrack);
                }
            }
            else if (_audioBoxes.Count > 0 && _videoBoxes.Count > 0)
            {
                // Add qboxes to the existing track formats.
                foreach (GenericMediaTrack track in this.MediaTracks)
                {
                    // NOTE(review): assumes every track format here is a QBoxTrackFormat;
                    // a non-QBox format would make this null and throw below — confirm.
                    QBoxTrackFormat format = track.TrackFormat as QBoxTrackFormat;
                    if (track is GenericAudioTrack)
                    {
                        format.AddMore(_audioBoxes);
                    }
                    else
                    {
                        format.AddMore(_videoBoxes);
                    }
                }
            }

            if (currStreamLength < Stream.Length)
            {
                currStreamLength = Stream.Length;
                // If the duration we're getting from the last audio qbox is shorter than we already have, don't bother.
                ulong liveDuration = (ulong)GetDurationFromLastQBox(); // seek all the way forward and back, just to determine duration
                if (liveDuration > DurationIn100NanoSecs)
                {
                    DurationIn100NanoSecs = liveDuration;
                }
                // Might as well set audio and video durations.
                foreach (IMediaTrack track in MediaTracks)
                {
                    track.TrackDurationIn100NanoSecs = DurationIn100NanoSecs;
                }
            }
        }
Example #2
0
//nbl; removed as we shouldn't 'fix' bframe time stamps
        //private Dictionary<ushort, ulong> PrevTimeStamps = new Dictionary<ushort, ulong>();
        //private Dictionary<ushort, int> PrevIndices = new Dictionary<ushort, int>();

        /// <summary>
        /// Reads up to <paramref name="requestedBoxCount"/> QBoxes from the underlying
        /// binary stream, routes each box into the audio or video box list by its
        /// stream type, then either creates the media tracks (first call) or appends
        /// the new boxes to the existing track formats (subsequent calls).
        /// </summary>
        /// <param name="requestedBoxCount">Maximum number of QBoxes to read in this pass.</param>
        public override void LazyRead(int requestedBoxCount)
        {
            QBox qbox     = null;
            int  boxCount = 0;

            lock (_binaryReader.BaseStream)
            {
                // Clear out all qbox lists.
                // We expect the payload buffers to stay intact because these are now referenced in Slices.
                _audioBoxes.Clear();
                _videoBoxes.Clear();

                while ((boxCount < requestedBoxCount) && (_binaryReader.BaseStream.Position < _binaryReader.BaseStream.Length))
                {
                    try {
                        qbox = new QBox();
                        qbox.Read(_binaryReader);
                        if (MediaTimes[qbox.mSampleStreamId] == null)
                        {
                            MediaTimes[qbox.mSampleStreamId] = new MediaTimeUtils();
                        }
                        MediaTimes[qbox.mSampleStreamId].SetClockRate(((qbox.mSampleFlags & QBox.QBOX_SAMPLE_FLAGS_120HZ_CLOCK) != 0U));

                        // nbl: we can't fill in duration for b-frames as this doesn't make sense;
                        // the CTTS info is presentation time used for mp4 stuff, so no timestamp "fixing" here.

                        boxCount++;
                    }
                    catch (EndOfStreamException) {
                        // Expected when the stream ends mid-box: stop reading what we have so far.
                        break;
                    }
                    // Any other exception propagates unmodified. (The previous "catch (Exception ex)
                    // { throw ex; }" reset the stack trace; letting it bubble up preserves it.)

                    switch (qbox.SampleStreamTypeString())
                    {
                    case "AAC":
                    case "PCM":
                    case "MP2A":
                    case "Q711":
                    case "Q722":
                    case "Q726":
                    case "Q728":
                        _audioBoxes.Add(qbox);
                        break;

                    case "H264":
                    case "H264_SLICE":
                    case "JPEG":
                    case "MPEG2_ELEMENTARY":
                        if (!_videoTrackIDs.Contains(qbox.mSampleStreamId))
                        {
                            _videoTrackIDs.Add(qbox.mSampleStreamId);
                        }

                        _videoBoxes.Add(qbox);
                        break;

                    case "VIN_STATS_GLOBAL":
                    case "VIN_STATS_MB":
                    case "USER_METADATA":
                    case "DEBUG":
                    default:
                        System.Diagnostics.Debug.WriteLine("Unknown QBox: {0}", qbox.SampleStreamTypeString());
                        break;
                    }
                } // end of while
            }

            // Define the tracks, if we haven't already.
            // Note that for qboxes we really only care about formats (QBoxTrackFormat); tracks are just generic.
            if (MediaTracks.Count == 0 && qbox != null)
            {
                if (_audioBoxes.Count > 0)
                {
                    ushort          audioTrackID     = _audioBoxes[0].mSampleStreamId;
                    QBoxTrackFormat audioTrackFormat = new QBoxTrackFormat(_audioBoxes, audioTrackID, MediaTimes[audioTrackID]);
                    QBoxAudioTrack  audioTrack       = new QBoxAudioTrack(audioTrackFormat, this);
                    base.AddTrack(audioTrack);
                }

                foreach (ushort trackID in _videoTrackIDs)
                {
                    QBoxTrackFormat videoTrackFormat = new QBoxTrackFormat(_videoBoxes, trackID, MediaTimes[trackID]);
                    QBoxVideoTrack  videoTrack       = new QBoxVideoTrack(videoTrackFormat, this);
                    // Resume reading after the last box seen in this pass.
                    videoTrack.NextIndexToRead = (int)(qbox.mBoxContinuityCounter + 1);
                    if (DurationIn100NanoSecs < videoTrack.TrackDurationIn100NanoSecs)
                    {
                        this.DurationIn100NanoSecs = videoTrack.TrackDurationIn100NanoSecs;
                    }
                    base.AddTrack(videoTrack);
                }
            }
            else if (_audioBoxes.Count > 0 && _videoBoxes.Count > 0)
            {
                // Add qboxes to the existing track formats.
                foreach (GenericMediaTrack track in this.MediaTracks)
                {
                    // NOTE(review): assumes every track format here is a QBoxTrackFormat;
                    // a non-QBox format would make this null and throw below — confirm.
                    QBoxTrackFormat format = track.TrackFormat as QBoxTrackFormat;
                    if (track is GenericAudioTrack)
                    {
                        format.AddMore(_audioBoxes);
                    }
                    else
                    {
                        format.AddMore(_videoBoxes);
                    }
                }
            }

            if (currStreamLength < Stream.Length)
            {
                currStreamLength = Stream.Length;
                // If the duration we're getting from the last audio qbox is shorter than we already have, don't bother.
                ulong liveDuration = (ulong)GetDurationFromLastQBox(); // seek all the way forward and back, just to determine duration
                if (liveDuration > DurationIn100NanoSecs)
                {
                    DurationIn100NanoSecs = liveDuration;
                }
                // Might as well set audio and video durations.
                foreach (IMediaTrack track in MediaTracks)
                {
                    track.TrackDurationIn100NanoSecs = DurationIn100NanoSecs;
                }
            }
        }