Example #1
/// <summary>
/// Default constructor.
/// For writing to a QBox file.
/// </summary>
public QBoxTrackFormat()
{
    firstQB = null;
    audioMetaSample = null;
    videoMetaSample = null;
    DurationIn100NanoSecs = 0;
}
Example #2
/// <summary>
/// Constructor accepting a list of qboxes as input.
/// (For reading a QBox file.)
/// FIXME: we need to pick up the rest of the tracks (other than the first one)
/// </summary>
/// <param name="qboxes"></param>
public QBoxTrackFormat(List<QBox> qboxes, ushort trackID, MediaTimeUtils mediaTime)
    : this()
{
    _qBoxes = new List<QBox>();
    qboxes.ForEach(q => { if (q.mSampleStreamId == trackID) _qBoxes.Add(q); });
    if (_qBoxes.Count == 0)
        throw new Exception(string.Format("There is no track with ID = {0}", trackID));

    _mediaTime = mediaTime;

    HasIFrameBoxes = _qBoxes.Any(box => ((uint)box.mSampleFlags & QBox.QBOX_SAMPLE_FLAGS_SYNC_POINT) != 0);

    firstQB = _qBoxes[0];
    if (firstQB.mSampleStreamType == QBox.QBOX_SAMPLE_TYPE_H264)
    {
        Codec = new Codec(CodecTypes.Video);
        firstQB = _qBoxes.First(q => (q.mSampleFlags & QBox.QBOX_SAMPLE_FLAGS_CONFIGURATION_INFO) != 0u);
        if (firstQB.mSample.v != null)
        {
            this.videoMetaSample = firstQB.mSample;
            seqParamSetData = firstQB.mSample.v.sps;
            picParamSetData = firstQB.mSample.v.pps;
            Codec.PrivateCodecData = this.VideoCodecPrivateData;
        }
        else
        {
            Codec.PrivateCodecData = ToHexString(firstQB.mSample.privateCodecData);
        }
    }
    else if (firstQB.mSampleStreamType == QBox.QBOX_SAMPLE_TYPE_AAC)
    {
        Codec = new Codec(CodecTypes.Audio);
        firstQB = _qBoxes.First(q =>
            ((q.mSample.a != null) && ((q.mSampleFlags & QBox.QBOX_SAMPLE_FLAGS_META_INFO) != 0u)) ||
            ((q.mSample.qmed != null) && ((q.mSampleFlags & QBox.QBOX_SAMPLE_FLAGS_QMED_PRESENT) != 0u)));
        this.audioMetaSample = firstQB.mSample;

        if (audioMetaSample.privateCodecData != null)
        {
            Codec.PrivateCodecData = ToHexString(audioMetaSample.privateCodecData);
        }
        else
        {
#if USE_WAVEFORMATEX
            GetAudioPrivateCodecDataFromWaveFormatEx();
#else
            GetAudioPrivateCodecDataAdHoc();
#endif
        }
    }
    else if (firstQB.mSampleStreamType == QBox.QBOX_SAMPLE_TYPE_JPEG)
    {
        Codec = new Codec(CodecTypes.Video);
        if (firstQB.mSample.privateCodecData != null)
            Codec.PrivateCodecData = ToHexString(firstQB.mSample.privateCodecData);
    }
    else
    {
        throw new Exception(string.Format("QBox sample type not implemented: {0}", firstQB.mSampleStreamType));
    }
}
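
A minimal sketch of how this reading constructor might be driven (hypothetical caller; ParseAllQBoxes and reader are assumed names, while QBox, MediaTimeUtils, and QBoxTrackFormat are the types used above):

// Hypothetical driver: collect qboxes from a stream, then build a track
// format for one stream ID. ParseAllQBoxes is an assumed helper; Example #7's
// LazyRead shows how boxes are actually gathered in batches.
List<QBox> allBoxes = ParseAllQBoxes(reader);     // assumed helper
ushort trackID = allBoxes[0].mSampleStreamId;     // first track only (see FIXME above)
QBoxTrackFormat trackFormat = new QBoxTrackFormat(allBoxes, trackID, new MediaTimeUtils());
// trackFormat.Codec now reflects H264, AAC, or JPEG, depending on the first box's stream type.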
Example #3
// QBoxCompare is not used
private int QBoxCompare(QBox x, QBox y)
{
    if (x == null)
    {
        if (y == null)
        {
            // If x is null and y is null, they're equal.
            return 0;
        }
        else
        {
            // If x is null and y is not null, y is greater.
            return -1;
        }
    }
    else
    {
        // If x is not null...
        if (y == null)
        {
            // ...and y is null, x is greater.
            return 1;
        }
        else
        {
            // ...and y is not null, compare the time stamps of the two QBoxes.
            return x.mSampleCTS.CompareTo(y.mSampleCTS);
        }
    }
}
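
Although the comment notes it is unused, QBoxCompare matches the Comparison<QBox> signature, so inside the class it could order a box list by composition time stamp. A minimal sketch, assuming a list of parsed boxes:

// Hypothetical usage: sort a batch of qboxes by composition time stamp (mSampleCTS).
// Null entries sort first, since QBoxCompare returns -1 when only x is null.
List<QBox> boxes = new List<QBox>(_qBoxes);   // copy so the original order is kept
boxes.Sort(QBoxCompare);                      // List<T>.Sort accepts a Comparison<T>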
Example #4
public override void PrepareSampleWriting(List<StreamDataBlockInfo> sampleLocations, ref ulong currMdatOffset)
{
    bool firstSlice = true;
    long oneSecTicks = TimeSpan.FromSeconds(1.0).Ticks;
    _qBoxes.Clear(); // discard boxes from previous batch
    foreach (StreamDataBlockInfo blockInfo in sampleLocations)
    {
        ulong timeStamp = ((ulong)blockInfo.SliceDuration * (ulong)TimeScale) / (ulong)oneSecTicks;
        string trackType = (Codec.CodecType == CodecTypes.Audio)
                               ? "Audio"
                               : (Codec.CodecType == CodecTypes.Video) ? "Video" : "Unknown";
        ulong sampleFlags = ((blockInfo.SliceType == SliceType.IFrame) || (trackType == "Audio" && firstSlice))
                                ? QBox.QBOX_SAMPLE_FLAGS_SYNC_POINT
                                : 0;
        QBox qbox = new QBox(blockInfo.SliceSize, 0u, _prevTimeStamp, trackType, sampleFlags);
        _prevTimeStamp += timeStamp;
        _qBoxes.Add(qbox);
        // for audio, the first two qboxes are marked as sync points because the very first one is empty
        if (blockInfo.SliceSize > 0)
            firstSlice = false;
    }
    base.PrepareSampleWriting(sampleLocations, ref currMdatOffset); // to update duration
}
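
The timestamp arithmetic above converts each slice duration from 100-nanosecond ticks into media timescale units: duration * TimeScale / ticksPerSecond. A worked sketch with illustrative values only (the 90 kHz timescale is an assumption, not taken from this code):

// TimeSpan.FromSeconds(1.0).Ticks == 10,000,000, i.e. one tick is 100 ns.
ulong sliceDuration = 100000UL;  // a 10 ms slice expressed in 100-ns ticks
ulong timeScale = 90000UL;       // assumed media timescale (common for video)
ulong oneSecTicks = (ulong)TimeSpan.FromSeconds(1.0).Ticks;
ulong timeStamp = sliceDuration * timeScale / oneSecTicks;  // 900 timescale units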
Example #5
/// <summary>
/// QBoxTrackFormat
/// Constructor to use when writing out to a stream.
/// </summary>
/// <param name="trackInfo"></param>
public QBoxTrackFormat(IsochronousTrackInfo trackInfo)
    : this()
{
    _qBoxes = new List<QBox>();
    firstQB = new QBox(trackInfo);
    CodecTypes codecType = (trackInfo.HandlerType == "Audio")
                               ? CodecTypes.Audio
                               : (trackInfo.HandlerType == "Video") ? CodecTypes.Video : CodecTypes.Unknown;
    Codec = new Codec(codecType);
    Codec.PrivateCodecData = trackInfo.CodecPrivateData;
    DurationIn100NanoSecs = trackInfo.DurationIn100NanoSecs;
}
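
A sketch of the writing path under the same assumptions (GetSourceTrackInfo is a hypothetical helper standing in for wherever the source track description comes from):

// Hypothetical writing path: describe the source track, then create a format
// whose PrepareSampleWriting (Example #4) will emit qboxes for it.
IsochronousTrackInfo trackInfo = GetSourceTrackInfo();   // assumed helper
QBoxTrackFormat outFormat = new QBoxTrackFormat(trackInfo);
// outFormat.Codec.PrivateCodecData now carries the source's codec private data.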
Example #6
/// <summary>
/// GetDurationFromLastQBox
/// This is necessary for a dynamically growing stream.
/// It needs to be thread-safe because we set and then reset the stream Position.
/// To avoid deadlocks, remove the mutex and call this from LazyRead.
/// </summary>
/// <returns></returns>
public ulong GetDurationFromLastQBox()
{
    if (Stream.CanSeek == false)
        return 0;

    lock (base.Stream)
    {
        long currentPos = Stream.Position; // store where we are currently...

        // scan backwards from the end of the stream for a qbox that carries a stream duration
        BinaryReader br = new BinaryReader(base.Stream);
        br.BaseStream.Position = br.BaseStream.Length - 8;
        while (true)
        {
            if (QBox.SeekPrevQBox(br))
            {
                QBox box = new QBox();
                long boxPos = br.BaseStream.Position;
                box.Read(br);

                if (box.mStreamDuration != 0)
                {
                    br.BaseStream.Position = currentPos;
                    return MediaTimes[box.mSampleStreamId].TicksToTime(box.mStreamDuration, MediaTimeUtils.TimeUnitType.OneHundredNanoSeconds);
                }

                if (br.BaseStream.Position - 8 <= 0)
                {
                    break; // no more data to read...
                }

                br.BaseStream.Position = boxPos - 8; // rewind a bit...
            }
            else
            {
                break; // can't find a previous qbox!!
            }
        }
        br.BaseStream.Position = currentPos;
    }
    return 0;
}
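
For a file that is still being written, a caller might poll this method to track the growing duration. A minimal sketch, assuming qboxFile owns the stream and exposes the method above:

// Hypothetical polling loop for a dynamically growing stream.
while (fileIsGrowing)   // assumed condition maintained elsewhere
{
    ulong duration = qboxFile.GetDurationFromLastQBox();   // 0 if no duration found yet
    if (duration > 0)
        Console.WriteLine("Duration so far: {0} (100-ns units)", duration);
    System.Threading.Thread.Sleep(1000);                   // poll once per second
}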
Example #7
//nbl; removed as we shouldn't 'fix' bframe time stamps
//private Dictionary<ushort, ulong> PrevTimeStamps = new Dictionary<ushort, ulong>();
//private Dictionary<ushort, int> PrevIndices = new Dictionary<ushort, int>();
public override void LazyRead(int requestedBoxCount)
{
    QBox qbox = null;
    int boxCount = 0;

    lock (_binaryReader.BaseStream)
    {
        // clear out all qbox lists;
        // we expect the payload buffers to stay intact because these are now referenced in Slices
        _audioBoxes.Clear();
        _videoBoxes.Clear();

        while ((boxCount < requestedBoxCount) && (_binaryReader.BaseStream.Position < _binaryReader.BaseStream.Length))
        {
            try
            {
                qbox = new QBox();
                qbox.Read(_binaryReader);
                if (MediaTimes[qbox.mSampleStreamId] == null)
                    MediaTimes[qbox.mSampleStreamId] = new MediaTimeUtils();
                MediaTimes[qbox.mSampleStreamId].SetClockRate((qbox.mSampleFlags & QBox.QBOX_SAMPLE_FLAGS_120HZ_CLOCK) != 0U);

                //nbl; we can't fill in duration for bframes as this doesn't make sense...
                // the CTTS info is presentation time used for mp4 stuff
                //qbox.FixTimeStamp(PrevTimeStamps, PrevIndices); // <---- Kludge! Some qboxes may have mStreamDuration reset, fix it here

                boxCount++;
            }
            catch (EndOfStreamException)
            {
                // a truncated qbox at the end of a growing stream simply ends this batch;
                // any other exception propagates with its stack trace intact
                break;
            }

            switch (qbox.SampleStreamTypeString())
            {
                case "AAC":
                case "PCM":
                case "MP2A":
                case "Q711":
                case "Q722":
                case "Q726":
                case "Q728":
                    _audioBoxes.Add(qbox);
                    break;
                case "H264":
                case "H264_SLICE":
                case "JPEG":
                case "MPEG2_ELEMENTARY":
                    if (!_videoTrackIDs.Contains(qbox.mSampleStreamId))
                        _videoTrackIDs.Add(qbox.mSampleStreamId);
                    _videoBoxes.Add(qbox);
                    break;
                case "VIN_STATS_GLOBAL":
                case "VIN_STATS_MB":
                case "USER_METADATA":
                case "DEBUG":
                default:
                    System.Diagnostics.Debug.WriteLine(string.Format("Unknown QBox: {0}", qbox.SampleStreamTypeString()));
                    break;
            }
        } // end of while
    }

    // define the tracks, if we haven't already;
    // note that for qboxes, we really only care about formats (QBoxTrackFormat), and tracks are just generic
    if (MediaTracks.Count == 0 && qbox != null)
    {
        if (_audioBoxes.Count > 0)
        {
            ushort audioTrackID = _audioBoxes[0].mSampleStreamId;
            QBoxTrackFormat audioTrackFormat = new QBoxTrackFormat(_audioBoxes, audioTrackID, MediaTimes[audioTrackID]);
            QBoxAudioTrack audioTrack = new QBoxAudioTrack(audioTrackFormat, this);
            //audioTrack.NextIndexToRead = tempIndices[audioTrackID];
            //GenericAudioTrack audioTrack = new GenericAudioTrack(audioTrackFormat, this);
            //this.Duration = audioTrack.TrackDuration;
            //this.TimeScale = (uint)audioTrack.SampleRate;
            base.AddTrack(audioTrack);
        }

        foreach (ushort trackID in _videoTrackIDs)
        {
            QBoxTrackFormat videoTrackFormat = new QBoxTrackFormat(_videoBoxes, trackID, MediaTimes[trackID]);
            QBoxVideoTrack videoTrack = new QBoxVideoTrack(videoTrackFormat, this);
            videoTrack.NextIndexToRead = (int)(qbox.mBoxContinuityCounter + 1);
            if (DurationIn100NanoSecs < videoTrack.TrackDurationIn100NanoSecs)
                this.DurationIn100NanoSecs = videoTrack.TrackDurationIn100NanoSecs;
            //this.TimeScale = videoTrack.TrackFormat.TimeScale;
            base.AddTrack(videoTrack);
        }
    }
    else if (_audioBoxes.Count > 0 && _videoBoxes.Count > 0)
    {
        // add qboxes to existing track formats
        foreach (GenericMediaTrack track in this.MediaTracks)
        {
            QBoxTrackFormat format = track.TrackFormat as QBoxTrackFormat;
            if (track is GenericAudioTrack)
                format.AddMore(_audioBoxes);
            else
                format.AddMore(_videoBoxes);
        }
    }

    if (currStreamLength < Stream.Length)
    {
        currStreamLength = Stream.Length;
        // if the duration we get from the last qbox is shorter than what we already have, don't bother
        ulong liveDuration = GetDurationFromLastQBox(); // seek all the way forward and back, just to determine duration
        if (liveDuration > DurationIn100NanoSecs)
            DurationIn100NanoSecs = liveDuration;
        // might as well set audio and video durations
        foreach (IMediaTrack track in MediaTracks)
            track.TrackDurationIn100NanoSecs = DurationIn100NanoSecs;
    }
}
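
A sketch of how LazyRead batches might be consumed (demux stands in for the object implementing LazyRead; the batch size of 500 is arbitrary):

// Hypothetical consumer: pull qboxes in fixed-size batches until the stream
// is exhausted. The first call defines the tracks; later calls append new
// boxes to the existing track formats via AddMore.
while (demux.Stream.Position < demux.Stream.Length)
{
    demux.LazyRead(500);   // arbitrary batch size
}
// demux.MediaTracks now holds one audio track plus one video track per stream ID.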