Example 1
 /// <summary>
 /// Default constructor.
 /// For writing to a QBox file.
 /// </summary>
 public QBoxTrackFormat()
 {
     firstQB               = null;
     audioMetaSample       = null;
     videoMetaSample       = null;
     DurationIn100NanoSecs = 0;
 }
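
The parameterless constructor simply resets the first-QBox reference, the cached audio/video metadata samples, and the duration; the reading constructor in Example 2 chains to it with : this() before filtering the incoming QBox list down to the requested track.
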
Example 2
        /// <summary>
        /// Constructor accepting a list of qboxes as input.
        /// (For reading a QBox file.)
        /// FIXME: we need to pick up the rest of the tracks (other than the first one)
        /// </summary>
        /// <param name="qboxes">All QBoxes read from the file; only those matching trackID are kept.</param>
        /// <param name="trackID">Stream ID of the track this format describes.</param>
        /// <param name="mediaTime">Time utility instance stored for this track.</param>
        public QBoxTrackFormat(List<QBox> qboxes, ushort trackID, MediaTimeUtils mediaTime)
            : this()
        {
            // Keep only the QBoxes that belong to the requested track.
            _qBoxes = qboxes.Where(q => q.mSampleStreamId == trackID).ToList();
            if (_qBoxes.Count == 0)
            {
                throw new Exception(string.Format("There is no track with ID = {0}", trackID));
            }

            _mediaTime = mediaTime;

            HasIFrameBoxes = _qBoxes.Any(box => (((uint)box.mSampleFlags & QBox.QBOX_SAMPLE_FLAGS_SYNC_POINT) != 0));

            firstQB = _qBoxes[0];
            if (firstQB.mSampleStreamType == QBox.QBOX_SAMPLE_TYPE_H264)
            {
                Codec   = new Codec(CodecTypes.Video);
                firstQB = _qBoxes.First(q => ((q.mSampleFlags & QBox.QBOX_SAMPLE_FLAGS_CONFIGURATION_INFO) != 0u));
                if (firstQB.mSample.v != null)
                {
                    this.videoMetaSample   = firstQB.mSample;
                    seqParamSetData        = firstQB.mSample.v.sps;
                    picParamSetData        = firstQB.mSample.v.pps;
                    Codec.PrivateCodecData = this.VideoCodecPrivateData;
                }
                else
                {
                    Codec.PrivateCodecData = ToHexString(firstQB.mSample.privateCodecData);
                }
            }
            else if (firstQB.mSampleStreamType == QBox.QBOX_SAMPLE_TYPE_AAC)
            {
                Codec   = new Codec(CodecTypes.Audio);
                firstQB =
                    _qBoxes.First(q => ((q.mSample.a != null) && ((q.mSampleFlags & QBox.QBOX_SAMPLE_FLAGS_META_INFO) != 0u)) ||
                                  ((q.mSample.qmed != null) && ((q.mSampleFlags & QBox.QBOX_SAMPLE_FLAGS_QMED_PRESENT) != 0u)));
                this.audioMetaSample = firstQB.mSample;

                if (audioMetaSample.privateCodecData != null)
                {
                    Codec.PrivateCodecData = ToHexString(audioMetaSample.privateCodecData);
                }
                else
                {
#if USE_WAVEFORMATEX
                    GetAudioPrivateCodecDataFromWaveFormatEx();
#else
                    GetAudioPrivateCodecDataAdHoc();
#endif
                }
            }
            else if (firstQB.mSampleStreamType == QBox.QBOX_SAMPLE_TYPE_JPEG)
            {
                Codec = new Codec(CodecTypes.Video);
                if (firstQB.mSample.privateCodecData != null)
                {
                    Codec.PrivateCodecData = ToHexString(firstQB.mSample.privateCodecData);
                }
            }
            else
            {
                throw new Exception(string.Format("QBox sample type not implemented: {0}", firstQB.mSampleStreamType));
            }
        }
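
As a rough usage sketch (not part of the original source): the reading constructor above might be driven as follows. ParseAllQBoxes, the parameterless MediaTimeUtils construction, and the public visibility of the Codec property are assumptions made here for illustration only.

using System;
using System.Collections.Generic;

public static class QBoxTrackFormatUsage
{
    public static void Main()
    {
        // Hypothetical: obtain the parsed QBoxes and a time helper from elsewhere.
        List<QBox> allBoxes = ParseAllQBoxes("recording.qbox"); // assumed helper, not shown above
        MediaTimeUtils mediaTime = new MediaTimeUtils();        // construction details assumed

        const ushort trackID = 1; // stream ID of the track we want

        try
        {
            // Filters allBoxes down to trackID and derives codec info from the first matching QBox.
            QBoxTrackFormat track = new QBoxTrackFormat(allBoxes, trackID, mediaTime);
            Console.WriteLine("Private codec data: " + track.Codec.PrivateCodecData); // assumes Codec is readable here
        }
        catch (Exception ex)
        {
            // Thrown when no QBox carries trackID, or when the first QBox
            // has an unsupported sample stream type.
            Console.WriteLine(ex.Message);
        }
    }

    // Placeholder for whatever parsing layer produces the QBox list.
    private static List<QBox> ParseAllQBoxes(string path)
    {
        throw new NotImplementedException();
    }
}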