Example #1
0
File: QBox.cs  Project: ctapang/GPUCyclops
            /// <summary>
            /// ReadQMed
            /// Read different types of QMed.
            /// Instantiates the QMed subclass matching sampleStreamType, reads its header from br,
            /// then reads the remaining bytes either into privateCodecData (config boxes) or mPayload.
            /// Side-effects: sets qmed and privateCodecData vars; advances mSampleHeaderSize.
            /// </summary>
            /// <param name="br">BinaryReader positioned at the start of the QMed header</param>
            /// <param name="sampleStreamType">QBOX_SAMPLE_TYPE_* value selecting which QMed subclass to create</param>
            /// <param name="inTotalSize">total remaining sample size in bytes, including the QMed header</param>
            /// <param name="sampleFlags">QBOX_SAMPLE_FLAGS_* bits; CONFIGURATION_INFO diverts trailing bytes into privateCodecData instead of mPayload</param>
            private void ReadQMed(BinaryReader br, ushort sampleStreamType, int inTotalSize, ulong sampleFlags)
            {
                long byteCount = 0;
                // Pick the QMed subclass for this stream type; the common header read
                // happens after the switch (except the AAC ADTS path, which reads here).
                switch ((uint)sampleStreamType)
                {
                  case QBOX_SAMPLE_TYPE_AAC:
                QMed.QMedAAC qmedaac = new QMed.QMedAAC();
                qmedaac.payloadSize = inTotalSize - mSampleHeaderSize;
                #if ADTS
                // NOTE(review): 'cts' is not declared anywhere in this fragment -- presumably an
                // instance field; confirm this path compiles when ADTS is defined.
                qmedaac.Read(br, cts);
                #endif
                qmed = qmedaac;
                break;
                  //case QBOX_SAMPLE_TYPE_H264:
                  //case QBOX_SAMPLE_TYPE_H264_SLICE:
                  //  QMed.QMedH264 qmedh264 = new QMed.QMedH264();
                  //  qmed = qmedh264;
                  //  break;
                  case QBOX_SAMPLE_TYPE_QPCM:
                QMed.QMedPCM qmedpcm = new QMed.QMedPCM();
                qmed = qmedpcm;
                break;
                  case QBOX_SAMPLE_TYPE_Q711:
                QMed.QMed711 qmed711 = new QMed.QMed711();
                qmed = qmed711;
                break;
                  case QBOX_SAMPLE_TYPE_Q722:
                QMed.QMed722 qmed722 = new QMed.QMed722();
                qmed = qmed722;
                break;
                  case QBOX_SAMPLE_TYPE_Q726:
                QMed.QMed726 qmed726 = new QMed.QMed726();
                qmed = qmed726;
                break;
                  case QBOX_SAMPLE_TYPE_Q728:
                QMed.QMed728 qmed728 = new QMed.QMed728();
                qmed = qmed728;
                break;
                  case QBOX_SAMPLE_TYPE_JPEG:
                // unknown 12-byte jpeg prefix
                byte[] unknown = new byte[12];
                br.Read(unknown, 0, 12);
                QMed.QMedJpeg qmedJpeg = new QMed.QMedJpeg();
                qmed = qmedJpeg;
                break;
                  case QBOX_SAMPLE_TYPE_MPEG2_ELEMENTARY:
                  case QBOX_SAMPLE_TYPE_USER_METADATA:
                  case QBOX_SAMPLE_TYPE_QMA:
                  case QBOX_SAMPLE_TYPE_DEBUG:
                  case QBOX_SAMPLE_TYPE_VIN_STATS_GLOBAL:
                  case QBOX_SAMPLE_TYPE_VIN_STATS_MB:
                // NOTE(review): these types leave 'qmed' unassigned; the dereferences of qmed
                // below will fail if it is still null at this point -- confirm a caller or a
                // field initializer guarantees it is set.
                break;
                  default:
                throw new Exception(string.Format("Unexpected QBox type: {0}", sampleStreamType));
                }

                // Read the generic QMed header and validate its declared box size.
                int count;
                #if ADTS
                // NOTE(review): when qmed IS a QMedAAC, 'count' is never assigned but is read
                // below -- a definite-assignment error (CS0165) if ADTS is defined; confirm.
                if (qmed.GetType() != typeof(QMed.QMedAAC))
                 count = qmed.Read(br);
                #else
                count = qmed.Read(br);
                #endif
                if (count != (int)qmed.boxSize)
                  throw new Exception("QMed header count inconsistent");

                mSampleHeaderSize += (int)qmed.boxSize;

                // Whatever was not consumed by the QMed header is payload (or codec config).
                byteCount = inTotalSize - count;

                if (byteCount < 0)
                  throw new Exception("QMed read: bad box size");

                if (byteCount > 0)
                {
                  // read-in data; set mPayload, except when it's a config box
                  if ((sampleFlags & QBOX_SAMPLE_FLAGS_CONFIGURATION_INFO) != 0)
                  {
                // get private codec data
                privateCodecData = br.ReadBytes((int)byteCount);
                mSampleHeaderSize += (int)byteCount;
                byteCount = 0;
                  }
                  else // read payload now
                  {
                mPayload = br.ReadBytes((int)byteCount);
                  }
                }
            }
Example #2
0
        /// <summary>
        /// PrepareSampleReading
        /// There are two signatures for this method: one that accepts qbox indices (see above), and another that accepts ulong start
        /// and end times (this one).
        ///
        /// This overload only translates the requested time window into start/end qbox indices;
        /// the actual sample collection is delegated to the index-based overload at the end.
        /// If we run out of qboxes, we read-in more.
        /// </summary>
        /// <param name="inStartSampleTime">QBoxes with time stamps equal to or more than this are included in the output list</param>
        /// <param name="inEndSampleTime">QBoxes with time stamps equal to or less than this are included in the output list</param>
        /// <param name="dummy">unused</param>
        /// <returns>list of StreamDataBlockInfo produced by the index-based overload</returns>
        public override List <StreamDataBlockInfo> PrepareSampleReading(UInt64 inStartSampleTime, UInt64 inEndSampleTime,
                                                                        ref ulong dummy)
        {
            if (_qBoxes.Count == 0)
            {
                return(new List <StreamDataBlockInfo>()); // empty list
            }
            long  oneSecTicks          = TimeSpan.FromSeconds(1.0).Ticks;
            // NOTE(review): if TimeScale is an integral type, this division runs with integer
            // semantics before the implicit float conversion -- confirm TimeScale's declared type.
            float scaleFactor          = oneSecTicks / this.TimeScale;
            ulong averageSliceDuration = 0UL; // accumulated per box, averaged over boxCount below

            int   boxCount  = 0;
            ulong timeStamp = 0UL;

            int  startIndex = 0;
            int  endIndex   = 0;
            bool startSet   = false;

            // we traverse the _qBoxes list from the beginning (one of two traversals, because we call the other PrepareSampleReading after this)
            // the purpose of this traversal is just to determine the start and end indices.
            // FIXME: we should optimize the search for the first qbox (we can use binary search if we first convert all mSampleCTS to mean
            // the same thing -- a time stamp) CCT.
            for (int i = 0; i < _qBoxes.Count; i++)
            {
                QBox box = _qBoxes[i];

                boxCount++;

                // reject qboxes with sample size zero (no data)
                if (box.mSampleSize == 0)
                {
                    boxCount--;
                    continue;
                }

                // time stamp of the START of this box: total stream duration minus this box's own duration
                timeStamp             = (ulong)(scaleFactor * (box.mStreamDuration - box.mSampleDuration));
                averageSliceDuration += (ulong)(scaleFactor * box.mSampleDuration);

                if (!startSet)
                {
                    // the first qbox should be the start because we dispose of qboxes already processed
                    if (((uint)box.mSampleFlags & QBox.QBOX_SAMPLE_FLAGS_SYNC_POINT) != 0)
                    {
                        startIndex = (int)box.mFrameCounter - 1;
                        startSet   = true;
                    }

                    if (!startSet)
                    {
                        throw new Exception("Track problem: first box in queue is not sync point");
                    }
                }

                // stop at the first sync point at or beyond the requested end time
                if ((((uint)box.mSampleFlags & QBox.QBOX_SAMPLE_FLAGS_SYNC_POINT) != 0) && (inEndSampleTime <= timeStamp))
                // NearEnd(boxCount, inEndSampleTime, timeStamp, scaleFactor))
                {
                    endIndex = (int)box.mFrameCounter - 1;
                    // do not put this sync box in List; it should instead be the first box in next fragment
                    break;
                }

#if ADTS
                // FIXME(review): 'datBlock' is not declared in this overload, so this region cannot
                // compile when ADTS is defined; it appears to have been copied from the index-based
                // overload. Kept for reference -- confirm intent and remove or repair.
                if (box.mSampleStreamType == QBox.QBOX_SAMPLE_TYPE_AAC)
                {
                    QMed.QMedAAC qmedaac = (QMed.QMedAAC)box.mSample.qmed;
#if PES
                    datBlock.PESandADTSHeaders = new byte[qmedaac.pesHeader.Length + qmedaac.adtsHeader.Length];
                    qmedaac.pesHeader.CopyTo(datBlock.PESandADTSHeaders, 0);
                    qmedaac.adtsHeader.CopyTo(datBlock.PESandADTSHeaders, qmedaac.pesHeader.Length);
#else
                    datBlock.PESandADTSHeaders = new byte[qmedaac.adtsHeader.Length];
                    qmedaac.adtsHeader.CopyTo(datBlock.PESandADTSHeaders, 0);
#endif
                    datBlock.SampleSize += datBlock.PESandADTSHeaders.Length;
                }
#endif

                // if we are looking at the last queued box, pull in another batch so the loop can continue
                if (box == _qBoxes.Last())
                {
                    base.GetNextBatch(GenericMediaStream.MAX_BOXES_TO_READ, 0);
                    // callee should set end FIXME: is box.mCurrentPosition being set?
                }
            } // end of for loop

            // we did not find the end, which means we ran out of qboxes to process;
            // estimate the end index from the average slice duration seen so far
            if (endIndex == 0)
            {
                // NOTE(review): boxCount (and hence the average) can be zero if every box had
                // zero sample size, which would make these divisions throw -- confirm whether
                // that state is reachable.
                averageSliceDuration /= (uint)boxCount;
                int desiredBoxCount =
                    (int)(((inEndSampleTime - inStartSampleTime) + averageSliceDuration) / averageSliceDuration);
                endIndex = startIndex + desiredBoxCount;
            }

            if (startIndex == endIndex)
            {
                throw new Exception("Traversing QBoxes did not yield any qbox.");
            }

            return(PrepareSampleReading(startIndex, endIndex, ref dummy));
        }
Example #3
0
File: QBox.cs  Project: ctapang/GPUCyclops
 /// <summary>
 /// QBoxSample constructor.
 /// Selects the per-sample metadata representation from the sample flags:
 /// META_INFO creates a QBoxMetaV (H264) or QBoxMetaA (QMA/AAC) with a fixed header size;
 /// QMED_PRESENT creates the QMed subclass matching the stream type and takes its boxSize
 /// as the header size; otherwise the header size is zero.
 /// </summary>
 /// <param name="dataSize">sample data size in bytes (NOTE(review): unused here -- confirm callers rely on it elsewhere)</param>
 /// <param name="sampleFlags">QBOX_SAMPLE_FLAGS_* bits controlling which branch runs</param>
 /// <param name="sampleStreamType">QBOX_SAMPLE_TYPE_* value selecting the meta/QMed subclass</param>
 public QBoxSample(int dataSize, ulong sampleFlags, ushort sampleStreamType)
 {
     if ((sampleFlags & QBox.QBOX_SAMPLE_FLAGS_META_INFO) != 0)
     {
       if (sampleStreamType == QBox.QBOX_SAMPLE_TYPE_H264)
       {
     // NOTE(review): 'v' can only be non-null here via a field initializer; confirm.
     if (v != null)
       throw new Exception("QBoxSample.constructor: QBoxMetaV v already set");
     v = new QBoxMetaV();
     mSampleHeaderSize = 32;
       }
       else if ((sampleStreamType == QBox.QBOX_SAMPLE_TYPE_QMA) || (sampleStreamType == QBox.QBOX_SAMPLE_TYPE_AAC))
       {
     if (a != null)
       throw new Exception("There should only be one QBoxMetaA for audio");
     a = new QBoxMetaA();
     mSampleHeaderSize = 24;
       }
       else throw new Exception("QBoxSample.Read: Sample stream type not found.");
     }
     else if ((sampleFlags & QBox.QBOX_SAMPLE_FLAGS_QMED_PRESENT) > 0)
     {
       switch ((uint)sampleStreamType)
       {
     case QBOX_SAMPLE_TYPE_AAC:
       QMed.QMedAAC qmedaac = new QMed.QMedAAC();
       qmed = qmedaac;
       break;
     //case QBOX_SAMPLE_TYPE_H264:
     //case QBOX_SAMPLE_TYPE_H264_SLICE:
     //  QMed.QMedH264 qmedh264 = new QMed.QMedH264();
     //  qmed = qmedh264;
     //  break;
     case QBOX_SAMPLE_TYPE_QPCM:
       QMed.QMedPCM qmedpcm = new QMed.QMedPCM();
       qmed = qmedpcm;
       break;
     case QBOX_SAMPLE_TYPE_Q711:
       QMed.QMed711 qmed711 = new QMed.QMed711();
       qmed = qmed711;
       break;
     case QBOX_SAMPLE_TYPE_Q722:
       QMed.QMed722 qmed722 = new QMed.QMed722();
       qmed = qmed722;
       break;
     case QBOX_SAMPLE_TYPE_Q726:
       QMed.QMed726 qmed726 = new QMed.QMed726();
       qmed = qmed726;
       break;
     case QBOX_SAMPLE_TYPE_Q728:
       QMed.QMed728 qmed728 = new QMed.QMed728();
       qmed = qmed728;
       break;
     case QBOX_SAMPLE_TYPE_JPEG:
     case QBOX_SAMPLE_TYPE_MPEG2_ELEMENTARY:
     case QBOX_SAMPLE_TYPE_USER_METADATA:
     case QBOX_SAMPLE_TYPE_QMA:
     case QBOX_SAMPLE_TYPE_DEBUG:
     case QBOX_SAMPLE_TYPE_VIN_STATS_GLOBAL:
     case QBOX_SAMPLE_TYPE_VIN_STATS_MB:
       // recognized types that carry no QMed header; qmed stays null
       break;
     default:
       throw new Exception(string.Format("Unknown QMed type: {0}", sampleStreamType));
       }
       // BUGFIX: the fall-through cases above leave qmed null; the original dereferenced
       // qmed.boxSize unconditionally and threw NullReferenceException for those types.
       // Default to 0, matching the "no header" branch below.
       mSampleHeaderSize = (qmed != null) ? (int)qmed.boxSize : 0;
     }
     else
     {
       mSampleHeaderSize = 0;
     }
 }
Example #4
0
        /// <summary>
        /// PrepareSampleReading
        /// In MP4, reading of box headers is separate from reading of the H264 and audio bits. This is because the bits are stored
        /// in a different place in the file (or may in fact be in a separate file). In a QBox file, however, both headers and bits
        /// are stored in the qbox. It makes no sense to separate the two. Therefore, in this implementation of PrepareSampleReading,
        /// we actually read the bits together with the headers. The routine WriteSamples doesn't do much.
        ///
        /// There are two signatures for this method: one that accepts qbox indices (this one), and another that accepts ulong start
        /// and end times.
        ///
        /// We don't keep the qboxes. QBoxes already processed are disposed of as a last step. If we run out of qboxes, we read-in
        /// more.
        /// </summary>
        /// <param name="inStartSampleIndex">int index to first qbox to be processed</param>
        /// <param name="inEndSampleIndex">int index to last qbox to be processed</param>
        /// <param name="dummy">not used</param>
        /// <returns>list of StreamDataBlockInfo entries (Slice-derived), one per accepted qbox</returns>
        public override List <StreamDataBlockInfo> PrepareSampleReading(int inStartSampleIndex, int inEndSampleIndex,
                                                                        ref ulong dummy)
        {
            List <StreamDataBlockInfo> retList = new List <StreamDataBlockInfo>();

            if (_qBoxes.Count == 0)
            {
                return(retList);
            }


            // NOTE(review): if TimeScale is an integral type this divides with integer semantics
            // before the implicit float conversion -- confirm TimeScale's declared type.
            float scaleFactor     = TimeSpan.FromSeconds(1.0).Ticks / this.TimeScale;
            bool  foundFirstSlice = false;
            int   boxCount        = 0;

            // we traverse the _qBoxes list from the beginning;
            // can't use foreach because _qBoxes can change;
            // box.mIndex is NOT the same as index i.
            // we use a for loop only because we are adding qboxes to _qBoxes as part of the loop
            for (int i = 0; i < _qBoxes.Count; i++)
            {
                QBox box = _qBoxes[i];
                boxCount++;

                // reject qboxes with sample size zero (no data)
                if (box.mSampleSize == 0)
                {
                    continue;
                }

                // we shouldn't be searching for the first box of interest, because it should always be the first one
                // it should always be the first one because we threw away all boxes already processed
                if (((ulong)inStartSampleIndex > (box.mFrameCounter - 1)) ||
                    ((!foundFirstSlice) && (((uint)box.mSampleFlags & QBox.QBOX_SAMPLE_FLAGS_SYNC_POINT) == 0)))
                {
                    continue; // skip
                }
                else if ((ulong)inStartSampleIndex == (box.mFrameCounter - 1))
                {
                    foundFirstSlice = true;
                }
                else if (!foundFirstSlice)
                {
                    // overshot without finding the requested start: reload from that index and restart the scan
                    _qBoxes.Clear();
                    base.GetNextBatch(0, inStartSampleIndex); // throw new Exception("First IFrame not found");
                    i        = -1;                            // this gets incremented to zero
                    boxCount = 0;                             // start all over
                    continue;
                }

                // map the qbox stream type to a data block subtype and slice type
                StreamDataBlockInfo datBlock = new Slice();

                switch (box.SampleStreamTypeString())
                {
                case "AAC":
                    datBlock           = new ADTSDataBlockInfo();
                    datBlock.SliceType = SliceType.AAC;
                    break;

                case "Q711":
                case "PCM":
                    datBlock.SliceType = SliceType.Unknown; // FIXME: add sample type for PCM
                    break;

                case "MP2A":
                    datBlock.SliceType = SliceType.MP4A;
                    break;

                case "Q722": // ADPCM
                case "Q726":
                case "Q728":
                    datBlock.SliceType = SliceType.Unknown; // FIXME: add sample type for ADPCM
                    break;

                case "H264":
                case "H264_SLICE":
                    datBlock = new NaluDelimiterBlockInfo();
                    // sync-point flag distinguishes I-frames from dependent frames
                    if (((uint)box.mSampleFlags & QBox.QBOX_SAMPLE_FLAGS_SYNC_POINT) == 0)
                    {
                        datBlock.SliceType = SliceType.DFrame;
                    }
                    else
                    {
                        datBlock.SliceType = SliceType.IFrame;
                    }
                    if ((box.mSample != null) && (box.mSample.v != null))
                    {
                        NaluDelimiterBlockInfo blockInfo = datBlock as NaluDelimiterBlockInfo;
                        blockInfo.AccessUnitDelimiter = box.mSample.v.aud;
                    }
                    break;

                case "JPEG":
                    datBlock.SliceType = SliceType.JPEG;
                    break;

                case "MPEG2_ELEMENTARY":
                    datBlock.SliceType = SliceType.Unknown; // FIXME: add sample type for MPEG2
                    break;

                case "VIN_STATS_GLOBAL":
                case "VIN_STATS_MB":
                case "USER_METADATA":
                case "DEBUG":
                default:
                    // BUGFIX: the original two-argument call bound to the
                    // Debug.WriteLine(message, category) overload, so the literal "{0}" was
                    // printed and the type string became the trace category. Format explicitly.
                    System.Diagnostics.Debug.WriteLine(string.Format("Unknown QBox: {0}", box.SampleStreamTypeString()));
                    break;
                }

                // CTS relative to the start of this slice, rescaled to ticks
                datBlock.CTS           = (ulong)((box.mSampleCTS - (box.mStreamDuration - box.mSampleDuration)) * scaleFactor);
                datBlock.SliceDuration = (uint)(scaleFactor * box.mSampleDuration);
                if (box.mFrameCounter == 0 && box.mStreamDuration == 0)
                {
                    datBlock.TimeStampNew = 0;
                }
                else if (box.mStreamDuration == 0)
                {
                    datBlock.TimeStampNew = null; // unknown time stamp for this box
                }
                else
                {
                    datBlock.TimeStampNew = (ulong)(scaleFactor * (box.mStreamDuration - box.mSampleDuration));
                }
                datBlock.SliceSize = box.mSampleSize;
                datBlock.index     = (int)box.mFrameCounter - 1; // boxCount;

                // NOTE! For qbox, StreamOffset has a different meaning than in MP4.
                // Here, StreamOffset is the offset to the qbox itself; whereas in
                // MP4, StreamOffset is the offset to the H264 payload.
                // In GenericMediaTrack.GetSample, StreamOffset is used as in MP4, but
                // this method is overriden by another in QBoxVideoTrack that does not use StreamOffset.
                // For flashback to work for both MP4 and qbox files, the caching mechanism
                // is different in MP4 from than in qbox.
                datBlock.StreamOffset = (ulong)box.mHeaderPosition; // needed for flashback to work

                // set payload
                // NOTE(review): assumes every datBlock subtype created above derives from Slice -- confirm.
                Slice slice = datBlock as Slice;
                slice.SliceBytes = box.mSample.mPayload;

#if ADTS
                if (box.mSampleStreamType == QBox.QBOX_SAMPLE_TYPE_AAC)
                {
                    QMed.QMedAAC qmedaac = (QMed.QMedAAC)box.mSample.qmed;
#if PES
                    datBlock.PESandADTSHeaders = new byte[qmedaac.pesHeader.Length + qmedaac.adtsHeader.Length];
                    qmedaac.pesHeader.CopyTo(datBlock.PESandADTSHeaders, 0);
                    qmedaac.adtsHeader.CopyTo(datBlock.PESandADTSHeaders, qmedaac.pesHeader.Length);
#else
                    datBlock.PESandADTSHeaders = new byte[qmedaac.adtsHeader.Length];
                    qmedaac.adtsHeader.CopyTo(datBlock.PESandADTSHeaders, 0);
#endif
                    datBlock.SampleSize += datBlock.PESandADTSHeaders.Length;
                }
#endif
                if (datBlock.SliceDuration == 0)
                {
                    // NOTE(review): this recomputes the same expression assigned above, so it is
                    // effectively a no-op -- confirm what fallback duration was intended here.
                    datBlock.SliceDuration = (uint)(scaleFactor * box.mSampleDuration); // any non-zero duration is better
                }

                if ((((uint)box.mSampleFlags & QBox.QBOX_SAMPLE_FLAGS_SYNC_POINT) != 0) && ((box.mFrameCounter - 1) >= (ulong)inEndSampleIndex))
                {
                    boxCount--;
                    break; // don't put last IFrame box in return list
                }

                retList.Add(datBlock);

                // if we are looking at the last queued box, pull in another batch so the loop can continue
                if (box == _qBoxes.Last())
                {
                    base.GetNextBatch(GenericMediaStream.MAX_BOXES_TO_READ, 0);
                    // callee should set end FIXME: is box.mCurrentPosition being set?
                }
            } // end of for loop

            // dispose of every qbox we consumed; the remainder stay queued for the next call
            _qBoxes.RemoveRange(0, boxCount);

            return(retList);
        }