Example No. 1
        private Fragment GetNextAudioFrag()
        {
            if (nextAudioFragPosition < 0)
            {
                return(null);                       // there are no more!!
            }
            boxReader.BaseStream.Position = nextAudioFragPosition;

            // we already know where at least the next fragment starts; its position was prepared before this call...
            Fragment answer = new Fragment(GetTimeScale(audioTrackID), GetPayloadType(audioTrackID), runningTimeIn100NanoSecs, runningSliceIndex);

            answer.Read(boxReader);
            runningTimeIn100NanoSecs += (ulong)TimeArithmetic.ConvertToStandardUnit(answer.TimeScale, (decimal)answer.Duration);
            runningSliceIndex        += answer.Length;

            nextAudioFragPosition = -1;
            while (this.boxReader.PeekNextBoxType() == BoxTypes.MovieFragment)
            {
                long     fragPos = boxReader.BaseStream.Position;
                Fragment tmp     = new Fragment();

                int trakID = (int)tmp.GetMP4TrackID(boxReader);
                if (GetFragmentHandlerType(trakID) == "soun")
                {
                    nextAudioFragPosition = fragPos;
                    break;
                }
            }

            return(answer);
        }
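Note: the running audio time above is accumulated in 100-nanosecond units by TimeArithmetic.ConvertToStandardUnit, which is not shown on this page. A minimal sketch of the conversion it appears to perform, assuming the "standard unit" is the 100-ns tick implied by runningTimeIn100NanoSecs, could look like this (hypothetical helper, not the library's implementation):

        // Hypothetical sketch, not the library's TimeArithmetic code.
        // Converts a value expressed in track time-scale units into 100-ns ticks.
        private static decimal ConvertToStandardUnitSketch(decimal timeScale, decimal scaledValue)
        {
            const decimal TicksPerSecond = 10000000M;   // 100-ns ticks per second (TimeSpan.TicksPerSecond)
            return scaledValue * TicksPerSecond / timeScale;
        }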
Example No. 2
        public MediaBox(IsochronousTrackInfo trackInfo)
            : this()
        {
            ulong scaledDuration = (ulong)TimeArithmetic.ConvertToTimeScale(trackInfo.TimeScale, trackInfo.DurationIn100NanoSecs);

            MediaHeaderBox = new MediaHeaderBox(this, scaledDuration, trackInfo.TimeScale);
            this.Size     += MediaHeaderBox.Size;
            Codec codec = null;

            if (trackInfo.GetType() == typeof(RawAudioTrackInfo))
            {
                RawAudioTrackInfo audioInfo = (RawAudioTrackInfo)trackInfo;
                codec = new Codec(CodecTypes.Audio);
                codec.PrivateCodecData = audioInfo.CodecPrivateData;
            }
            else if (trackInfo.GetType() == typeof(RawVideoTrackInfo))
            {
                RawVideoTrackInfo videoInfo = (RawVideoTrackInfo)trackInfo;
                codec = new Codec(CodecTypes.Video);
                codec.PrivateCodecData = videoInfo.CodecPrivateData;
            }
            HandlerReferenceBox = new HandlerReferenceBox(this, codec);
            this.Size          += HandlerReferenceBox.Size;
            MediaInformationBox = new MediaInformationBox(this, trackInfo);
            // MediaInformationBox.Size is indeterminate at this time; it is determined only during SampleTableBox.FinalizeBox
        }
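Note: TimeArithmetic.ConvertToTimeScale runs the conversion the other way, rescaling a 100-ns duration into mdhd time-scale units. A worked example with assumed numbers (not taken from the library):

        // Worked example with hypothetical values: a 2-second track duration in 100-ns ticks,
        // rescaled to a 44,100 Hz audio time scale.
        ulong durationIn100NanoSecs = 20000000UL;                                      // 2 seconds
        uint  timeScale             = 44100;
        ulong scaledDuration        = durationIn100NanoSecs * timeScale / 10000000UL;  // = 88,200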
Example No. 3
        public MovieMetadataBox(List <IsochronousTrackInfo> trackInfos, float rate, float volume, uint[] matrix)
            : base(BoxTypes.Movie)
        {
            // initialize movie duration to zero, then increment it for every slice that is written
            ulong scaledDuration = (ulong)TimeArithmetic.ConvertToTimeScale(trackInfos[0].MovieTimeScale, trackInfos[0].MovieDurationIn100NanoSecs);

            MovieHeaderBox = new MovieHeaderBox(trackInfos[0].MovieTimeScale, scaledDuration, rate, volume, matrix);
            this.Size     += MovieHeaderBox.Size;
            TrackBoxes     = new TrackBox[trackInfos.Count]; // may have more than 2 tracks
            // MovieExtendsBox should only exist if this is a fragment
            if (trackInfos[0].IsFragment)
            {
                MovieExtendsBox = new MovieExtendsBox(this, trackInfos);
                this.Size      += MovieExtendsBox.Size;
            }
        }
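Note: a possible call site for this constructor, assuming trackInfos comes from an existing parser; the rate, volume, and matrix below are the usual mvhd defaults (unity rate and volume, identity transformation matrix), not values taken from this library:

        // Usage sketch (hypothetical): build the moov metadata with standard mvhd defaults.
        uint[] identityMatrix = { 0x00010000, 0, 0, 0, 0x00010000, 0, 0, 0, 0x40000000 };
        MovieMetadataBox moov = new MovieMetadataBox(trackInfos, 1.0f, 1.0f, identityMatrix);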
Example No. 4
        public TrackBox(IsochronousTrackInfo trackInfo)
            : this()
        {
            float height = 0.0f;
            float width  = 0.0f;

            if (trackInfo is RawVideoTrackInfo)
            {
                // set the TRACK width, which may differ from SampleDescription width and height, depending on Aspect Ratio
                RawVideoTrackInfo rvti = (RawVideoTrackInfo)trackInfo;
                height = rvti.Height;
                width  = rvti.Width * ((float)rvti.AspectRatioX / (float)rvti.AspectRatioY);
            }
            ulong scaledDuration = (ulong)TimeArithmetic.ConvertToTimeScale(trackInfo.MovieTimeScale, trackInfo.DurationIn100NanoSecs);

            TrackHeaderBox = new TrackHeaderBox((uint)trackInfo.TrackID, scaledDuration, height, width);
            // TrackHeaderBox = new TrackHeaderBox((uint)trackInfo.TrackID, (trackInfo.Duration * oneSecondTicks) / trackInfo.TimeScale, height, width);
            this.Size += TrackHeaderBox.Size;

            // skip the TrackReferenceBox for now
            //TrackReferenceBox = new TrackReferenceBox((uint)trackInfo.TrackID);
            //this.Size += TrackReferenceBox.Size;

#if EDTS_OUT
            EdtsBox = (EdtsBox)trackInfo.GetEdtsBox();
            if (EdtsBox != null)
            {
                this.Size += EdtsBox.Size;
                EdtsBox.ScaleToTarget(trackInfo.MovieTimeScale, trackInfo.TimeScale);
            }
#endif

            MediaBox = new MediaBox(trackInfo);
            // MediaBox.Size can only be determined during FinalizeBox
            // NOTE: NO Esds Box
        }
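Note: the width adjustment above accounts for non-square pixels. A worked example with hypothetical values:

        // Anamorphic NTSC video stored as 720x480 with a 40:33 pixel aspect ratio:
        // the track (display) width widens, while the sample description width stays 720.
        float trackWidth  = 720f * (40f / 33f);   // ≈ 872.7
        float trackHeight = 480f;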
Example No. 5
        private uint SampleCountInLastBatch = 0; // last batch sample count for this trak box (this is independent of the other trak box)

        /// <summary>
        /// InitSampleTableBoxFromStreamLocations
        /// Initialize the boxes that point to where the payload bits are, without writing them out to the final destination file yet.
        /// Major change (06/06/2012): favor creating new chunks over accumulating slices in a chunk.
        /// We take it to the extreme here, like VLC does it: we create a new chunk for every slice/sample.
        /// What this does is make the stsc box small, but the stco box very large. The advantage is that
        /// every slice now has an offset into mdat (and the slice crawler can't possibly go out of sync).
        /// </summary>
        /// <param name="streamLocations">List of StreamDataBlockInfo extracted from source stream, possibly using InitSampleStreamFromSampleTableBox above.</param>
        public void InitSampleTableBoxFromStreamLocations(List <StreamDataBlockInfo> streamLocations, ref ulong currMdatOffset)
        {
            // if this is the first call, create temp files
            if (SttsCountsWriter == null)
            {
                CreateTempFiles();
            }

            if (CompositionTimeToSample == null && (CTTSOut) && (streamLocations.Any(d => (d.CTS > 0UL) || (d.SliceType == SliceType.BFrame))))
            {
                CompositionTimeToSample = new CompositionTimeToSample(this);
            }

            if (streamLocations.Count == 0)
            {
                throw new Exception("InitSampleTableBoxFromStreamLocations: SampleStreamLocations list empty.");
            }
            bool needNewChunk = true;

            foreach (StreamDataBlockInfo sample in streamLocations)
            {
                uint scaledDuration = (uint)TimeArithmetic.ConvertToTimeScale(parent.parent.MediaHeaderBox.TimeScale, sample.SliceDuration);
                if (LastDuration == 0)
                {
                    sampleCountInStts = 1;
                }
                else if (LastDuration == scaledDuration)
                {
                    sampleCountInStts++;
                }
                else
                {
                    WriteToSttsTempFile();
                    sampleCountInStts = 1; // this one for which duration is different counts as one
                }
                LastDuration = scaledDuration;
                //TimeTicks += sample.SliceDuration;
                if (sample.SliceType == SliceType.IFrame)
                {
                    SyncSampleMapWriter.Write(SampleIndex); // if the SyncSampleMapStream has zero length when all is done, then its box should be null
                    CurrSyncSampleMapCount++;
                }
                // compute CTTS from TimeStamp and CTS
                if (CompositionTimeToSample != null)
                {
                    // CTS = Composition Time of the Sample, so these values are ever-increasing
                    // CTTS = Composition Time relative to Time of the Sample, so these are really either 0 or some multiple of the typical sample duration

                    // The CTTS value for an i-frame, for example, is always zero, since its composition time coincides with its sample time:
                    // CTTS-iframe = SampleTime - CTS = Always 0

                    if (sample.SliceType == SliceType.IFrame)
                    {
                        // relative time for an iframe is always 0
                        CompositionTimeToSample.AddEntry(0);
                        LastSynchTime = 0;
                    }
                    else
                    {
                        //if (sample.TimeStampNew.HasValue) {
                        //  // relative time for a d-frame is always 0
                        //  uint TimeFromLastSample = (uint)TimeArithmetic.ConvertToTimeScale(parent.parent.MediaHeaderBox.TimeScale, sample.SliceDuration);
                        //  CompositionTimeToSample.AddEntry((uint)TimeFromLastSample);
                        //} else {
                        //  // this means we are a b-frame
                        //  CompositionTimeToSample.AddEntry((uint)uint.MaxValue);
                        //}


                        if (!sample.TimeStampNew.HasValue || sample.SliceType == SliceType.BFrame)
                        {
                            // this means we are a b-frame
                            uint TimeFromLastSample = (uint)TimeArithmetic.ConvertToTimeScale(parent.parent.MediaHeaderBox.TimeScale, sample.SliceDuration);
                            // we get further from a sync time with each consecutive b-frame;
                            // as shown above, encountering an i- or d-frame snaps us back to a delta of 0
                            LastSynchTime += TimeFromLastSample;
                            CompositionTimeToSample.AddEntry((uint)LastSynchTime);
                        }
                        else if (sample.TimeStampNew.HasValue)
                        {
                            // relative time for a d-frame is always 0
                            CompositionTimeToSample.AddEntry(0);
                            LastSynchTime = 0;
                        }
                    }
                }

                // determine which chunk to put this sample in
                SampleSizeWriter.Write((uint)sample.SliceSize);
                SampleToChunkBox.SetFileOffsetForChunk(SampleIndex, (uint)sample.SliceSize, 1u /* (uint)streamLocations.Count */, needNewChunk, ref currMdatOffset);
                needNewChunk = true; // always create a new chunk, thereby having only a single slice in every chunk (as in VLC output)
                SampleIndex++;
            }

            // set last count
            SampleCountInLastBatch = (uint)streamLocations.Count;
        }
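Note: setting aside the temp-file plumbing, the stts handling above is a run-length encoding of sample durations. A standalone hedged sketch of that idea (hypothetical helper, assumes System.Collections.Generic; not the class's actual implementation):

        // Collapse consecutive equal durations into (sampleCount, sampleDelta) stts runs.
        private static List<KeyValuePair<uint, uint>> BuildSttsRunsSketch(IEnumerable<uint> scaledDurations)
        {
            List<KeyValuePair<uint, uint>> runs = new List<KeyValuePair<uint, uint>>();
            uint runDelta = 0;
            uint runCount = 0;
            foreach (uint delta in scaledDurations)
            {
                if (runCount > 0 && delta == runDelta)
                {
                    runCount++;                                                  // same duration: extend the current run
                }
                else
                {
                    if (runCount > 0)
                    {
                        runs.Add(new KeyValuePair<uint, uint>(runCount, runDelta));
                    }
                    runDelta = delta;                                            // duration changed: start a new run
                    runCount = 1;
                }
            }
            if (runCount > 0)
            {
                runs.Add(new KeyValuePair<uint, uint>(runCount, runDelta));      // flush the final run
            }
            return runs;
        }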
Example No. 6
        /// <summary>
        /// GetStartAndEndIndex
        /// Given a start time and an end time, determine the start slice index and end slice index.
        /// </summary>
        /// <param name="edtsBox">EdtsBox - edit list box, may be null</param>
        /// <param name="sampleTimeScale">uint - sample time scale</param>
        /// <param name="startTime">ulong - start time</param>
        /// <param name="endTime">ulong - end time</param>
        /// <param name="startIndex">out param: start index</param>
        /// <param name="endIndex">out param: end index</param>
        private void GetStartAndEndIndex(EdtsBox edtsBox, uint sampleTimeScale, ulong startTime, ulong endTime, out uint startIndex, out uint endIndex)
        {
            startIndex = 0;
            endIndex   = 0;

            ulong ticksDuration = (ulong)TimeArithmetic.ConvertToStandardUnit(sampleTimeScale, parent.parent.MediaHeaderBox.Duration);

            if (edtsBox != null)
            {
                ticksDuration = (ulong)(edtsBox.GetEditTrackDuration(sampleTimeScale) * (decimal)TimeSpan.TicksPerSecond);
            }
            if (ticksDuration < startTime)
            {
                return;
            }

            DecodingTimeToSampleBox stts = this.DecodingTimeToSampleBox;
            SyncSampleMapBox        stss = this.SyncSampleMapBox;

            uint  sampleCount = 0;
            ulong timeT       = 0;
            ulong currScaledT = 0;
            ulong prevScaledT = 0;

            uint[] counts     = stts.SampleCount;
            uint[] deltaTimes = stts.SampleDelta;
            bool   startSet   = false;


            for (int i = 0; i < stts.EntryCount; i++)
            {
                for (int j = 0; j < counts[i]; j++)
                {
                    if ((currScaledT >= startTime) && (!startSet) && ((stss == null) || (stss.IsIFrame(sampleCount + 1))))
                    {
                        startSet   = true;
                        startIndex = sampleCount + 1;
                    }

                    if (((stss == null) || stss.IsIFrame(sampleCount + 2)) && (currScaledT > endTime))
                    {
                        endIndex = sampleCount + 1;
                        break;
                    } // close of if (currScaledT > endTime)

                    prevScaledT = currScaledT;
                    timeT      += deltaTimes[i];
                    sampleCount++;
                    currScaledT = (ulong)TimeArithmetic.ConvertToStandardUnit(sampleTimeScale, timeT);
                } // end of for j

                if (endIndex > 0) // end sample found
                {
                    break;
                }
            } // end of for i

            if ((endIndex == 0) && startSet) // end sample not found
            {
                endIndex = sampleCount + 1;
            }
        }
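Note: a worked example of the index search with hypothetical numbers (a constant-delta stts and no edit list), showing how a start time in 100-ns ticks maps to a 1-based sample index:

        // Time scale 90,000 with a constant stts delta of 3,000 (i.e. 30 fps).
        uint  timeScale        = 90000;
        uint  delta            = 3000;
        ulong startTicks       = 10000000UL;                                   // requested start time: 1 second
        ulong startMediaUnits  = startTicks * timeScale / 10000000UL;          // = 90,000 media units
        uint  startIndex       = (uint)(startMediaUnits / delta) + 1;          // = 31 (1-based), before any stss snap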
Example No. 7
        /// <summary>
        /// GetOneSample
        /// Get one frame or sample from the moof structures. There is no stss in a fragment, so we need to determine
        /// whether a frame is an IFrame or not by examining the IndependentAndDisposableSamplesBox.
        /// </summary>
        /// <param name="samples">list of TrackFragmentRunSample entries from the trun box</param>
        /// <param name="tfhd">track fragment header supplying the default sample size and duration</param>
        /// <returns>StreamDataBlockInfo for one sample, or null when the run (or the remaining data) is exhausted</returns>
        private StreamDataBlockInfo GetOneSample(List <TrackFragmentRunSample> samples, TrackFragmentHeaderBox tfhd)
        {
            if (_currentFrame == samples.Count + _startIndex)
            {
                return(null);
            }

            uint fixedFrameSizeInBytes = tfhd.DefaultSampleSize;

            if (fixedFrameSizeInBytes == 0)
            {
                fixedFrameSizeInBytes = samples[_currentFrame - _startIndex].SampleSize;
            }

            if (fixedFrameSizeInBytes == 0) // if it's still zero, then we have a problem
            {
                throw new Exception("Sample size zero");
            }

            // is there enough data left to read the next frame?
            if (this._baseDataOffset + _currentOffsetInBytes + fixedFrameSizeInBytes > (ulong)_reader.BaseStream.Length)
            {
                return(null);
            }

            // currently DRM is not yet supported in this GetFrame routine, unlike the FragmentedMp4ParserImplementation
            //        if ((this.m_drmIVOffsets != null) && (this.m_numDrmIVs > this.m_frameIndex))
            //        {
            //            length = this.m_drmIVSizes[this.m_frameIndex];
            //            destinationArray = new byte[length];
            //            Array.Copy(this.m_headerBuffer, this.m_drmIVOffsets[this.m_frameIndex], destinationArray, 0, length);
            //        }

            uint fixedDuration = tfhd.DefaultSampleDuration;

            if (samples[_currentFrame - _startIndex].SampleDuration != 0)
            {
                fixedDuration = samples[_currentFrame - _startIndex].SampleDuration;
            }
            if (_timeScale > 0) // time scale is 1 for ODS assets
            {
                // scale time
                fixedDuration = (uint)TimeArithmetic.ConvertToStandardUnit(_timeScale, fixedDuration);
            }

            StreamDataBlockInfo oneFrameData = new StreamDataBlockInfo();

            //RawFrameData ans = new RawFrameData(CurrentTime, currentOffsetInBytes, fixedFrameSizeInBytes, fixedDuration, destinationArray);
            oneFrameData.SliceDuration = fixedDuration;
            oneFrameData.SliceSize     = (int)fixedFrameSizeInBytes;
            oneFrameData.StreamOffset  = this._baseDataOffset + _currentOffsetInBytes;
            GetSliceTypeAndFrameNum(oneFrameData);

            // for ISM, TimeStampNew will always have a value
            oneFrameData.TimeStampNew = (ulong)_currentTime;
            oneFrameData.index        = _currentFrame;

            _currentOffsetInBytes += fixedFrameSizeInBytes;
            _currentTime          += fixedDuration;
            _currentFrame++;

            return(oneFrameData);
        }
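Note: GetOneSample is private, so a caller inside the same class could drain a whole track fragment run with a loop like the following (hypothetical usage sketch; samples and tfhd are assumed to come from already-parsed trun and tfhd boxes):

        // Collect every slice descriptor in the current run; GetOneSample returns null when done.
        List<StreamDataBlockInfo> slices = new List<StreamDataBlockInfo>();
        StreamDataBlockInfo slice;
        while ((slice = GetOneSample(samples, tfhd)) != null)
        {
            slices.Add(slice);
        }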
Example No. 8
        public override void LazyRead(int requestedBoxCount)
        {
            //this.m_reader.BaseStream.Seek(0L, SeekOrigin.Begin);

            BoxType boxType;

            while (this.m_reader.BaseStream.Position < this.m_reader.BaseStream.Length)
            {
                boxType = this.m_reader.PeekNextBoxType();
                if (boxType == BoxTypes.MovieFragment)
                {
                    IsMediaStreamFragmented = true;
                    break; // don't process fragment here, do it in the ISMV class (which is derived from this one)
                }
                else if (boxType == BoxTypes.FileType)
                {
                    ftb = new FileTypeBox();
                    ftb.Read(this.m_reader);
                    Hints.CompatibleBrands = ftb.CompatibleBrands;
                }
                else if (boxType == BoxTypes.Movie)
                {
                    mmb = new MovieMetadataBox();
                    mmb.Read(this.m_reader);
                    if (mmb.ObjectDescriptorBox != null)
                    {
                        base.ObjectDescriptor = mmb.ObjectDescriptorBox.Contents;
                    }
                    if (mmb.UserDataBox != null)
                    {
                        base.UserData = mmb.UserDataBox.Data;
                    }
                }
                else if (boxType == BoxTypes.Free)
                {
                    FreeBox freeb = new FreeBox();
                    freeb.Read(this.m_reader);
                    FreeBoxList.Add(freeb);
                }
                else if (boxType == BoxTypes.MediaData) // mdat
                {
                    MediaDataBox mdb = new MediaDataBox();
                    mdb.Read(this.m_reader); // this doesn't really read all of mdat: payload is skipped
                    MediaDataBoxList.Add(mdb);
                }
                else if (boxType == BoxTypes.MovieFragmentRandomAccess)
                {
                    MovieFragmentRandomAccessBox = new MovieFragmentRandomAccessBox();
                    MovieFragmentRandomAccessBox.Read(this.m_reader);
                }
                else
                {
                    // invalid box, just stop reading
                    break;
                    //Box box2 = new Box(boxType);
                    //box2.Read(this.m_reader);
                    //FreeBoxList.Add(box2);
                    //Debug.WriteLine(string.Format("Unknown BoxType: {0}", box2.Type.ToString()));
                }
            } // end of while

            // now that we know all about the input file in memory... fill a few structures to help others gain access to this information...
            // this is for the case in which the mp4 file contains moov boxes (MovieMetadataBoxes).
            if ((mmb != null) && (MediaTracks.Count == 0))
            {
                DurationIn100NanoSecs = (ulong)TimeArithmetic.ConvertToStandardUnit(mmb.MovieHeaderBox.TimeScale, mmb.MovieHeaderBox.Duration);
                Hints.StreamTimeScale = mmb.MovieHeaderBox.TimeScale;
                if (!IsMediaStreamFragmented)
                {
                    CreateTracks <GenericAudioTrack, MP4VideoTrack, MP4TrackFormat>();
                }
            }
        } // end of LazyRead method
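Note: PeekNextBoxType and BoxType are types from this library. As a rough illustration of how such a peek can work over a plain System.IO.BinaryReader (hypothetical helper, assumes a seekable stream; not the library's API), every MP4 box starts with a 32-bit big-endian size followed by a four-character type code:

        // Read the next box's type code and rewind, leaving the stream where it was.
        private static uint PeekBoxTypeSketch(BinaryReader reader)
        {
            long savedPosition = reader.BaseStream.Position;
            reader.ReadUInt32();                                  // skip the 4-byte size field (value unused here)
            byte[] typeBytes = reader.ReadBytes(4);               // four-character code, e.g. "moov" or "moof"
            reader.BaseStream.Position = savedPosition;           // rewind so the real parser sees the whole box
            return ((uint)typeBytes[0] << 24) | ((uint)typeBytes[1] << 16) |
                   ((uint)typeBytes[2] << 8)  | typeBytes[3];
        }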