/// <summary>
/// Computes the track's media time scale expressed as a fraction of .NET ticks
/// (10,000,000 ticks per second), after sanity-checking that the movie header (mvhd)
/// and the track's media header (mdhd) agree on overall duration.
/// </summary>
/// <param name="moovBox">Movie metadata box supplying the movie header (duration + time scale).</param>
/// <param name="trackBox">Track whose media header supplies the media duration and time scale.</param>
/// <returns>The media time scale divided by 10,000,000.</returns>
/// <exception cref="Exception">
/// Thrown when either time scale is zero, or when the normalized movie and media
/// durations disagree by more than 10%.
/// </exception>
public static float CalculateTimeScale(MovieMetadataBox moovBox, TrackBox trackBox)
{
    MovieHeaderBox headerBox = moovBox.MovieHeaderBox;
    ulong moovDuration = headerBox.Duration;
    uint moovTimeScale = headerBox.TimeScale;

    MediaHeaderBox mdhdBox = trackBox.MediaBox.MediaHeaderBox;
    ulong mediaDuration = mdhdBox.Duration;
    float mediaTimeScale = mdhdBox.TimeScale;

    // BUGFIX: guard zero time scales BEFORE dividing by them. The original checked
    // mediaTimeScale only after the divisions, by which point the float division had
    // already produced Infinity/NaN and the (ulong) cast an undefined duration.
    if (moovTimeScale == 0 || mediaTimeScale == 0.0f)
    {
        throw new Exception("MP4VideoTrack: media time scale is zero");
    }

    // Time scales may differ between moov and each media (sampling rates can differ),
    // so normalize both durations to whole seconds before comparing.
    moovDuration = moovDuration / moovTimeScale;
    mediaDuration = (ulong)(mediaDuration / mediaTimeScale);

    // diff^2 > dur^2/100  <=>  diff > dur/10, i.e. a 10% tolerance.
    // (The original comment claimed 1%, but the math enforces 10%; math kept as-is.)
    long diff = Math.Abs((long)moovDuration - (long)mediaDuration);
    if ((diff * diff) > (long)((moovDuration * moovDuration) / 100))
    {
        throw new Exception("Media Box Header inconsistent with Track Header");
    }

    // Scale to 10,000,000 ticks per second (TimeSpan.FromSeconds(1.0).Ticks == 10,000,000).
    mediaTimeScale /= TimeSpan.FromSeconds(1.0).Ticks;
    return mediaTimeScale;
}
/// <summary>
/// Builds a MovieExtendsBox (mvex) for the given tracks: a MovieExtendsHeaderBox plus
/// one TrackExtendsBox (trex) per audio/video track, accumulating this box's Size as
/// each child is added.
/// </summary>
/// <param name="inParent">The moov box that owns this mvex box.</param>
/// <param name="trackInfos">Track descriptions; one trex entry is created per audio or video track.</param>
public MovieExtendsBox(MovieMetadataBox inParent, List<IsochronousTrackInfo> trackInfos)
    : this(inParent)
{
    // Initial fragment duration should be zero (prev: trackInfos[0].MovieDuration).
    MovieExtendsHeaderBox = new MovieExtendsHeaderBox((uint)0);
    this.Size += MovieExtendsHeaderBox.Size;

    if (TrackExtendBoxes == null)
    {
        TrackExtendBoxes = new TrackExtendsBox[trackInfos.Count];
    }

    int i = 0;
    foreach (IsochronousTrackInfo tri in trackInfos)
    {
        // The original had two byte-identical branches (with unused rati/rvti casts);
        // audio and video tracks get the same trex entry: track ID is the 1-based track
        // index, default sample description index is 1, remaining defaults are zero.
        // Exact-type checks preserved from the original (subclasses do NOT match).
        // NOTE(review): any other track type leaves a null slot in TrackExtendBoxes,
        // same as the original behavior.
        Type trackType = tri.GetType();
        if (trackType == typeof(RawAudioTrackInfo) || trackType == typeof(RawVideoTrackInfo))
        {
            TrackExtendBoxes[i] = new TrackExtendsBox((uint)(i + 1), 1, 0, 0, 0);
            this.Size += TrackExtendBoxes[i].Size;
        }
        i++;
    }
}
/// <summary>
/// Computes the track's media time scale expressed as a fraction of .NET ticks
/// (10,000,000 ticks per second), after sanity-checking that the movie header (mvhd)
/// and the track's media header (mdhd) agree on overall duration.
/// </summary>
/// <param name="moovBox">Movie metadata box supplying the movie header (duration + time scale).</param>
/// <param name="trackBox">Track whose media header supplies the media duration and time scale.</param>
/// <returns>The media time scale divided by 10,000,000.</returns>
/// <exception cref="Exception">
/// Thrown when either time scale is zero, or when the normalized movie and media
/// durations disagree by more than 10%.
/// </exception>
public static float CalculateTimeScale(MovieMetadataBox moovBox, TrackBox trackBox)
{
    MovieHeaderBox headerBox = moovBox.MovieHeaderBox;
    ulong moovDuration = headerBox.Duration;
    uint moovTimeScale = headerBox.TimeScale;

    MediaHeaderBox mdhdBox = trackBox.MediaBox.MediaHeaderBox;
    ulong mediaDuration = mdhdBox.Duration;
    float mediaTimeScale = mdhdBox.TimeScale;

    // BUGFIX: guard zero time scales BEFORE dividing by them. The original checked
    // mediaTimeScale only after the divisions, by which point the float division had
    // already produced Infinity/NaN and the (ulong) cast an undefined duration.
    if (moovTimeScale == 0 || mediaTimeScale == 0.0f)
    {
        throw new Exception("MP4VideoTrack: media time scale is zero");
    }

    // Time scales may differ between moov and each media (sampling rates can differ),
    // so normalize both durations to whole seconds before comparing.
    moovDuration = moovDuration / moovTimeScale;
    mediaDuration = (ulong)(mediaDuration / mediaTimeScale);

    // diff^2 > dur^2/100  <=>  diff > dur/10, i.e. a 10% tolerance.
    // (The original comment claimed 1%, but the math enforces 10%; math kept as-is.)
    long diff = Math.Abs((long)moovDuration - (long)mediaDuration);
    if ((diff * diff) > (long)((moovDuration * moovDuration) / 100))
    {
        throw new Exception("Media Box Header inconsistent with Track Header");
    }

    // Scale to 10,000,000 ticks per second (TimeSpan.FromSeconds(1.0).Ticks == 10,000,000).
    mediaTimeScale /= TimeSpan.FromSeconds(1.0).Ticks;
    return mediaTimeScale;
}
/// <summary>
/// Builds a MovieExtendsBox (mvex) for the given tracks: a MovieExtendsHeaderBox plus
/// one TrackExtendsBox (trex) per audio/video track, accumulating this box's Size as
/// each child is added.
/// </summary>
/// <param name="inParent">The moov box that owns this mvex box.</param>
/// <param name="trackInfos">Track descriptions; one trex entry is created per audio or video track.</param>
public MovieExtendsBox(MovieMetadataBox inParent, List<IsochronousTrackInfo> trackInfos)
    : this(inParent)
{
    // Initial fragment duration should be zero (prev: trackInfos[0].MovieDuration).
    MovieExtendsHeaderBox = new MovieExtendsHeaderBox((uint)0);
    this.Size += MovieExtendsHeaderBox.Size;

    if (TrackExtendBoxes == null)
    {
        TrackExtendBoxes = new TrackExtendsBox[trackInfos.Count];
    }

    int i = 0;
    foreach (IsochronousTrackInfo tri in trackInfos)
    {
        // The original had two byte-identical branches (with unused rati/rvti casts);
        // audio and video tracks get the same trex entry: track ID is the 1-based track
        // index, default sample description index is 1, remaining defaults are zero.
        // Exact-type checks preserved from the original (subclasses do NOT match).
        // NOTE(review): any other track type leaves a null slot in TrackExtendBoxes,
        // same as the original behavior.
        Type trackType = tri.GetType();
        if (trackType == typeof(RawAudioTrackInfo) || trackType == typeof(RawVideoTrackInfo))
        {
            TrackExtendBoxes[i] = new TrackExtendsBox((uint)(i + 1), 1, 0, 0, 0);
            this.Size += TrackExtendBoxes[i].Size;
        }
        i++;
    }
}
/// <summary>
/// Walks every chunk (stco entry) across all tracks in ascending file order and feeds
/// each sample's payload to ProcessSample. Chunks are required to be contiguous: each
/// chunk must begin exactly where the previous chunk's last sample ended, otherwise the
/// input file is rejected.
/// </summary>
public void CollectAllPayload()
{
    MovieMetadataBox movieMetadata = this.mmb;
    TrackBox[] tracks = movieMetadata.TrackBoxes;

    // Per-track state: completion flags, samples-per-chunk tables, a cursor into each
    // track's chunk-offset table, and the next pending chunk offset for each track.
    done = new bool[tracks.Length];
    SampleCountsInChunk = new List <uint> [tracks.Length];
    int[] NextChunkOffsetIndex = new int[tracks.Length];
    OffsetValues = new uint[tracks.Length];
    ChunkOffSetBox[] stco = new ChunkOffSetBox[tracks.Length];
    uint[] sample = new uint[tracks.Length]; // next sample index to consume, per track

    for (int i = 0; i < tracks.Length; i++)
    {
        InitializeSampleCountsInChunk(i);
        NextChunkOffsetIndex[i] = 0;
        stco[i] = tracks[i].MediaBox.MediaInformationBox.SampleTableBox.ChunkOffSetBox;
        sample[i] = 0;
    }

    // Prime each track's pending offset with its first chunk offset.
    for (int i = 0; i < tracks.Length; i++)
    {
        OffsetValues[i] = stco[i].ChunkOffsets[NextChunkOffsetIndex[i]];
    }

    int track;
    // NOTE(review): GetNextOffset presumably returns the smallest pending offset across
    // OffsetValues and reports which track owns it — confirm against its definition.
    uint currOffset = GetNextOffset(tracks.Length, out track);
    Stream.Position = (long)currOffset;

    while (done.Any(d => d == false))
    {
        // The globally smallest pending chunk offset must equal where the previous
        // chunk's samples ended; any gap or overlap means the stco tables are broken.
        uint offset = GetNextOffset(tracks.Length, out track);
        if (offset != currOffset)
        {
            throw new Exception("Input MP4 file has a problem with chunk offsets");
        }
        NextChunkOffsetIndex[track]++;
        if (NextChunkOffsetIndex[track] == stco[track].ChunkOffsets.Length)
        {
            // Track exhausted: mark it done and park its offset at uint.MaxValue so it
            // can never again be selected as the minimum.
            done[track] = true;
            OffsetValues[track] = uint.MaxValue;
        }
        else
        {
            OffsetValues[track] = stco[track].ChunkOffsets[NextChunkOffsetIndex[track]];
        }
        // Emit every sample in this chunk, advancing currOffset by each sample's size so
        // the contiguity check above holds for the next chunk.
        int chunk = SampleToChunkIndex(track, sample[track]);
        uint count = SampleCountsInChunk[track][chunk];
        SampleSizeBox stsz = tracks[track].MediaBox.MediaInformationBox.SampleTableBox.SampleSizeBox;
        for (int k = 0; k < count; k++, sample[track]++)
        {
            ProcessSample(currOffset, stsz.SampleSizeArray[sample[track]]);
            currOffset += stsz.SampleSizeArray[sample[track]];
        }
    }
}
/// <summary>
/// Creates an empty mvex box attached to the given moov box.
/// </summary>
/// <param name="inParent">The moov box that owns this mvex box.</param>
public MovieExtendsBox(MovieMetadataBox inParent)
    : base(BoxTypes.MovieExtends)
{
    this.parent = inParent;
}
/// <summary>
/// Creates a trak box from the given track description and attaches it to its moov box.
/// </summary>
/// <param name="inParent">The moov box that owns this track box.</param>
/// <param name="trackInfo">Track description forwarded to the main constructor.</param>
public TrackBox(MovieMetadataBox inParent, IsochronousTrackInfo trackInfo)
    : this(trackInfo)
{
    this.parent = inParent;
}
/// <summary>
/// Creates an empty trak box attached to the given moov box, recording the movie's
/// time scale for later use.
/// </summary>
/// <param name="inParent">The moov box that owns this track box.</param>
/// <param name="movieTScale">The movie-level time scale (from the movie header).</param>
public TrackBox(MovieMetadataBox inParent, uint movieTScale)
    : this()
{
    this.parent = inParent;
    this.movieTimeScale = movieTScale;
}
/// <summary>
/// Lazily parses the top-level MP4 boxes from the reader: ftyp, moov, free, mdat and
/// mfra. Parsing stops at the first moof (the stream is flagged as fragmented and the
/// derived ISMV class handles fragments) or at any unrecognized box type. Afterwards,
/// if a moov box was found and no media tracks exist yet, the overall duration and
/// time scale are surfaced and tracks are created for non-fragmented input.
/// </summary>
/// <param name="requestedBoxCount">Unused here; present to satisfy the base signature.</param>
public override void LazyRead(int requestedBoxCount)
{
    BoxType boxType;
    while (this.m_reader.BaseStream.Position < this.m_reader.BaseStream.Length)
    {
        boxType = this.m_reader.PeekNextBoxType();
        if (boxType == BoxTypes.MovieFragment)
        {
            // Don't process the fragment here; the ISMV class (derived from this one) does.
            IsMediaStreamFragmented = true;
            break;
        }
        else if (boxType == BoxTypes.FileType)
        {
            ftb = new FileTypeBox();
            ftb.Read(this.m_reader);
            Hints.CompatibleBrands = ftb.CompatibleBrands;
        }
        else if (boxType == BoxTypes.Movie)
        {
            mmb = new MovieMetadataBox();
            mmb.Read(this.m_reader);
            if (mmb.ObjectDescriptorBox != null)
            {
                base.ObjectDescriptor = mmb.ObjectDescriptorBox.Contents;
            }
            if (mmb.UserDataBox != null)
            {
                base.UserData = mmb.UserDataBox.Data;
            }
        }
        else if (boxType == BoxTypes.Free)
        {
            // BUGFIX: the original if/else chain contained a second, unreachable
            // BoxTypes.Free branch further down; it has been removed (dead code).
            FreeBox freeb = new FreeBox();
            freeb.Read(this.m_reader);
            FreeBoxList.Add(freeb);
        }
        else if (boxType == BoxTypes.MediaData) // mdat
        {
            MediaDataBox mdb = new MediaDataBox();
            mdb.Read(this.m_reader); // this doesn't really read all of mdat: payload is skipped
            MediaDataBoxList.Add(mdb);
        }
        else if (boxType == BoxTypes.MovieFragmentRandomAccess)
        {
            MovieFragmentRandomAccessBox = new MovieFragmentRandomAccessBox();
            MovieFragmentRandomAccessBox.Read(this.m_reader);
        }
        else
        {
            // Unknown/invalid box: stop reading rather than guessing at its length.
            break;
        }
    } // end of while

    // Now that we know all about the input file in memory, fill a few structures to give
    // others access to this information. This covers the case in which the mp4 file
    // contains a moov box (MovieMetadataBox).
    if ((mmb != null) && (MediaTracks.Count == 0))
    {
        DurationIn100NanoSecs = (ulong)TimeArithmetic.ConvertToStandardUnit(mmb.MovieHeaderBox.TimeScale, mmb.MovieHeaderBox.Duration);
        Hints.StreamTimeScale = mmb.MovieHeaderBox.TimeScale;
        if (!IsMediaStreamFragmented)
        {
            CreateTracks<GenericAudioTrack, MP4VideoTrack, MP4TrackFormat>();
        }
    }
} // end of LazyRead method
/// <summary>
/// Prepares this fragmented MP4 ("isml"/"piff"/"iso2") file for writing: builds the
/// ftyp box, an identity render matrix, and the moov box; finalizes the moov box
/// (ftyp/moov don't change when moofs are added later); then delegates to the base
/// class to create the track objects.
/// </summary>
/// <param name="mediaTracks">Track descriptions used to build the moov box and tracks.</param>
public void InitializeForWriting(List<IsochronousTrackInfo> mediaTracks)
{
    string[] brands = new string[3];
    brands[0] = "isml";
    brands[1] = "piff";
    brands[2] = "iso2";
    this.ftb = new FileTypeBox(brands); // overwrite base class's ftb
    this.ftb.MinorVersion = 1;

    // Identity transform in 16.16 fixed point; entry [8] uses 2.30 fixed point
    // (see description of RenderMatrix class).
    uint[] matrix = new uint[9];
    matrix[0] = 0x10000;     // 1.0
    matrix[4] = 0x10000;     // 1.0
    matrix[8] = 0x40000000;  // 1.0

    this.mmb = new MovieMetadataBox(mediaTracks, 1.0f, 1.0f, matrix);

    // We can finalize the ftyp and moov boxes here, because they shouldn't change when
    // moofs (fragments) are added.
    this.mmb.FinalizeBox();

    // BUGFIX: the original line was `InitializeForWriting(mediaTracks);` — the method
    // calling itself with the same argument, i.e. unconditional infinite recursion
    // (StackOverflowException). Per the original comment ("create our tracks — partial
    // moov boxes, which should still exist, even for fragmented tracks") the intent was
    // to delegate track creation; `base.` is the minimal fix.
    // NOTE(review): if the base class has no such method, the intended call may instead
    // be the commented-out CreateTracksForWriting<ISMVTrackFormat>(mediaTracks) — confirm.
    base.InitializeForWriting(mediaTracks);
}