public MovieExtendsBox(MovieMetadataBox inParent, List<IsochronousTrackInfo> trackInfos)
  : this(inParent)
{
  MovieExtendsHeaderBox = new MovieExtendsHeaderBox((uint)0); // initial duration should be zero (prev: trackInfos[0].MovieDuration)
  this.Size += MovieExtendsHeaderBox.Size;

  if (TrackExtendBoxes == null)
  {
    TrackExtendBoxes = new TrackExtendsBox[trackInfos.Count];
  }

  //TrackBox[] tracks = parent.TrackBoxes;
  int i = 0;
  foreach (IsochronousTrackInfo tri in trackInfos)
  {
    if (tri.GetType() == typeof(RawAudioTrackInfo))
    {
      RawAudioTrackInfo rati = (RawAudioTrackInfo)tri;
      // track IDs are assigned sequentially starting at 1; the sample description index within the track is 1
      TrackExtendBoxes[i] = new TrackExtendsBox((uint)(i + 1), 1, 0, 0, 0);
      this.Size += TrackExtendBoxes[i].Size;
    }
    else if (tri.GetType() == typeof(RawVideoTrackInfo))
    {
      RawVideoTrackInfo rvti = (RawVideoTrackInfo)tri;
      // same sequential track ID and default sample description index for video
      TrackExtendBoxes[i] = new TrackExtendsBox((uint)(i + 1), 1, 0, 0, 0);
      this.Size += TrackExtendBoxes[i].Size;
    }
    i++;
  }
}
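// A minimal usage sketch (hypothetical; `moov`, `audioInfo`, and `videoInfo` are assumed
// to exist already -- their construction is not part of this excerpt):
//
//   List<IsochronousTrackInfo> trackInfos = new List<IsochronousTrackInfo> { audioInfo, videoInfo };
//   MovieExtendsBox mvex = new MovieExtendsBox(moov, trackInfos);
//   // mvex.Size now covers the mvex header plus one TrackExtendsBox per audio/video track,
//   // with track IDs 1 and 2 assigned in list order.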
public MediaBox(IsochronousTrackInfo trackInfo)
  : this()
{
  ulong scaledDuration = (ulong)TimeArithmetic.ConvertToTimeScale(trackInfo.TimeScale, trackInfo.DurationIn100NanoSecs);
  MediaHeaderBox = new MediaHeaderBox(this, scaledDuration, trackInfo.TimeScale);
  this.Size += MediaHeaderBox.Size;

  Codec codec = null;
  if (trackInfo.GetType() == typeof(RawAudioTrackInfo))
  {
    RawAudioTrackInfo audioInfo = (RawAudioTrackInfo)trackInfo;
    codec = new Codec(CodecTypes.Audio);
    codec.PrivateCodecData = audioInfo.CodecPrivateData;
  }
  else if (trackInfo.GetType() == typeof(RawVideoTrackInfo))
  {
    RawVideoTrackInfo videoInfo = (RawVideoTrackInfo)trackInfo;
    codec = new Codec(CodecTypes.Video);
    codec.PrivateCodecData = videoInfo.CodecPrivateData;
  }

  HandlerReferenceBox = new HandlerReferenceBox(this, codec);
  this.Size += HandlerReferenceBox.Size;

  MediaInformationBox = new MediaInformationBox(this, trackInfo);
  // MediaInformationBox.Size is indeterminate at this time; it is determined only during SampleTableBox.FinalizeBox
}
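// Usage sketch (hypothetical variable name; assumes `rawVideoInfo` is a populated RawVideoTrackInfo):
//
//   MediaBox mdia = new MediaBox(rawVideoInfo);
//   // At this point mdia.Size accounts only for the mdhd and hdlr boxes; the minf subtree
//   // is added to the size later, when SampleTableBox.FinalizeBox runs.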
///// <summary>
///// Copy Constructor
///// </summary>
///// <param name="trak"></param>
//public IGenericAudioTrack(IGenericAudioTrack trak)
//  : base((GenericMediaTrack)trak)
//{
//  this.Codec.CodecType = CodecTypes.Audio;
//  this.PayloadType = trak.PayloadType;
//  this.ChannelCount = trak.ChannelCount;
//  this.SampleSize = trak.SampleSize;
//  this.SampleRate = trak.SampleRate;
//}

public GenericAudioTrack(RawAudioTrackInfo rawAudioInfo)
  : this()
{
  this.ChannelCount = rawAudioInfo.ChannelCount;
  this.Codec.PrivateCodecData = rawAudioInfo.CodecPrivateData;
  this.PayloadType = rawAudioInfo.PayloadType;
  this.SampleRate = rawAudioInfo.SampleRate;
  this.SampleSize = rawAudioInfo.SampleSize;
}
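// Sketch of typical use (hypothetical; assumes `rawAudioInfo` is a RawAudioTrackInfo extracted from the source stream):
//
//   GenericAudioTrack audioTrack = new GenericAudioTrack(rawAudioInfo);
//   // the track now mirrors the source's channel count, sample rate/size, payload type,
//   // and private codec data.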
/// <summary>
/// InitializeForWriting
/// Initializes the destination stream's duration and stream time scale hints from the
/// source track info (the video track takes precedence; otherwise the audio track is used),
/// and prepares the cache manager for writing when caching is enabled.
/// </summary>
/// <param name="mediaTracks">Track info items describing the source media stream.</param>
public virtual void InitializeForWriting(List<IsochronousTrackInfo> mediaTracks)
{
  this.SourceTrackInfo = mediaTracks;

  if (this.SourceTrackInfo.Any(rbt => rbt.HandlerType == "Video"))
  {
    RawVideoTrackInfo rvt = (RawVideoTrackInfo)mediaTracks.First(rbt => rbt.HandlerType == "Video");
    this.DurationIn100NanoSecs = rvt.MovieDurationIn100NanoSecs;
    this.Hints.StreamTimeScale = rvt.MovieTimeScale;
  }
  else if (this.SourceTrackInfo.Any(rbt => rbt.HandlerType == "Audio"))
  {
    RawAudioTrackInfo rati = (RawAudioTrackInfo)mediaTracks.First(rbt => rbt.HandlerType == "Audio");
    this.DurationIn100NanoSecs = rati.MovieDurationIn100NanoSecs;
    this.Hints.StreamTimeScale = rati.MovieTimeScale;
  }

  if (CachingEnabled)
  {
    _cacheManager.SetupForWrite(mediaTracks);
  }
}
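// Expected call, sketched (hypothetical writer/reader variables; the concrete stream
// classes are not shown in this excerpt):
//
//   destStream.InitializeForWriting(sourceStream.SourceTrackInfo);
//   // DurationIn100NanoSecs and Hints.StreamTimeScale now mirror the video track if the
//   // source has one, otherwise the audio track.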
/// <summary>
/// AdjustTrackSpecsToDestination
/// Controls the recoding by modifying the per-track instances of BaseTrackInfo
/// (via its subclasses RawAudioTrackInfo and RawVideoTrackInfo).
/// By default, these subclass instances take on the characteristics of the input media.
/// NOTE: This method should be XML driven or should pick up parameters from a configuration file.
/// FIXME: What about other track types?
/// </summary>
protected void AdjustTrackSpecsToDestination()
{
  foreach (IsochronousTrackInfo trackDef in TrackInfo)
  {
    trackDef.TrackID = 0; // reset track ID (the destination stream should determine track IDs)
    if (trackDef is RawVideoTrackInfo)
    {
      trackDef.CTTSOut = CTTSOut;
    }
  }

  uint oneSecondTicks = (uint)TimeSpan.TicksPerSecond;

  if (DestStream.GetType().FullName.Equals("Media.Formats.MP4.MP4StreamWriter"))
  {
    TrackInfo.ForEach(delegate(IsochronousTrackInfo trk)
    {
      // set the movie time scale to 1,000
      trk.MovieTimeScale = 1000;

      // Set the track time scale to 10,000.
      // QuickTime cannot handle ulong durations, and our mp4 writer automatically switches to ulong if
      // a movie is more than 2,166,748,000 units long (a value which goes beyond int.MaxValue).
      // The track duration can get this high if the time scale is 10,000,000, which is what Expression uses.
      trk.TimeScale = 10000;

      if (trk is RawVideoTrackInfo)
      {
        trk.CTTSOut = CTTSOut;
      }
      else if (trk is RawAudioTrackInfo)
      {
        // if we are recoding to MP4 from HyperAsset, private codec data should be set as follows
        if (SourceStream.GetType().FullName.Contains(".AssetMediaStream")) // This needs to encompass the new AssetMediaStream2 class.
        {
          trk.CodecPrivateData = "038080220000000480801640150020000001F4000001F4000580800511900000000680800102";
        }
      }
    });
  }
  else if (DestStream.IsMediaStreamFragmented)
  {
    TrackInfo.ForEach(delegate(IsochronousTrackInfo trk)
    {
      if (trk is RawVideoTrackInfo)
      {
        // modify RawVideoTrackInfo: for fragmented tracks, the time scale should equal oneSecondTicks
        // rvti.MovieDurationIn100NanoSecs = rvti.MovieDurationIn100NanoSecs * (oneSecondTicks / rvti.MovieTimeScale);
        // FIXME: what if rvti.MovieTimeScale > oneSecondTicks?
        trk.MovieTimeScale = oneSecondTicks;
        //rvti.DurationIn100NanoSecs = rvti.DurationIn100NanoSecs * (oneSecondTicks / rvti.TimeScale);
        trk.TimeScale = oneSecondTicks;
        trk.IsFragment = true;
      }
    });
  }

  if (TrackInfo.Any(t => t is RawAudioTrackInfo))
  {
    RawAudioTrackInfo rati = (RawAudioTrackInfo)TrackInfo.First(t => t is RawAudioTrackInfo);
    if ((rati != null) && (audioOrVideoOrBoth == TracksIncluded.Video))
    {
      // video-only output: drop the audio track
      TrackInfo.Remove(rati);
      rati = null;
    }
  }

  if (audioOrVideoOrBoth == TracksIncluded.Audio)
  {
    // audio-only output: drop every video track
    IsochronousTrackInfo rvti;
    do
    {
      rvti = TrackInfo.FirstOrDefault(t => t is RawVideoTrackInfo);
      if (rvti != null)
      {
        TrackInfo.Remove(rvti);
      }
    } while (rvti != null);
  }
}
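// Worked example for the TimeScale = 10000 choice above (illustrative numbers only):
// at 10,000,000 units/s (Expression's time scale), a track longer than about 216.7 s already
// exceeds 2,166,748,000 units, i.e. goes past int.MaxValue (2,147,483,647) and pushes the
// mp4 writer into ulong durations, which QuickTime cannot handle. At 10,000 units/s the same
// ~216.7 s track is only about 2,167,000 units, and even a multi-hour movie stays well
// below the 32-bit limit.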
public AudioSampleEntry(BoxType inType, RawAudioTrackInfo audioInfo)
  : base(inType)
{
  ChannelCount = (ushort)audioInfo.ChannelCount;
  SampleSize = (ushort)audioInfo.SampleSize;
  SampleRate = (uint)audioInfo.SampleRate;
  this.Size += 20UL;

  switch (audioInfo.PayloadType)
  {
    case AudioPayloadType.aac:
    case AudioPayloadType.mp4a:
      PrivDataFullBox = new AnyPrivFullBox(BoxTypes.Esds, audioInfo.CodecPrivateData); // AAC encoding
      this.Size += PrivDataFullBox.Size;
      break;
    case AudioPayloadType.wma:
      PrivDataBox = new AnyPrivBox(BoxTypes.Wfex, audioInfo.CodecPrivateData);
      this.Size += PrivDataBox.Size;
      break;
    case AudioPayloadType.samr: // 3gp audio
      PrivDataFullBox = new AnyPrivFullBox(BoxTypes.Damr, audioInfo.CodecPrivateData);
      this.Size += PrivDataFullBox.Size;
      break;
    default:
      throw new Exception(string.Format("Unknown audio track payload type: {0}", audioInfo.PayloadType));
  }
}
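// Usage sketch (hypothetical; `sampleEntryType` stands for whichever BoxType the caller
// selected for this sample entry, and `rawAudioInfo` is a populated RawAudioTrackInfo):
//
//   AudioSampleEntry entry = new AudioSampleEntry(sampleEntryType, rawAudioInfo);
//   // aac/mp4a payloads put the private codec data in an esds full box, wma in a wfex box,
//   // and samr (3GP audio) in a damr full box; any other payload type throws.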