/// <summary>
/// Constructor to use when building the box from scratch.
/// Creates the MediaHeaderBox, HandlerReferenceBox, and MediaInformationBox sub-boxes.
/// NOTE: MediaInformationBox.Size is indeterminate at this time; it is determined
/// only during SampleTableBox.FinalizeBox, so it is not added to this.Size here.
/// </summary>
/// <param name="trackInfo">Track characteristics; expected to be RawAudioTrackInfo or RawVideoTrackInfo.</param>
public MediaBox(IsochronousTrackInfo trackInfo) : this()
{
  ulong scaledDuration = (ulong)TimeArithmetic.ConvertToTimeScale(trackInfo.TimeScale, trackInfo.DurationIn100NanoSecs);
  MediaHeaderBox = new MediaHeaderBox(this, scaledDuration, trackInfo.TimeScale);
  this.Size += MediaHeaderBox.Size;

  // Determine the codec for the handler box from the track type.
  // FIX: use "is" checks (matches the convention used by TrackBox and
  // SampleDescriptionsBox) instead of exact GetType() comparison, so that
  // subclasses of the raw track-info types are also recognized.
  Codec codec = null;
  if (trackInfo is RawAudioTrackInfo)
  {
    RawAudioTrackInfo audioInfo = (RawAudioTrackInfo)trackInfo;
    codec = new Codec(CodecTypes.Audio);
    codec.PrivateCodecData = audioInfo.CodecPrivateData;
  }
  else if (trackInfo is RawVideoTrackInfo)
  {
    RawVideoTrackInfo videoInfo = (RawVideoTrackInfo)trackInfo;
    codec = new Codec(CodecTypes.Video);
    codec.PrivateCodecData = videoInfo.CodecPrivateData;
  }

  HandlerReferenceBox = new HandlerReferenceBox(this, codec);
  this.Size += HandlerReferenceBox.Size;

  MediaInformationBox = new MediaInformationBox(this, trackInfo);
  // MediaInformationBox.Size is indeterminate at this time; it is determined only during SampleTableBox.FinalizeBox
}
/// <summary>
/// Constructor to use when building the box from scratch.
/// Builds the TrackHeaderBox and MediaBox; the TrackReferenceBox is deliberately skipped.
/// NOTE: MediaBox.Size can only be determined during FinalizeBox, and there is no Esds box here.
/// </summary>
/// <param name="trackInfo">Track characteristics used to size and populate the header.</param>
public TrackBox(IsochronousTrackInfo trackInfo) : this()
{
  float trackHeight = 0.0f;
  float trackWidth = 0.0f;
  if (trackInfo is RawVideoTrackInfo)
  {
    // The TRACK width may differ from the SampleDescription width/height
    // because the pixel aspect ratio is applied to the horizontal dimension.
    RawVideoTrackInfo videoInfo = (RawVideoTrackInfo)trackInfo;
    trackHeight = videoInfo.Height;
    trackWidth = videoInfo.Width * ((float)videoInfo.AspectRatioX / (float)videoInfo.AspectRatioY);
  }

  ulong scaledDuration = (ulong)TimeArithmetic.ConvertToTimeScale(trackInfo.MovieTimeScale, trackInfo.DurationIn100NanoSecs);
  TrackHeaderBox = new TrackHeaderBox((uint)trackInfo.TrackID, scaledDuration, trackHeight, trackWidth);
  this.Size += TrackHeaderBox.Size;

  // The TrackReferenceBox is skipped for now.
#if EDTS_OUT
  EdtsBox = (EdtsBox)trackInfo.GetEdtsBox();
  if (EdtsBox != null)
  {
    this.Size += EdtsBox.Size;
    EdtsBox.ScaleToTarget(trackInfo.MovieTimeScale, trackInfo.TimeScale);
  }
#endif

  MediaBox = new MediaBox(trackInfo);
  // MediaBox.Size can only be determined during FinalizeBox.
  // NOTE: NO Esds Box.
}
/// <summary>
/// QBoxTrackFormat
/// Constructor to use when writing out to a stream.
/// </summary>
/// <param name="trackInfo">Track characteristics; HandlerType selects the codec type.</param>
public QBoxTrackFormat(IsochronousTrackInfo trackInfo) : this()
{
  _qBoxes = new List<QBox>();
  firstQB = new QBox(trackInfo);

  // Map the handler type string onto a codec type (string switch uses ordinal comparison,
  // same as the == checks it replaces).
  CodecTypes codecType;
  switch (trackInfo.HandlerType)
  {
    case "Audio":
      codecType = CodecTypes.Audio;
      break;
    case "Video":
      codecType = CodecTypes.Video;
      break;
    default:
      codecType = CodecTypes.Unknown;
      break;
  }

  Codec = new Codec(codecType);
  Codec.PrivateCodecData = trackInfo.CodecPrivateData;
  DurationIn100NanoSecs = trackInfo.DurationIn100NanoSecs;
}
/// <summary>
/// Constructor to use when building the box from scratch.
/// NOTE: We don't compute the Size of this box in this constructor.
/// The Size of this box is computed during FinalizeBox.
/// NOTE: The ordering of the sub-boxes is not determined in the constructor.
/// Writing out the sub-boxes (see the Write method below) determines the order of sub-boxes.
/// </summary>
/// <param name="inParent">MediaInformationBox</param>
/// <param name="trackInfo">IsochronousTrackInfo</param>
public SampleTableBox(MediaInformationBox inParent, IsochronousTrackInfo trackInfo) : this(inParent)
{
  CTTSOut = trackInfo.CTTSOut;
  fragmented = trackInfo.IsFragment;

  // Mandatory sub-boxes, always created.
  SampleDescriptionsBox = new SampleDescriptionsBox(this, trackInfo);
  DecodingTimeToSampleBox = new DecodingTimeToSampleBox(this);
  SampleToChunkBox = new SampleToChunkBox(this);
  SampleSizeBox = new SampleSizeBox(this);
  ChunkOffSetBox = new ChunkOffSetBox(this);

  // A sync-sample map is only written for non-fragmented video tracks.
  // (A CompositionTimeToSample box was considered here but is not created.)
  bool isVideoTrack = trackInfo is RawVideoTrackInfo;
  if (isVideoTrack && !fragmented)
  {
    SyncSampleMapBox = new SyncSampleMapBox();
  }
}
/// <summary>
/// Constructor to use when building the box from scratch.
/// Creates the media-header box appropriate to the track type (sound or video),
/// the DataInformationBox, and the SampleTableBox.
/// NOTE: Size for SampleTableBox is determined only during SampleTableBox.FinalizeBox,
/// so it is not added to this.Size here.
/// </summary>
/// <param name="inParent">The owning MediaBox.</param>
/// <param name="trackInfo">Track characteristics; expected to be RawAudioTrackInfo or RawVideoTrackInfo.</param>
public MediaInformationBox(MediaBox inParent, IsochronousTrackInfo trackInfo) : this(inParent)
{
  // FIX: use "is" checks (matches the convention used by TrackBox and
  // SampleDescriptionsBox) instead of exact GetType() comparison, so that
  // subclasses of the raw track-info types are also recognized.
  if (trackInfo is RawAudioTrackInfo)
  {
    SoundMediaHeaderBox = new SoundMediaHeaderBox();
    this.Size += SoundMediaHeaderBox.Size;
  }
  else if (trackInfo is RawVideoTrackInfo)
  {
    VideoMediaHeaderBox = new VideoMediaHeaderBox();
    this.Size += VideoMediaHeaderBox.Size;
  }

  DataInformationBox = new DataInformationBox();
  this.Size += DataInformationBox.Size;

  SampleTableBox = new SampleTableBox(this, trackInfo);
  // Size for SampleTableBox is determined only during SampleTableBox.FinalizeBox
}
/// <summary>
/// Constructor: reads the characteristics of the source stream, validates that the
/// requested video track exists (unless audio-only output was requested), adjusts the
/// track specs for the destination, and initializes the destination stream for writing.
/// </summary>
/// <param name="srcStream">Input media stream.</param>
/// <param name="destStream">Output media stream.</param>
/// <param name="videoTrackID">ID of the video track to recode.</param>
/// <param name="audioOrVideoOnly">Which track kinds to include (default: both).</param>
/// <param name="cttsOut">Whether to emit a composition-time-to-sample box.</param>
/// <exception cref="ArgumentOutOfRangeException">Thrown when no video track matches and video output was requested.</exception>
public GenericRecodeWRC(IMediaStream srcStream, IMediaStream destStream, int videoTrackID, TracksIncluded audioOrVideoOnly = TracksIncluded.Both, bool cttsOut = false)
  : base(srcStream, destStream)
{
  audioOrVideoOrBoth = audioOrVideoOnly;
  CTTSOut = cttsOut;

  // get characteristics of input stream, and set FetchNextBlock callback on each track.
  TrackInfo = IsochronousTrackInfo.GetTrackCharacteristics(SourceStream, audioOrVideoOrBoth, videoTrackID);
  if ((!TrackInfo.Any(t => t is RawVideoTrackInfo)) && (audioOrVideoOnly != TracksIncluded.Audio))
  {
    // BUGFIX: the single-string ArgumentOutOfRangeException overload treats its argument
    // as the PARAMETER NAME, not the message — the message was previously displayed as
    // "Specified argument was out of range... Parameter name: Video track specified does not exist".
    // Use the (paramName, message) overload instead.
    throw new ArgumentOutOfRangeException("videoTrackID", "Video track specified does not exist");
  }

  AdjustTrackSpecsToDestination(); // adjust recode params according to output

  // setup destination stream here (initialize headers in output tracks)
  DestStream.InitializeForWriting(TrackInfo);
}
/// <summary>
/// Constructor to use for recoding, with standard RawBaseTrackInfo input.
/// Use this to create the very first qbox, which should be a QMed box.
/// Chains to the main constructor with half the hex-string length of the private codec
/// data (each byte is two hex chars), zero sample size/time, the handler type, and the
/// QMED-present flag; then fills in the QMed payload fields from the track info.
/// </summary>
/// <param name="trackInfo">RawBaseTrackInfo; HandlerType selects audio vs. video population.</param>
public QBox(IsochronousTrackInfo trackInfo)
  : this(trackInfo.CodecPrivateData.Length/2, 0, 0UL, trackInfo.HandlerType, QBOX_SAMPLE_FLAGS_QMED_PRESENT)
{
  // Decode the hex-string codec private data into raw bytes for the sample.
  this.mSample.privateCodecData = HEAACWaveInfo.HexString2Bytes(trackInfo.CodecPrivateData);
  // The first qbox carries the configuration info and is a sync point.
  this.mSampleFlags |= QBOX_SAMPLE_FLAGS_CONFIGURATION_INFO | QBOX_SAMPLE_FLAGS_SYNC_POINT;
  if (trackInfo.HandlerType == "Audio")
  {
    // Populate the AAC QMed header from the audio track characteristics.
    // NOTE(review): assumes the chained constructor created the qmed as QMedAAC for
    // audio handlers — the cast throws otherwise; confirm in the main constructor.
    QMed.QMedAAC qmedaac = (QMed.QMedAAC)this.mSample.qmed;
    RawAudioTrackInfo audioInfo = (RawAudioTrackInfo)trackInfo;
    qmedaac.channels = (uint)audioInfo.ChannelCount;
    qmedaac.majorMediaType = QMed.QMED_MAJOR_MEDIA_TYPE_AAC;
    qmedaac.minorMediaType = 0;
    qmedaac.payloadSize = 0;
    qmedaac.sampleSize = (uint)audioInfo.SampleSize;
    qmedaac.samplingFrequency = (uint)audioInfo.SampleRate;
    qmedaac.version = 0;
  }
  else if (trackInfo.HandlerType == "Video")
  {
    // For video, only the sample's video dimensions and tick rate are set here;
    // the QMed H264 header population below is legacy code kept for reference.
    RawVideoTrackInfo videoInfo = (RawVideoTrackInfo)trackInfo;
    this.mSample.v.height = (ulong)videoInfo.Height;
    this.mSample.v.width = (ulong)videoInfo.Width;
    this.mSample.v.frameticks = videoInfo.TimeScale;
    //QMed.QMedH264 qmedh264 = (QMed.QMedH264)this.mSample.qmed;
    //RawVideoTrackInfo videoInfo = (RawVideoTrackInfo)trackInfo;
    //qmedh264.height = (ulong)videoInfo.Height;
    //qmedh264.majorMediaType = QMed.QMED_MAJOR_MEDIA_TYPE_H264;
    //qmedh264.minorMediaType = 0;
    //qmedh264.sampleTicks = videoInfo.TimeScale;
    //qmedh264.version = 0;
    //qmedh264.width = (ulong)videoInfo.Width;
  }
}
/// <summary>
/// Constructor to use when building the box from scratch with a known parent.
/// Delegates all construction work to the TrackBox(IsochronousTrackInfo) constructor,
/// then records the owning MovieMetadataBox.
/// </summary>
/// <param name="inParent">The MovieMetadataBox that owns this track.</param>
/// <param name="trackInfo">Track characteristics forwarded to the main constructor.</param>
public TrackBox(MovieMetadataBox inParent, IsochronousTrackInfo trackInfo)
  : this(trackInfo)
{
  this.parent = inParent;
}
/// <summary>
/// QBoxTrackFormat
/// Constructor to use when writing out to a stream.
/// </summary>
/// <param name="trackInfo">Track characteristics; HandlerType selects the codec type.</param>
public QBoxTrackFormat(IsochronousTrackInfo trackInfo) : this()
{
  _qBoxes = new List<QBox>();
  firstQB = new QBox(trackInfo);

  // Resolve the codec type from the handler string (anything other than
  // "Audio" or "Video" is treated as unknown).
  CodecTypes codecType = CodecTypes.Unknown;
  if (trackInfo.HandlerType == "Audio")
  {
    codecType = CodecTypes.Audio;
  }
  else if (trackInfo.HandlerType == "Video")
  {
    codecType = CodecTypes.Video;
  }

  Codec = new Codec(codecType);
  Codec.PrivateCodecData = trackInfo.CodecPrivateData;
  DurationIn100NanoSecs = trackInfo.DurationIn100NanoSecs;
}
/// <summary>
/// Constructor to use when building the box from scratch.
/// Creates exactly one sample entry (audio or visual) from the track info
/// and selects the entry's box type from the payload type.
/// </summary>
/// <param name="inParent">SampleTableBox that owns this box.</param>
/// <param name="trackInfo">Track characteristics; must be a raw audio or raw video track.</param>
/// <exception cref="Exception">Thrown for unknown audio payload types or unknown track types.</exception>
public SampleDescriptionsBox(SampleTableBox inParent, IsochronousTrackInfo trackInfo) : this(inParent)
{
  EntryCount = 1; // FIXME: assume only one sample entry
  Entries = new SampleEntry[EntryCount];
  this.Size += 4UL; // account for the entry-count field

  if (trackInfo is RawAudioTrackInfo)
  {
    RawAudioTrackInfo audioTrack = (RawAudioTrackInfo)trackInfo;
    BoxType audioBoxType;
    switch (audioTrack.PayloadType)
    {
      case AudioPayloadType.aac:
      case AudioPayloadType.mp4a:
        audioBoxType = BoxTypes.Mp4a;
        break;
      case AudioPayloadType.wma:
        audioBoxType = BoxTypes.Wma;
        break;
      case AudioPayloadType.samr: // 3gp audio
        audioBoxType = BoxTypes.Samr;
        break;
      default:
        throw new Exception(string.Format("Unknown audio track payload type: {0}", audioTrack.PayloadType));
    }
    Entries[0] = new AudioSampleEntry(audioBoxType, audioTrack);
    this.Size += Entries[0].Size;
  }
  else if (trackInfo is RawVideoTrackInfo)
  {
    RawVideoTrackInfo videoTrack = (RawVideoTrackInfo)trackInfo;
    BoxType videoBoxType;
    switch (videoTrack.PayloadType)
    {
      case VideoPayloadType.vc1:
        videoBoxType = BoxTypes.Vc1;
        break;
      case VideoPayloadType.mp4v:
        videoBoxType = BoxTypes.Mp4v;
        break;
      case VideoPayloadType.mjpeg:
      case VideoPayloadType.jpeg:
        videoBoxType = BoxTypes.VisualSampleEntry; // FIXME: this is not correct
        break;
      case VideoPayloadType.avc1:
        videoBoxType = BoxTypes.Avc1;
        break;
      default:
        videoBoxType = BoxTypes.Any;
        break;
    }
    Entries[0] = new VisualSampleEntry(videoBoxType, videoTrack);
    this.Size += Entries[0].Size;
  }
  else
  {
    // Error out instead of constructing an unknown entry.
    throw new Exception("unknown track type");
  }
}