/// <summary>
/// Initializes a new instance of the <see cref="SubtitleFrame" /> class.
/// </summary>
/// <param name="frame">The subtitle frame pointer.</param>
/// <param name="component">The media component that produced the frame.</param>
internal SubtitleFrame(AVSubtitle *frame, MediaComponent component)
    : base(frame, component)
{
    m_Pointer = (AVSubtitle *)InternalPointer;

    // Extract timing information (pts for Subtitles is always in AV_TIME_BASE units)
    HasValidStartTime = frame->pts != FFmpegEx.AV_NOPTS;
    var baseOffset = TimeSpan.FromTicks(
        frame->pts.ToTimeSpan(ffmpeg.AV_TIME_BASE).Ticks - component.Container.MediaStartTimeOffset.Ticks);
    StartTime = TimeSpan.FromTicks(baseOffset.Ticks + ((long)frame->start_display_time).ToTimeSpan(StreamTimeBase).Ticks);
    EndTime = TimeSpan.FromTicks(baseOffset.Ticks + ((long)frame->end_display_time).ToTimeSpan(StreamTimeBase).Ticks);
    Duration = TimeSpan.FromTicks(EndTime.Ticks - StartTime.Ticks);

    // Extract the first available text payload; stop at the first TEXT or ASS rect
    // that actually carries a string. Rects of other types only record their type.
    TextType = AVSubtitleType.SUBTITLE_NONE;
    for (var rectIndex = 0; rectIndex < frame->num_rects; rectIndex++)
    {
        var rect = frame->rects[rectIndex];

        if (rect->type == AVSubtitleType.SUBTITLE_TEXT)
        {
            if (rect->text == null) continue;
            Text.Add(Utils.PtrToStringUTF8(rect->text));
            TextType = AVSubtitleType.SUBTITLE_TEXT;
            break;
        }

        if (rect->type == AVSubtitleType.SUBTITLE_ASS)
        {
            if (rect->ass == null) continue;
            Text.Add(Utils.PtrToStringUTF8(rect->ass));
            TextType = AVSubtitleType.SUBTITLE_ASS;
            break;
        }

        TextType = rect->type;
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="AudioFrame" /> class.
/// </summary>
/// <param name="frame">The decoded audio frame pointer.</param>
/// <param name="component">The media component that produced the frame.</param>
internal AudioFrame(AVFrame *frame, MediaComponent component)
    : base(frame, component)
{
    m_Pointer = (AVFrame *)InternalPointer;

    // Compute the timespans.
    // Deliberately NOT using av_frame_get_best_effort_timestamp for audio frames.
    HasValidStartTime = frame->pts != FFmpegEx.AV_NOPTS;
    if (frame->pts == FFmpegEx.AV_NOPTS)
    {
        StartTime = TimeSpan.FromTicks(component.Container.MediaStartTimeOffset.Ticks);
    }
    else
    {
        StartTime = TimeSpan.FromTicks(
            frame->pts.ToTimeSpan(StreamTimeBase).Ticks - component.Container.MediaStartTimeOffset.Ticks);
    }

    // Duration: prefer the packet duration; otherwise derive it from the
    // sample count and sample rate (ticks-per-second * samples / rate).
    Duration = frame->pkt_duration != 0
        ? frame->pkt_duration.ToTimeSpan(StreamTimeBase)
        : TimeSpan.FromTicks((long)Math.Round(
            TimeSpan.TicksPerMillisecond * 1000d * frame->nb_samples / frame->sample_rate, 0));

    EndTime = TimeSpan.FromTicks(StartTime.Ticks + Duration.Ticks);
}
/// <summary>
/// Initializes a new instance of the <see cref="VideoFrame" /> class.
/// Computes timing, picture numbers, the SMPTE timecode string, and extracts
/// Closed-Caption (A53) packets from the frame's side data.
/// </summary>
/// <param name="frame">The frame.</param>
/// <param name="component">The component.</param>
internal VideoFrame(AVFrame *frame, MediaComponent component)
    : base(frame, component)
{
    // av_timecode_make_smpte_tc_string requires at least 16 chars + NUL terminator.
    const int AV_TIMECODE_STR_SIZE = 16 + 1;

    m_Pointer = (AVFrame *)InternalPointer;

    // Each repeated picture extends the display duration by half a frame period.
    var repeatFactor = 1d + (0.5d * frame->repeat_pict);
    var timeBase = ffmpeg.av_guess_frame_rate(component.Container.InputContext, component.Stream, frame);
    Duration = repeatFactor.ToTimeSpan(new AVRational { num = timeBase.den, den = timeBase.num });

    // for video frames, we always get the best effort timestamp as dts and pts might
    // contain different times.
    frame->pts = ffmpeg.av_frame_get_best_effort_timestamp(frame);
    HasValidStartTime = frame->pts != FFmpegEx.AV_NOPTS;
    StartTime = frame->pts == FFmpegEx.AV_NOPTS ?
        TimeSpan.FromTicks(component.Container.MediaStartTimeOffset.Ticks) :
        TimeSpan.FromTicks(frame->pts.ToTimeSpan(StreamTimeBase).Ticks - component.Container.MediaStartTimeOffset.Ticks);
    EndTime = TimeSpan.FromTicks(StartTime.Ticks + Duration.Ticks);

    // FIX: keep a decoder-supplied display picture number; the original code
    // discarded nonzero values and forced 0. Also guard against a zero Duration
    // to avoid an Infinity-to-int cast when deriving the number from timestamps.
    DisplayPictureNumber = frame->display_picture_number == 0 ?
        (Duration.Ticks > 0 ? (int)Math.Round((double)StartTime.Ticks / Duration.Ticks, 0) : 0) :
        frame->display_picture_number;
    CodedPictureNumber = frame->coded_picture_number;

    // SMTPE timecode calculation
    var timeCodeInfo = (AVTimecode *)ffmpeg.av_malloc((ulong)Marshal.SizeOf(typeof(AVTimecode)));
    var startFrameNumber = Duration.Ticks > 0 ?
        (int)Math.Round((double)component.StartTimeOffset.Ticks / Duration.Ticks, 0) : 0;
    ffmpeg.av_timecode_init(timeCodeInfo, timeBase, 0, startFrameNumber, null);

    // NTSC (30000/1001) drop-frame material needs the frame number adjusted
    // before the SMPTE conversion.
    var isNtsc = timeBase.num == 30000 && timeBase.den == 1001;
    var frameNumber = isNtsc ?
        ffmpeg.av_timecode_adjust_ntsc_framenum2(DisplayPictureNumber, (int)timeCodeInfo->fps) :
        DisplayPictureNumber;

    // FIX: pass the (possibly NTSC-adjusted) frameNumber; the original computed
    // it and then passed the unadjusted DisplayPictureNumber, leaving the
    // adjustment dead and producing wrong drop-frame timecodes.
    var timeCode = ffmpeg.av_timecode_get_smpte_from_framenum(timeCodeInfo, frameNumber);
    var timeCodeBuffer = (byte *)ffmpeg.av_malloc(AV_TIMECODE_STR_SIZE);
    ffmpeg.av_timecode_make_smpte_tc_string(timeCodeBuffer, timeCode, 1);
    SmtpeTimecode = Marshal.PtrToStringAnsi(new IntPtr(timeCodeBuffer));
    ffmpeg.av_free(timeCodeInfo);
    ffmpeg.av_free(timeCodeBuffer);

    // Process side data such as CC packets
    for (var i = 0; i < frame->nb_side_data; i++)
    {
        var sideData = frame->side_data[i];

        // Only Closed-Caption (A53) side data is of interest here.
        if (sideData->type != AVFrameSideDataType.AV_FRAME_DATA_A53_CC)
            continue;

        // CC data arrives as a sequence of 3-byte packets.
        for (var p = 0; p < sideData->size; p += 3)
        {
            var packet = new ClosedCaptionPacket(
                StartTime, sideData->data[p + 0], sideData->data[p + 1], sideData->data[p + 2]);
            if (packet.PacketType == CCPacketType.NullPad || packet.PacketType == CCPacketType.Unrecognized)
                continue;

            // at this point, we have valid CC data
            ClosedCaptions.Add(packet);
        }
    }
}
#pragma warning restore SA1401 // Fields must be private

#endregion

#region Constructor

/// <summary>
/// Initializes a new instance of the <see cref="MediaFrame" /> class.
/// Captures the native frame pointer and the stream identity of the owning component.
/// </summary>
/// <param name="pointer">The pointer.</param>
/// <param name="component">The component.</param>
internal MediaFrame(void *pointer, MediaComponent component)
{
    InternalPointer = pointer;
    StreamIndex = component.StreamIndex;
    StreamTimeBase = component.Stream->time_base;
}