/// <summary>
/// Initializes a new instance of the <see cref="MediaFrame" /> class
/// from a decoded <see cref="AVFrame" /> pointer.
/// </summary>
/// <param name="pointer">The unmanaged frame pointer.</param>
/// <param name="component">The component the frame belongs to.</param>
/// <param name="mediaType">Type of the media.</param>
internal MediaFrame(AVFrame *pointer, MediaComponent component, MediaType mediaType)
    : this((void *)pointer, component, mediaType)
{
    // Negative packet sizes mean "unknown"; report those as zero.
    CompressedSize = pointer->pkt_size > 0 ? pointer->pkt_size : 0;
}
/// <summary>
/// Initializes a new instance of the <see cref="MediaFrame" /> class
/// from an opaque pointer, capturing the stream identity and time base.
/// </summary>
/// <param name="pointer">The unmanaged frame pointer.</param>
/// <param name="component">The component the frame belongs to.</param>
/// <param name="mediaType">Type of the media.</param>
private MediaFrame(void *pointer, MediaComponent component, MediaType mediaType)
{
    MediaType = mediaType;
    StreamIndex = component.StreamIndex;
    StreamTimeBase = component.Stream->time_base;
    InternalPointer = new IntPtr(pointer);
}
/// <summary>
/// Initializes a new instance of the <see cref="SubtitleFrame" /> class.
/// Extracts timing and text from the subtitle, then immediately releases
/// the unmanaged subtitle struct.
/// </summary>
/// <param name="frame">The frame.</param>
/// <param name="component">The component.</param>
internal SubtitleFrame(AVSubtitle *frame, MediaComponent component)
    : base(frame, component)
{
    m_Pointer = (AVSubtitle *)InternalPointer;

    // Extract timing information (pts for Subtitles is always in AV_TIME_BASE units)
    var timeOffset = TimeSpan.FromTicks(frame->pts.ToTimeSpan(ffmpeg.AV_TIME_BASE).Ticks - component.Container.MediaStartTimeOffset.Ticks);

    // NOTE(review): start/end_display_time are converted through StreamTimeBase
    // here, but FFmpeg documents them as milliseconds relative to pts — confirm.
    StartTime = TimeSpan.FromTicks(timeOffset.Ticks + ((long)frame->start_display_time).ToTimeSpan(StreamTimeBase).Ticks);
    EndTime = TimeSpan.FromTicks(timeOffset.Ticks + ((long)frame->end_display_time).ToTimeSpan(StreamTimeBase).Ticks);
    Duration = TimeSpan.FromTicks(EndTime.Ticks - StartTime.Ticks);

    // Extract text strings from every rectangle that carries text
    for (var i = 0; i < frame->num_rects; i++)
    {
        var rect = frame->rects[i];
        if (rect->text != null)
        {
            Text.Add(Utils.PtrToStringUTF8(rect->text));
        }
    }

    // Immediately release the frame as the struct was created in managed memory.
    // Accessing it later will eventually cause a memory access error.
    if (m_Pointer != null)
    {
        ffmpeg.avsubtitle_free(m_Pointer);
    }

    m_Pointer = null;
    InternalPointer = null;
}
/// <summary>
/// Initializes a new instance of the <see cref="MediaFrame"/> class
/// for subtitle media.
/// </summary>
/// <param name="pointer">The unmanaged subtitle pointer.</param>
/// <param name="component">The component the frame belongs to.</param>
protected MediaFrame(AVSubtitle *pointer, MediaComponent component)
    : this(pointer, component, MediaType.Subtitle)
{
    // TODO: Compressed size is simply an estimate (256 bytes per rectangle)
    CompressedSize = (int)pointer->num_rects * 256;
    DecodingTime = pointer->pts;
    PresentationTime = Convert.ToInt64(pointer->start_display_time);
}
/// <summary>
/// Initializes a new instance of the <see cref="MediaFrame" /> class,
/// capturing the frame's timestamps and compressed size.
/// </summary>
/// <param name="pointer">The unmanaged frame pointer.</param>
/// <param name="component">The component the frame belongs to.</param>
/// <param name="mediaType">Type of the media.</param>
protected MediaFrame(AVFrame *pointer, MediaComponent component, MediaType mediaType)
    : this((void *)pointer, component, mediaType)
{
    PresentationTime = pointer->pts;
    DecodingTime = pointer->pkt_dts;

    // Negative packet sizes mean "unknown"; report those as zero.
    CompressedSize = pointer->pkt_size > 0 ? pointer->pkt_size : 0;
}
/// <summary>
/// Initializes a new instance of the <see cref="VideoFrame" /> class.
/// Computes frame timing, picture numbers, the SMTPE timecode, and extracts
/// any Closed-Caption (A53) side-data packets.
/// </summary>
/// <param name="frame">The frame.</param>
/// <param name="component">The component.</param>
internal VideoFrame(AVFrame *frame, MediaComponent component)
    : base(frame, component)
{
    m_Pointer = (AVFrame *)InternalPointer;

    // Each repeated picture extends the duration by half a frame interval;
    // the guessed frame rate is inverted (num/den swapped) to get a period.
    var repeatFactor = 1d + (0.5d * frame->repeat_pict);
    var timeBase = ffmpeg.av_guess_frame_rate(component.Container.InputContext, component.Stream, frame);
    Duration = repeatFactor.ToTimeSpan(new AVRational { num = timeBase.den, den = timeBase.num });

    // for video frames, we always get the best effort timestamp as dts and pts might
    // contain different times.
    frame->pts = ffmpeg.av_frame_get_best_effort_timestamp(frame);
    HasValidStartTime = frame->pts != ffmpeg.AV_NOPTS_VALUE;
    StartTime = frame->pts == ffmpeg.AV_NOPTS_VALUE ?
        TimeSpan.FromTicks(0) :
        TimeSpan.FromTicks(frame->pts.ToTimeSpan(StreamTimeBase).Ticks - component.Container.MediaStartTimeOffset.Ticks);

    EndTime = TimeSpan.FromTicks(StartTime.Ticks + Duration.Ticks);

    // Picture Number and SMTPE TimeCode; when the decoder does not report a
    // display picture number, estimate it from the start time and duration.
    DisplayPictureNumber = frame->display_picture_number == 0 ?
        Extensions.ComputePictureNumber(StartTime, Duration, 1) :
        frame->display_picture_number;
    CodedPictureNumber = frame->coded_picture_number;
    SmtpeTimecode = Extensions.ComputeSmtpeTimeCode(component.StartTimeOffset, Duration, timeBase, DisplayPictureNumber);

    // Process side data such as CC packets
    for (var i = 0; i < frame->nb_side_data; i++)
    {
        var sideData = frame->side_data[i];

        // Get the Closed-Caption packets
        if (sideData->type == AVFrameSideDataType.AV_FRAME_DATA_A53_CC)
        {
            // Parse 3 bytes at a time (CC packets are 3-byte tuples)
            for (var p = 0; p < sideData->size; p += 3)
            {
                var packet = new ClosedCaptionPacket(StartTime, sideData->data[p + 0], sideData->data[p + 1], sideData->data[p + 2]);
                if (packet.PacketType == CCPacketType.NullPad || packet.PacketType == CCPacketType.Unrecognized)
                {
                    continue;
                }

                // at this point, we have valid CC data
                ClosedCaptions.Add(packet);
            }

            continue;
        }
    }
}
/// <summary>
/// Registers the component in this component set.
/// </summary>
/// <param name="component">The component.</param>
/// <exception cref="ArgumentNullException">When the component is null.</exception>
/// <exception cref="ArgumentException">When a component of the same media type is already registered.</exception>
/// <exception cref="NotSupportedException">When the component's MediaType is not supported.</exception>
internal void AddComponent(MediaComponent component)
{
    lock (ComponentSyncLock)
    {
        if (component == null)
        {
            throw new ArgumentNullException(nameof(component));
        }

        var errorMessage = $"A component for '{component.MediaType}' is already registered.";
        switch (component.MediaType)
        {
            case MediaType.Audio:
                if (m_Audio != null)
                {
                    throw new ArgumentException(errorMessage);
                }

                m_Audio = component as AudioComponent;
                break;
            case MediaType.Video:
                if (m_Video != null)
                {
                    throw new ArgumentException(errorMessage);
                }

                m_Video = component as VideoComponent;
                break;
            case MediaType.Subtitle:
                if (m_Subtitle != null)
                {
                    throw new ArgumentException(errorMessage);
                }

                m_Subtitle = component as SubtitleComponent;
                break;
            default:
                throw new NotSupportedException($"Unable to register component with {nameof(MediaType)} '{component.MediaType}'");
        }

        // Recompute collections, playback measures and the main component.
        UpdateComponentBackingFields();
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="SubtitleFrame" /> class.
/// Computes the subtitle's display window relative to the main component
/// and extracts the first available TEXT or ASS payload.
/// </summary>
/// <param name="frame">The frame.</param>
/// <param name="component">The component.</param>
internal SubtitleFrame(AVSubtitle *frame, MediaComponent component)
    : base(frame, component)
{
    // pts for subtitles is always expressed in AV_TIME_BASE units
    HasValidStartTime = frame->pts != ffmpeg.AV_NOPTS_VALUE;
    var mainOffset = component.Container.Components.Main.StartTime;
    var timeOffset = TimeSpan.FromTicks(frame->pts.ToTimeSpan(ffmpeg.AV_TIME_BASE).Ticks - mainOffset.Ticks);

    // start_display_time and end_display_time are milliseconds relative to timeOffset
    StartTime = TimeSpan.FromMilliseconds(timeOffset.TotalMilliseconds + frame->start_display_time);
    EndTime = TimeSpan.FromMilliseconds(timeOffset.TotalMilliseconds + frame->end_display_time);
    Duration = TimeSpan.FromMilliseconds(frame->end_display_time - frame->start_display_time);

    // Take the first TEXT or ASS rectangle with content; for any other
    // rectangle type, just remember the type we saw.
    TextType = AVSubtitleType.SUBTITLE_NONE;
    for (var i = 0; i < frame->num_rects; i++)
    {
        var rect = frame->rects[i];
        if (rect->type == AVSubtitleType.SUBTITLE_TEXT)
        {
            if (rect->text != null)
            {
                Text.Add(FFInterop.PtrToStringUTF8(rect->text));
                TextType = AVSubtitleType.SUBTITLE_TEXT;
                break;
            }
        }
        else if (rect->type == AVSubtitleType.SUBTITLE_ASS)
        {
            if (rect->ass != null)
            {
                Text.Add(FFInterop.PtrToStringUTF8(rect->ass));
                TextType = AVSubtitleType.SUBTITLE_ASS;
                break;
            }
        }
        else
        {
            TextType = rect->type;
        }
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="SubtitleFrame" /> class.
/// </summary>
/// <param name="frame">The frame.</param>
/// <param name="component">The component.</param>
internal SubtitleFrame(AVSubtitle *frame, MediaComponent component)
    : base(frame, component)
{
    m_Pointer = (AVSubtitle *)InternalPointer;

    // Extract timing information (pts for Subtitles is always in AV_TIME_BASE units)
    HasValidStartTime = frame->pts != ffmpeg.AV_NOPTS_VALUE;
    var timeOffset = TimeSpan.FromTicks(frame->pts.ToTimeSpan(ffmpeg.AV_TIME_BASE).Ticks - component.Container.MediaStartTimeOffset.Ticks);

    // start_display_time and end_display_time are relative to timeOffset
    // NOTE(review): FFmpeg documents the display times as milliseconds relative
    // to pts; converting them through StreamTimeBase here looks suspect — confirm.
    StartTime = TimeSpan.FromTicks(timeOffset.Ticks + Convert.ToInt64(frame->start_display_time).ToTimeSpan(StreamTimeBase).Ticks);
    EndTime = TimeSpan.FromTicks(timeOffset.Ticks + Convert.ToInt64(frame->end_display_time).ToTimeSpan(StreamTimeBase).Ticks);
    Duration = TimeSpan.FromTicks(EndTime.Ticks - StartTime.Ticks);

    // Extract text strings: take the first TEXT or ASS rectangle with content;
    // for other rectangle types, just record the type that was seen.
    TextType = AVSubtitleType.SUBTITLE_NONE;
    for (var i = 0; i < frame->num_rects; i++)
    {
        var rect = frame->rects[i];
        if (rect->type == AVSubtitleType.SUBTITLE_TEXT)
        {
            if (rect->text != null)
            {
                Text.Add(FFInterop.PtrToStringUTF8(rect->text));
                TextType = AVSubtitleType.SUBTITLE_TEXT;
                break;
            }
        }
        else if (rect->type == AVSubtitleType.SUBTITLE_ASS)
        {
            if (rect->ass != null)
            {
                Text.Add(FFInterop.PtrToStringUTF8(rect->ass));
                TextType = AVSubtitleType.SUBTITLE_ASS;
                break;
            }
        }
        else
        {
            TextType = rect->type;
        }
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="AudioFrame" /> class.
/// Computes the frame's start time, duration and end time.
/// </summary>
/// <param name="frame">The frame.</param>
/// <param name="component">The component.</param>
internal AudioFrame(AVFrame *frame, MediaComponent component)
    : base(frame, component, MediaType.Audio)
{
    // Adopt the best-effort timestamp as the frame pts.
    frame->pts = frame->best_effort_timestamp;
    HasValidStartTime = frame->pts != ffmpeg.AV_NOPTS_VALUE;

    if (frame->pts == ffmpeg.AV_NOPTS_VALUE)
        StartTime = TimeSpan.FromTicks(0);
    else
        StartTime = TimeSpan.FromTicks(frame->pts.ToTimeSpan(StreamTimeBase).Ticks);

    // Prefer the packet duration; otherwise derive it from samples and rate.
    if (frame->pkt_duration > 0)
        Duration = frame->pkt_duration.ToTimeSpan(StreamTimeBase);
    else
        Duration = TimeSpan.FromTicks(Convert.ToInt64(TimeSpan.TicksPerMillisecond * 1000d * frame->nb_samples / frame->sample_rate));

    // Compute the audio frame end time
    EndTime = TimeSpan.FromTicks(StartTime.Ticks + Duration.Ticks);
}
/// <summary>
/// Initializes a new instance of the <see cref="VideoFrame" /> class.
/// Computes frame timing and extracts Closed-Caption (A53) side data.
/// </summary>
/// <param name="frame">The frame.</param>
/// <param name="component">The component.</param>
internal VideoFrame(AVFrame *frame, MediaComponent component)
    : base(frame, component)
{
    m_Pointer = (AVFrame *)InternalPointer;

    // for video frames, we always get the best effort timestamp as dts and pts might
    // contain different times.
    frame->pts = ffmpeg.av_frame_get_best_effort_timestamp(frame);
    StartTime = frame->pts == FFmpegEx.AV_NOPTS ?
        TimeSpan.FromTicks(component.Container.MediaStartTimeOffset.Ticks) :
        TimeSpan.FromTicks(frame->pts.ToTimeSpan(StreamTimeBase).Ticks - component.Container.MediaStartTimeOffset.Ticks);

    // Each repeated picture extends the duration by half a frame interval;
    // the guessed frame rate is inverted (num/den swapped) to get a period.
    var repeatFactor = 1d + (0.5d * frame->repeat_pict);
    var timeBase = ffmpeg.av_guess_frame_rate(component.Container.InputContext, component.Stream, frame);
    Duration = repeatFactor.ToTimeSpan(new AVRational { num = timeBase.den, den = timeBase.num });
    EndTime = TimeSpan.FromTicks(StartTime.Ticks + Duration.Ticks);

    // Get the Closed-Caption packets
    for (var i = 0; i < frame->nb_side_data; i++)
    {
        var sideData = frame->side_data[i];
        if (sideData->type != AVFrameSideDataType.AV_FRAME_DATA_A53_CC)
        {
            continue;
        }

        // Parse 3 bytes at a time (CC packets are 3-byte tuples)
        for (var p = 0; p < sideData->size; p += 3)
        {
            var packet = new ClosedCaptionPacket(StartTime, sideData->data[p + 0], sideData->data[p + 1], sideData->data[p + 2]);
            if (packet.PacketType == CCPacketType.NullPad || packet.PacketType == CCPacketType.Unrecognized)
            {
                continue;
            }

            // at this point, we have valid CC data
            ClosedCaptions.Add(packet);
        }
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="VideoFrame" /> class.
/// Computes the frame's start time, duration and end time.
/// </summary>
/// <param name="frame">The frame.</param>
/// <param name="component">The component.</param>
internal VideoFrame(AVFrame *frame, MediaComponent component)
    : base(frame, component)
{
    m_Pointer = (AVFrame *)InternalPointer;

    // For video frames we always use the best-effort timestamp, as dts and
    // pts might contain different times.
    frame->pts = ffmpeg.av_frame_get_best_effort_timestamp(frame);
    if (frame->pts == Utils.FFmpeg.AV_NOPTS)
        StartTime = TimeSpan.FromTicks(component.Container.MediaStartTimeOffset.Ticks);
    else
        StartTime = TimeSpan.FromTicks(frame->pts.ToTimeSpan(StreamTimeBase).Ticks - component.Container.MediaStartTimeOffset.Ticks);

    // Each repeated picture extends the duration by half a frame interval.
    var durationFactor = 1d + (0.5d * frame->repeat_pict);
    var frameRate = ffmpeg.av_guess_frame_rate(component.Container.InputContext, component.Stream, frame);
    Duration = durationFactor.ToTimeSpan(new AVRational { num = frameRate.den, den = frameRate.num });
    EndTime = TimeSpan.FromTicks(StartTime.Ticks + Duration.Ticks);
}
/// <summary>
/// Initializes a new instance of the <see cref="AudioFrame" /> class.
/// Computes the frame's start time, duration and end time.
/// </summary>
/// <param name="frame">The frame.</param>
/// <param name="component">The component.</param>
internal AudioFrame(AVFrame *frame, MediaComponent component)
    : base(frame, component)
{
    m_Pointer = (AVFrame *)InternalPointer;

    // Compute the timespans.
    // We don't use for Audio frames: frame->pts = ffmpeg.av_frame_get_best_effort_timestamp(frame);
    StartTime = frame->pts == FFmpegEx.AV_NOPTS ?
        TimeSpan.FromTicks(component.Container.MediaStartTimeOffset.Ticks) :
        TimeSpan.FromTicks(frame->pts.ToTimeSpan(StreamTimeBase).Ticks - component.Container.MediaStartTimeOffset.Ticks);

    // Prefer the packet duration; otherwise derive it from samples and rate.
    Duration = frame->pkt_duration != 0 ?
        frame->pkt_duration.ToTimeSpan(StreamTimeBase) :
        TimeSpan.FromTicks((long)Math.Round(TimeSpan.TicksPerMillisecond * 1000d * frame->nb_samples / frame->sample_rate, 0));

    EndTime = TimeSpan.FromTicks(StartTime.Ticks + Duration.Ticks);
}
#pragma warning restore SA1401 // Fields must be private

#endregion

#region Constructor

/// <summary>
/// Initializes a new instance of the <see cref="MediaFrame" /> class.
/// Captures the unmanaged frame pointer and the stream identity/time base
/// needed to convert the frame's timestamps later on.
/// </summary>
/// <param name="pointer">The pointer.</param>
/// <param name="component">The component.</param>
internal MediaFrame(void *pointer, MediaComponent component)
{
    InternalPointer = pointer;
    StreamTimeBase = component.Stream->time_base;
    StreamIndex = component.StreamIndex;
}
/// <summary>
/// Initializes a new instance of the <see cref="MediaFrame"/> class
/// for subtitle media.
/// </summary>
/// <param name="pointer">The unmanaged subtitle pointer.</param>
/// <param name="component">The component the frame belongs to.</param>
internal MediaFrame(AVSubtitle *pointer, MediaComponent component)
    : this((void *)pointer, component, MediaType.Subtitle)
{
    // TODO: Compressed size is simply an estimate (256 bytes per rectangle)
    CompressedSize = 256 * (int)pointer->num_rects;
}
/// <summary>
/// Initializes a new instance of the <see cref="VideoFrame" /> class.
/// Computes frame timing, picture numbers, the SMTPE timecode, and extracts
/// any Closed-Caption (A53) side-data packets.
/// </summary>
/// <param name="frame">The frame.</param>
/// <param name="component">The component.</param>
internal VideoFrame(AVFrame *frame, MediaComponent component)
    : base(frame, component)
{
    const int AV_TIMECODE_STR_SIZE = 16 + 1;

    m_Pointer = (AVFrame *)InternalPointer;

    // Each repeated picture extends the duration by half a frame interval;
    // the guessed frame rate is inverted (num/den swapped) to get a period.
    var repeatFactor = 1d + (0.5d * frame->repeat_pict);
    var timeBase = ffmpeg.av_guess_frame_rate(component.Container.InputContext, component.Stream, frame);
    Duration = repeatFactor.ToTimeSpan(new AVRational { num = timeBase.den, den = timeBase.num });

    // for video frames, we always get the best effort timestamp as dts and pts might
    // contain different times.
    frame->pts = ffmpeg.av_frame_get_best_effort_timestamp(frame);
    HasValidStartTime = frame->pts != FFmpegEx.AV_NOPTS;
    StartTime = frame->pts == FFmpegEx.AV_NOPTS ?
        TimeSpan.FromTicks(component.Container.MediaStartTimeOffset.Ticks) :
        TimeSpan.FromTicks(frame->pts.ToTimeSpan(StreamTimeBase).Ticks - component.Container.MediaStartTimeOffset.Ticks);
    EndTime = TimeSpan.FromTicks(StartTime.Ticks + Duration.Ticks);

    // Picture numbers: when the decoder does not report one, estimate it from
    // the start time and duration. BUGFIX: the reported display picture number
    // was previously discarded and 0 was stored in its place.
    DisplayPictureNumber = frame->display_picture_number == 0 ?
        (int)Math.Round((double)StartTime.Ticks / Duration.Ticks, 0) :
        frame->display_picture_number;
    CodedPictureNumber = frame->coded_picture_number;

    // SMTPE timecode calculation
    var timeCodeInfo = (AVTimecode *)ffmpeg.av_malloc((ulong)Marshal.SizeOf(typeof(AVTimecode)));
    var startFrameNumber = (int)Math.Round((double)component.StartTimeOffset.Ticks / Duration.Ticks, 0);
    ffmpeg.av_timecode_init(timeCodeInfo, timeBase, 0, startFrameNumber, null);

    // NTSC drop-frame material needs the frame number adjusted before the
    // SMPTE conversion. BUGFIX: the adjusted frame number was computed but
    // never used; the unadjusted DisplayPictureNumber was passed instead.
    var isNtsc = timeBase.num == 30000 && timeBase.den == 1001;
    var frameNumber = isNtsc ?
        ffmpeg.av_timecode_adjust_ntsc_framenum2(DisplayPictureNumber, (int)timeCodeInfo->fps) :
        DisplayPictureNumber;
    var timeCode = ffmpeg.av_timecode_get_smpte_from_framenum(timeCodeInfo, frameNumber);
    var timeCodeBuffer = (byte *)ffmpeg.av_malloc(AV_TIMECODE_STR_SIZE);
    ffmpeg.av_timecode_make_smpte_tc_string(timeCodeBuffer, timeCode, 1);
    SmtpeTimecode = Marshal.PtrToStringAnsi(new IntPtr(timeCodeBuffer));

    // Release the unmanaged scratch buffers used for the timecode.
    ffmpeg.av_free(timeCodeInfo);
    ffmpeg.av_free(timeCodeBuffer);

    // Process side data such as CC packets
    for (var i = 0; i < frame->nb_side_data; i++)
    {
        var sideData = frame->side_data[i];

        // Only Closed-Caption (A53) side data is processed here.
        if (sideData->type != AVFrameSideDataType.AV_FRAME_DATA_A53_CC)
        {
            continue;
        }

        // Parse 3 bytes at a time (CC packets are 3-byte tuples)
        for (var p = 0; p < sideData->size; p += 3)
        {
            var packet = new ClosedCaptionPacket(StartTime, sideData->data[p + 0], sideData->data[p + 1], sideData->data[p + 2]);
            if (packet.PacketType == CCPacketType.NullPad || packet.PacketType == CCPacketType.Unrecognized)
            {
                continue;
            }

            // at this point, we have valid CC data
            ClosedCaptions.Add(packet);
        }
    }
}
/// <summary>
/// Computes the main component and backing fields.
/// </summary>
private void UpdateComponentBackingFields()
{
    var components = new List<MediaComponent>(4);
    var mediaTypes = new List<MediaType>(4);

    if (m_Audio != null)
    {
        components.Add(m_Audio);
        mediaTypes.Add(MediaType.Audio);
    }

    if (m_Video != null)
    {
        components.Add(m_Video);
        mediaTypes.Add(MediaType.Video);
    }

    if (m_Subtitle != null)
    {
        components.Add(m_Subtitle);
        mediaTypes.Add(MediaType.Subtitle);
    }

    m_All = new ReadOnlyCollection<MediaComponent>(components);
    m_MediaTypes = new ReadOnlyCollection<MediaType>(mediaTypes);
    m_Count = components.Count;

    // Main component priority:
    // 1. Video, unless it is just an attached picture (album art) alongside audio
    // 2. Audio
    // 3. Video (even attached-picture streams) when there is no audio
    // 4. Subtitles as a last resort
    if (m_Video != null && m_Audio != null && m_Video.StreamInfo.IsAttachedPictureDisposition == false)
    {
        m_Main = m_Video;
        m_MainMediaType = MediaType.Video;
    }
    else if (m_Audio != null)
    {
        m_Main = m_Audio;
        m_MainMediaType = MediaType.Audio;
    }
    else if (m_Video != null)
    {
        m_Main = m_Video;
        m_MainMediaType = MediaType.Video;
    }
    else if (m_Subtitle != null)
    {
        m_Main = m_Subtitle;
        m_MainMediaType = MediaType.Subtitle;
    }
    else
    {
        // We should never really hit this case
        m_Main = null;
        m_MainMediaType = MediaType.None;
    }
}
/// <summary>
/// Computes the main component and backing fields.
/// Re-creates the component/media-type collections, derives the playback
/// start time and duration, propagates them to components that lack them,
/// and finally picks the main component.
/// </summary>
private unsafe void UpdateComponentBackingFields()
{
    var allComponents = new List <MediaComponent>(4);
    var allMediaTypes = new List <MediaType>(4);

    // assign allMediaTypes. IMPORTANT: Order matters because this
    // establishes the priority in which playback measures are computed
    // (components are added in the order: video, audio, subtitle).
    if (m_Video != null)
    {
        allComponents.Add(m_Video);
        allMediaTypes.Add(MediaType.Video);
    }

    if (m_Audio != null)
    {
        allComponents.Add(m_Audio);
        allMediaTypes.Add(MediaType.Audio);
    }

    if (m_Subtitle != null)
    {
        allComponents.Add(m_Subtitle);
        allMediaTypes.Add(MediaType.Subtitle);
    }

    m_All = new ReadOnlyCollection <MediaComponent>(allComponents);
    m_MediaTypes = new ReadOnlyCollection <MediaType>(allMediaTypes);
    m_Count = allComponents.Count;

    // Start with unknown or default playback times
    m_PlaybackDuration = null;
    m_PlaybackStartTime = null;

    // Compute Playback Times -- priority is established by the order
    // of components in allComponents.
    // It would be weird to compute playback duration using subtitles
    foreach (var component in allComponents)
    {
        // We don't want this kind of info from subtitles
        if (component.MediaType == MediaType.Subtitle)
        {
            continue;
        }

        // TimeSpan.MinValue acts as the "unknown" sentinel throughout.
        var startTime = component.Stream->start_time == ffmpeg.AV_NOPTS_VALUE ?
            TimeSpan.MinValue :
            component.Stream->start_time.ToTimeSpan(component.Stream->time_base);

        // compute the duration
        var duration = (component.Stream->duration == ffmpeg.AV_NOPTS_VALUE || component.Stream->duration <= 0) ?
            TimeSpan.MinValue :
            component.Stream->duration.ToTimeSpan(component.Stream->time_base);

        // Skip the component if not known
        if (startTime == TimeSpan.MinValue)
        {
            continue;
        }

        // Set the start time
        m_PlaybackStartTime = startTime;

        // Set the duration and end times if we find valid data
        // NOTE(review): the stream duration computed above only gates this
        // assignment; the stored value is component.Duration — confirm intended.
        if (duration != TimeSpan.MinValue && duration.Ticks > 0)
        {
            m_PlaybackDuration = component.Duration;
        }

        // no more computing playback times after this point
        break;
    }

    // Compute the playback start, end and duration off the media info
    // if we could not compute it via the components
    if (m_PlaybackDuration == null && allComponents.Count > 0)
    {
        var mediaInfo = allComponents[0].Container?.MediaInfo;
        if (mediaInfo != null && mediaInfo.Duration != TimeSpan.MinValue && mediaInfo.Duration.Ticks > 0)
        {
            m_PlaybackDuration = mediaInfo.Duration;

            // override the start time if we have valid duration information
            if (mediaInfo.StartTime != TimeSpan.MinValue)
            {
                m_PlaybackStartTime = mediaInfo.StartTime;
            }
        }
    }

    // Update all of the component start and duration times if not set
    // using the newly computed information if available
    foreach (var component in allComponents)
    {
        if (component.StartTime == TimeSpan.MinValue)
        {
            component.StartTime = m_PlaybackStartTime ?? TimeSpan.Zero;
        }

        if (component.Duration == TimeSpan.MinValue && m_PlaybackDuration != null)
        {
            component.Duration = m_PlaybackDuration.Value;
        }
    }

    // Try for the main component to be the video (if it's not stuff like audio album art, that is)
    if (m_Video != null && m_Audio != null && m_Video.StreamInfo.IsAttachedPictureDisposition == false)
    {
        m_Main = m_Video;
        m_MainMediaType = MediaType.Video;
        return;
    }

    // If it was not video, then it has to be audio (if it has audio)
    if (m_Audio != null)
    {
        m_Main = m_Audio;
        m_MainMediaType = MediaType.Audio;
        return;
    }

    // Set it to video even if it's attached pic stuff
    if (m_Video != null)
    {
        m_Main = m_Video;
        m_MainMediaType = MediaType.Video;
        return;
    }

    // As a last resort, set the main component to be the subtitles
    if (m_Subtitle != null)
    {
        m_Main = m_Subtitle;
        m_MainMediaType = MediaType.Subtitle;
        return;
    }

    // We should never really hit this line
    m_Main = null;
    m_MainMediaType = MediaType.None;
}