/// <summary>
/// Initializes a new instance of the <see cref="SubtitleFrame" /> class.
/// Extracts timing and plain-text payloads from the raw FFmpeg subtitle,
/// then immediately releases the native struct.
/// </summary>
/// <param name="frame">The raw FFmpeg subtitle pointer.</param>
/// <param name="component">The media component this frame belongs to.</param>
internal SubtitleFrame(AVSubtitle *frame, MediaComponent component)
    : base(frame, component)
{
    m_Pointer = (AVSubtitle *)InternalPointer;

    // Extract timing information (pts for Subtitles is always in AV_TIME_BASE units)
    var timeOffset = TimeSpan.FromTicks(
        frame->pts.ToTimeSpan(ffmpeg.AV_TIME_BASE).Ticks - component.Container.MediaStartTimeOffset.Ticks);

    // BUGFIX: per the FFmpeg documentation, start_display_time and
    // end_display_time are expressed in milliseconds relative to pts --
    // not in stream time base units -- so convert with FromMilliseconds
    // instead of StreamTimeBase.
    StartTime = TimeSpan.FromTicks(timeOffset.Ticks + TimeSpan.FromMilliseconds(frame->start_display_time).Ticks);
    EndTime = TimeSpan.FromTicks(timeOffset.Ticks + TimeSpan.FromMilliseconds(frame->end_display_time).Ticks);
    Duration = TimeSpan.FromTicks(EndTime.Ticks - StartTime.Ticks);

    // Extract text strings, skipping rects that carry no text payload.
    for (var i = 0; i < frame->num_rects; i++)
    {
        var rect = frame->rects[i];
        if (rect->text != null)
        {
            Text.Add(Utils.PtrToStringUTF8(rect->text));
        }
    }

    // Immediately release the frame because the struct was created in managed
    // memory; accessing it later would eventually cause a memory access error.
    Release();
}
/// <summary>
/// Initializes a new instance of the <see cref="MediaFrame"/> class
/// from a raw FFmpeg subtitle pointer, tagging it as a subtitle frame.
/// </summary>
/// <param name="pointer">The subtitle pointer.</param>
/// <param name="component">The component that produced this frame.</param>
protected MediaFrame(AVSubtitle *pointer, MediaComponent component)
    : this(pointer, component, MediaType.Subtitle)
{
    // TODO: Compressed size is simply an estimate (roughly 256 bytes per rect).
    var rectCount = (int)pointer->num_rects;
    CompressedSize = rectCount * 256;
    PresentationTime = Convert.ToInt64(pointer->start_display_time);
    DecodingTime = pointer->pts;
}
/// <summary>
/// Releases a natively-allocated subtitle: frees the data owned by the
/// struct and then the struct allocation itself. Null pointers are ignored.
/// </summary>
/// <param name="frame">The subtitle to release.</param>
internal static void ReleaseAVSubtitle(AVSubtitle *frame)
{
    if (frame != null)
    {
        ffmpeg.avsubtitle_free(frame);
        ffmpeg.av_free(frame);
    }
}
/// <summary>
/// Deallocates the subtitle struct used to create in managed memory.
/// </summary>
/// <param name="frame">The frame.</param>
internal static void DeallocateSubtitle(AVSubtitle *frame)
{
    if (frame == null)
        return;

    // Free the rects and payload first, then the struct allocation itself.
    ffmpeg.avsubtitle_free(frame);
    ffmpeg.av_free(frame);
}
/// <summary>
/// Releases internal frame
/// </summary>
protected override void Release()
{
    // Idempotent: nothing to do when the pointer was already released.
    if (m_Pointer == null)
    {
        return;
    }

    // Frees the data owned by the subtitle struct (rects, payload).
    // NOTE(review): unlike the static release helpers elsewhere in this file,
    // this does not call ffmpeg.av_free on the struct itself -- presumably
    // because the struct was allocated in managed memory; confirm against
    // the allocation site.
    ffmpeg.avsubtitle_free(m_Pointer);

    // Clear both pointer views so later release/disposal is a no-op.
    m_Pointer = null;
    InternalPointer = null;
}
/// <summary>
/// Releases unmanaged and - optionally - managed resources.
/// </summary>
/// <param name="disposing"><c>true</c> to release both managed and unmanaged resources; <c>false</c> to release only unmanaged resources.</param>
private void Dispose(bool disposing)
{
    // Guard clause: disposal is idempotent.
    if (IsDisposed)
        return;

    // Free the native subtitle payload (if any) and clear both pointer views.
    if (m_Pointer != null)
        ffmpeg.avsubtitle_free(m_Pointer);

    m_Pointer = null;
    InternalPointer = null;
    IsDisposed = true;
}
/// <summary>
/// Releases unmanaged and - optionally - managed resources.
/// </summary>
/// <param name="disposing"><c>true</c> to release both managed and unmanaged resources; <c>false</c> to release only unmanaged resources.</param>
private void Dispose(bool disposing)
{
    // Already disposed? Nothing further to do.
    if (IsDisposed)
        return;

    // Hand the native struct back through the deallocation helper.
    if (m_Pointer != null)
        DeallocateSubtitle(m_Pointer);

    m_Pointer = null;
    InternalPointer = null;
    IsDisposed = true;
}
/// <summary>
/// Initializes a new instance of the <see cref="SubtitleFrame" /> class.
/// Extracts timing, text type, and the first textual payload (plain text
/// or ASS) from the raw FFmpeg subtitle.
/// </summary>
/// <param name="frame">The raw FFmpeg subtitle pointer.</param>
/// <param name="component">The media component this frame belongs to.</param>
internal SubtitleFrame(AVSubtitle *frame, MediaComponent component)
    : base(frame, component)
{
    m_Pointer = (AVSubtitle *)InternalPointer;

    // Extract timing information (pts for Subtitles is always in AV_TIME_BASE units)
    HasValidStartTime = frame->pts != ffmpeg.AV_NOPTS_VALUE;
    var timeOffset = TimeSpan.FromTicks(
        frame->pts.ToTimeSpan(ffmpeg.AV_TIME_BASE).Ticks - component.Container.MediaStartTimeOffset.Ticks);

    // BUGFIX: per the FFmpeg documentation, start_display_time and
    // end_display_time are milliseconds relative to pts -- not stream
    // time base units -- so convert with FromMilliseconds.
    StartTime = TimeSpan.FromTicks(timeOffset.Ticks + TimeSpan.FromMilliseconds(frame->start_display_time).Ticks);
    EndTime = TimeSpan.FromTicks(timeOffset.Ticks + TimeSpan.FromMilliseconds(frame->end_display_time).Ticks);
    Duration = TimeSpan.FromTicks(EndTime.Ticks - StartTime.Ticks);

    // Extract text strings: record the first TEXT or ASS rect found and stop;
    // other rect types only update TextType.
    TextType = AVSubtitleType.SUBTITLE_NONE;
    for (var i = 0; i < frame->num_rects; i++)
    {
        var rect = frame->rects[i];
        if (rect->type == AVSubtitleType.SUBTITLE_TEXT)
        {
            if (rect->text != null)
            {
                Text.Add(FFInterop.PtrToStringUTF8(rect->text));
                TextType = AVSubtitleType.SUBTITLE_TEXT;
                break;
            }
        }
        else if (rect->type == AVSubtitleType.SUBTITLE_ASS)
        {
            if (rect->ass != null)
            {
                Text.Add(FFInterop.PtrToStringUTF8(rect->ass));
                TextType = AVSubtitleType.SUBTITLE_ASS;
                break;
            }
        }
        else
        {
            TextType = rect->type;
        }
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="SubtitleFrame" /> class.
/// </summary>
/// <param name="frame">The frame.</param>
/// <param name="component">The component.</param>
internal SubtitleFrame(AVSubtitle *frame, MediaComponent component)
    : base(frame, component)
{
    // A subtitle pts is always expressed in AV_TIME_BASE units.
    HasValidStartTime = frame->pts != ffmpeg.AV_NOPTS_VALUE;
    var mainStart = component.Container.Components.Main.StartTime;
    var baseOffset = TimeSpan.FromTicks(frame->pts.ToTimeSpan(ffmpeg.AV_TIME_BASE).Ticks - mainStart.Ticks);

    // start_display_time and end_display_time are millisecond offsets
    // relative to the computed base offset.
    StartTime = TimeSpan.FromMilliseconds(baseOffset.TotalMilliseconds + frame->start_display_time);
    EndTime = TimeSpan.FromMilliseconds(baseOffset.TotalMilliseconds + frame->end_display_time);
    Duration = TimeSpan.FromMilliseconds(frame->end_display_time - frame->start_display_time);

    // Capture the first textual rect (plain text or ASS) and remember its
    // kind; any other rect type only updates TextType.
    TextType = AVSubtitleType.SUBTITLE_NONE;
    for (var rectIndex = 0; rectIndex < frame->num_rects; rectIndex++)
    {
        var currentRect = frame->rects[rectIndex];

        if (currentRect->type == AVSubtitleType.SUBTITLE_TEXT)
        {
            if (currentRect->text != null)
            {
                Text.Add(FFInterop.PtrToStringUTF8(currentRect->text));
                TextType = AVSubtitleType.SUBTITLE_TEXT;
                break;
            }
        }
        else if (currentRect->type == AVSubtitleType.SUBTITLE_ASS)
        {
            if (currentRect->ass != null)
            {
                Text.Add(FFInterop.PtrToStringUTF8(currentRect->ass));
                TextType = AVSubtitleType.SUBTITLE_ASS;
                break;
            }
        }
        else
        {
            TextType = currentRect->type;
        }
    }
}
/// <summary>
/// Releases unmanaged and - optionally - managed resources.
/// </summary>
/// <param name="alsoManaged"><c>true</c> to release both managed and unmanaged resources; <c>false</c> to release only unmanaged resources.</param>
private void Dispose(bool alsoManaged)
{
    // Serialize disposal so concurrent callers cannot double-free the native struct.
    lock (DisposeLock)
    {
        if (!IsDisposed)
        {
            if (m_Pointer != null)
                ReleaseAVSubtitle(m_Pointer);

            m_Pointer = null;
            InternalPointer = null;
            IsDisposed = true;
        }
    }
}
/// <summary>
/// Extracts the textual payload of a decoded subtitle into the given media
/// frame (text, SSA styles, and duration). Only the first rect is examined;
/// bitmap subtitles are not supported.
/// </summary>
/// <param name="decoder">The decoder that produced the subtitle.</param>
/// <param name="mFrame">The managed frame receiving text, styles and duration.</param>
/// <param name="sub">The raw FFmpeg subtitle.</param>
/// <returns>0 on success; -1 on failure or unsupported subtitle type.</returns>
public static int ProcessSubsFrame(Decoder decoder, MediaFrame mFrame, AVSubtitle *sub)
{
    int ret = 0;

    try
    {
        // Guard: a subtitle with no rects has nothing to extract; dereferencing
        // rects[0] would fault.
        if (sub->num_rects < 1)
            return -1;

        string line = "";
        AVSubtitleRect *cur = sub->rects[0];

        switch (cur->type)
        {
            case AVSubtitleType.SUBTITLE_ASS:
                line = Utils.BytePtrToStringUTF8(cur->ass);
                break;

            case AVSubtitleType.SUBTITLE_TEXT:
                // BUGFIX: plain-text rects carry their payload in 'text',
                // not 'ass' (the original read cur->ass here).
                line = Utils.BytePtrToStringUTF8(cur->text);
                break;

            case AVSubtitleType.SUBTITLE_BITMAP:
                Log("Subtitles BITMAP -> Not Implemented yet");
                return(-1);
        }

        mFrame.text = SSAtoSubStyles(line, out List <SubStyle> subStyles);
        mFrame.subStyles = subStyles;
        // Display times are relative to pts, so their difference is the duration.
        mFrame.duration = (int)(sub->end_display_time - sub->start_display_time);

        //Log("SUBS ......... " + Utils.TicksToTime(mFrame.timestamp));
    }
    catch (Exception e)
    {
        ret = -1;
        Log("Error[" + (ret).ToString("D4") + "], Func: ProcessSubsFrame(), Msg: " + e.Message + " - " + e.StackTrace);
    }

    return(ret);
}
/// <inheritdoc />
public unsafe void OnSubtitleDecoded(AVSubtitle *subtitle, AVFormatContext *context)
{
    // Forward the callback to the parent, if one is attached.
    Parent?.RaiseSubtitleDecodedEvent(subtitle, context);
}
/// <summary>
/// Creates a frame source object given the raw FFmpeg subtitle reference.
/// This base implementation does not handle subtitles and always yields null.
/// </summary>
/// <param name="frame">The raw FFmpeg subtitle pointer.</param>
/// <returns>The media frame, or null when subtitles are unsupported.</returns>
protected virtual MediaFrame CreateFrameSource(AVSubtitle *frame) => null;
// Binding for FFmpeg's avcodec_encode_subtitle: encodes a subtitle into the
// caller-supplied buffer (see the FFmpeg libavcodec API documentation for the
// return-value semantics).
public static extern int avcodec_encode_subtitle(AVCodecContext *avctx, byte *buf, int buf_size, AVSubtitle *sub);
// Binding for FFmpeg's avsubtitle_free: frees all allocated data inside the
// given subtitle struct (but not the struct itself).
public static extern void avsubtitle_free(AVSubtitle *sub);
/// <summary>
/// Initializes a new instance of the <see cref="SubtitleDecodedEventArgs"/> class,
/// capturing the raw subtitle pointer alongside its format context.
/// </summary>
/// <param name="subtitle">The subtitle pointer</param>
/// <param name="context">The input format context</param>
public SubtitleDecodedEventArgs(AVSubtitle *subtitle, AVFormatContext *context)
    : base(context) => Subtitle = subtitle;
/// <summary>
/// Creates a frame source object given the raw FFmpeg subtitle reference.
/// </summary>
/// <param name="frame">The raw FFmpeg subtitle pointer.</param>
/// <returns>The managed frame</returns>
protected override unsafe MediaFrame CreateFrameSource(AVSubtitle *frame) =>
    new SubtitleFrame(frame, this);
/// <summary>
/// Raises the SubtitleDecoded event on any subscribed handlers.
/// </summary>
/// <param name="subtitle">The decoded subtitle pointer.</param>
/// <param name="context">The input format context.</param>
internal unsafe void RaiseSubtitleDecodedEvent(AVSubtitle *subtitle, AVFormatContext *context)
{
    SubtitleDecoded?.Invoke(this, new SubtitleDecodedEventArgs(subtitle, context));
}
// Binding for FFmpeg's avcodec_decode_subtitle2: decodes a subtitle packet;
// *got_sub_ptr is set nonzero when a subtitle was decompressed into sub.
public static extern int avcodec_decode_subtitle2(AVCodecContext *avctx, AVSubtitle *sub, int *got_sub_ptr, AVPacket *avpkt);
/// <summary>
/// Initializes a new instance of the <see cref="MediaFrame"/> class
/// from a raw subtitle pointer, tagging it as a subtitle frame.
/// </summary>
/// <param name="pointer">The pointer.</param>
/// <param name="component">The component.</param>
internal MediaFrame(AVSubtitle *pointer, MediaComponent component)
    : this((void *)pointer, component, MediaType.Subtitle)
{
    // TODO: Compressed size is simply an estimate -- roughly 256 bytes per rect.
    const int estimatedBytesPerRect = 256;
    CompressedSize = (int)pointer->num_rects * estimatedBytesPerRect;
}