/// <summary>
/// Downloads the frame from the hardware into a software frame if possible.
/// The input hardware frame gets freed and the return value will point to the new software frame.
/// </summary>
/// <param name="codecContext">The codec context.</param>
/// <param name="input">The input frame coming from the decoder (may or may not be hardware).</param>
/// <param name="isHardwareFrame">When set to <c>true</c>, the frame came from hardware; otherwise, hardware decoding was not performed.</param>
/// <returns>
/// The frame downloaded from the device into RAM.
/// </returns>
/// <exception cref="MediaContainerException">Failed to transfer data to output frame.</exception>
public AVFrame* ExchangeFrame(AVCodecContext* codecContext, AVFrame* input, out bool isHardwareFrame)
{
    isHardwareFrame = false;

    // Without a hardware device context, decoding happened entirely in software.
    if (codecContext->hw_device_ctx == null)
        return input;

    isHardwareFrame = true;

    // Only frames in the accelerator's pixel format live in device memory.
    if (input->format != (int)PixelFormat)
        return input;

    // Transfer the frame data from the device into a newly allocated software frame.
    var output = MediaFrame.CreateAVFrame();
    var result = ffmpeg.av_hwframe_transfer_data(output, input, 0);
    ffmpeg.av_frame_copy_props(output, input);

    if (result < 0)
    {
        MediaFrame.ReleaseAVFrame(output);
        throw new MediaContainerException("Failed to transfer data to output frame");
    }

    // The hardware frame is no longer needed.
    MediaFrame.ReleaseAVFrame(input);
    return output;
}
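// Illustrative sketch (not part of the original source): ExchangeFrame only does work when the
// codec context carries a hardware device context. The hypothetical helper below shows one
// common way such a context gets attached using FFmpeg's hardware device API; the method name,
// device type parameter, and error handling are assumptions for illustration only.
private static void AttachHardwareDevice(AVCodecContext* codecContext, AVHWDeviceType deviceType)
{
    AVBufferRef* deviceContext = null;

    // Create a hardware device context (e.g. DXVA2, D3D11VA, CUDA) for the decoder.
    var result = ffmpeg.av_hwdevice_ctx_create(&deviceContext, deviceType, null, null, 0);
    if (result < 0)
        throw new MediaContainerException("Failed to create the hardware device context");

    // Give the codec context its own reference; ExchangeFrame checks this field for null.
    codecContext->hw_device_ctx = ffmpeg.av_buffer_ref(deviceContext);
    ffmpeg.av_buffer_unref(&deviceContext);
}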
/// <inheritdoc />
protected override MediaFrame CreateFrameSource(IntPtr framePointer)
{
    // Validate the audio frame
    var frame = (AVFrame*)framePointer;
    if (framePointer == IntPtr.Zero || frame->channels <= 0 || frame->nb_samples <= 0 || frame->sample_rate <= 0)
        return null;

    // Init the filter graph for the frame
    InitializeFilterGraph(frame);

    AVFrame* outputFrame;

    // The filter graph can be changed by issuing a ChangeMedia command
    if (FilterGraph != null)
    {
        // Allocate the output frame
        outputFrame = MediaFrame.CloneAVFrame(frame);

        // Push the cloned frame into the filter graph and drain the sink into it
        var result = ffmpeg.av_buffersrc_add_frame(SourceFilter, outputFrame);
        while (result >= 0)
            result = ffmpeg.av_buffersink_get_frame_flags(SinkFilter, outputFrame, 0);

        if (outputFrame->nb_samples <= 0)
        {
            // If we don't have a valid output frame simply release it and
            // return the original input frame
            MediaFrame.ReleaseAVFrame(outputFrame);
            outputFrame = frame;
        }
        else
        {
            // The output frame is now the valid frame;
            // therefore, we need to release the original.
            MediaFrame.ReleaseAVFrame(frame);
        }
    }
    else
    {
        outputFrame = frame;
    }

    // Check if the output frame is valid
    if (outputFrame->nb_samples <= 0)
        return null;

    var frameHolder = new AudioFrame(outputFrame, this);
    return frameHolder;
}
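// Illustrative sketch (not part of the original source): the method above pushes a cloned frame
// into the buffer source and keeps pulling from the buffer sink until the call fails, reusing a
// single frame. The hypothetical helper below shows the more conventional FFmpeg pattern that
// drains every filtered frame into a separate output frame; the parameter names and the
// Action<IntPtr> callback are assumptions for illustration only.
private static void DrainFilterGraph(AVFilterContext* sourceFilter, AVFilterContext* sinkFilter, AVFrame* decodedFrame, Action<IntPtr> onFilteredFrame)
{
    // Feed the decoded frame into the graph's buffer source.
    if (ffmpeg.av_buffersrc_add_frame(sourceFilter, decodedFrame) < 0)
        return;

    var filtered = ffmpeg.av_frame_alloc();

    // Pull every frame the graph has buffered; a negative return (EAGAIN/EOF) ends the loop.
    while (ffmpeg.av_buffersink_get_frame_flags(sinkFilter, filtered, 0) >= 0)
    {
        onFilteredFrame((IntPtr)filtered);

        // Drop the reference so the same AVFrame can receive the next pull.
        ffmpeg.av_frame_unref(filtered);
    }

    ffmpeg.av_frame_free(&filtered);
}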
/// <inheritdoc />
protected override MediaFrame CreateFrameSource(IntPtr framePointer)
{
    // Validate the video frame
    var frame = (AVFrame*)framePointer;
    if (framePointer == IntPtr.Zero || frame->width <= 0 || frame->height <= 0)
        return null;

    // Move the frame from hardware (GPU) memory to RAM (CPU)
    if (HardwareAccelerator != null)
    {
        frame = HardwareAccelerator.ExchangeFrame(CodecContext, frame, out var isHardwareFrame);
        IsUsingHardwareDecoding = isHardwareFrame;
    }

    // Init the filter graph for the frame
    InitializeFilterGraph(frame);

    AVFrame* outputFrame;

    // Changes in the filter graph can be applied by calling the ChangeMedia command
    if (FilterGraph != null)
    {
        // Allocate the output frame
        outputFrame = MediaFrame.CloneAVFrame(frame);

        // Push the cloned frame into the filter graph and drain the sink into it
        var result = ffmpeg.av_buffersrc_add_frame(SourceFilter, outputFrame);
        while (result >= 0)
            result = ffmpeg.av_buffersink_get_frame_flags(SinkFilter, outputFrame, 0);

        if (outputFrame->width <= 0 || outputFrame->height <= 0)
        {
            // If we don't have a valid output frame simply release it and
            // return the original input frame
            MediaFrame.ReleaseAVFrame(outputFrame);
            outputFrame = frame;
        }
        else
        {
            // The output frame is now the valid frame;
            // therefore, we need to release the original.
            MediaFrame.ReleaseAVFrame(frame);
        }
    }
    else
    {
        outputFrame = frame;
    }

    // Check if the output frame is valid
    if (outputFrame->width <= 0 || outputFrame->height <= 0)
        return null;

    // Create the frame holder object and return it.
    return new VideoFrame(outputFrame, this);
}
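// Illustrative sketch (not part of the original source): a hypothetical decoding loop showing how
// raw AVFrame pointers would be handed to CreateFrameSource. Ownership follows the method above:
// a non-null result wraps the frame in a holder, while a null result leaves the pointer to the
// caller. The method name and the enqueue step are assumptions for illustration only.
private void DecodeAndEnqueueFrames(AVCodecContext* codecContext)
{
    while (true)
    {
        var decoded = ffmpeg.av_frame_alloc();
        if (ffmpeg.avcodec_receive_frame(codecContext, decoded) < 0)
        {
            // The decoder has no more frames available for now.
            ffmpeg.av_frame_free(&decoded);
            break;
        }

        var managedFrame = CreateFrameSource((IntPtr)decoded);
        if (managedFrame == null)
        {
            // Validation failed and the pointer was not wrapped; release it here.
            ffmpeg.av_frame_free(&decoded);
            continue;
        }

        // ... hand the managed frame to the conversion/rendering pipeline ...
    }
}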