public VideoScalingStream(IVideoStream source, int width, int height, PixelFormat pixelFormat)
{
    if (source == null)
    {
        throw new ArgumentNullException("source");
    }

    m_source = source;
    m_width = width;
    m_height = height;
    m_pixelFormat = pixelFormat;

    m_scalingContext = FFmpeg.sws_getContext(
        source.Width, source.Height, source.PixelFormat,
        width, height, pixelFormat,
        SwsFlags.Bicubic, null, null, null);
    if (m_scalingContext == null)
    {
        throw new DecoderException("Error getting scaling context");
    }

    if (FFmpeg.avpicture_alloc(out m_outPict, this.PixelFormat, this.Width, this.Height) != 0)
    {
        throw new DecoderException("Error allocating AVPicture");
    }

    m_outPictAllocated = true;
}
public unsafe FrameConverter(int srcWidth, int srcHeight, AVPixelFormat srcFormat,
                             int dstWidth, int dstHeight, AVPixelFormat dstFormat)
{
    SwsContext* ctx = ffmpeg.sws_getContext(
        srcW: srcWidth, srcH: srcHeight, srcFormat: srcFormat,
        dstW: dstWidth, dstH: dstHeight, dstFormat: dstFormat,
        flags: ffmpeg.SWS_BICUBIC,
        srcFilter: null, dstFilter: null, param: null);
    if ((IntPtr)ctx == IntPtr.Zero)
    {
        throw new FfmpegException("Cannot initialize the conversion context.");
    }

    try
    {
        _result = FfmpegTools.allocFrame(dstFormat, dstWidth, dstHeight);
    }
    catch
    {
        ffmpeg.sws_freeContext(ctx);
        throw;
    }

    _ctx = ctx;
}
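// A minimal disposal sketch to pair with the constructor above -- not part of
// the original class. It assumes _ctx is the SwsContext* stored above and that
// _result is an AVFrame* produced by the project-specific FfmpegTools.allocFrame.
public unsafe void Dispose()
{
    if (_ctx != null)
    {
        ffmpeg.sws_freeContext(_ctx);
        _ctx = null;
    }

    if (_result != null)
    {
        AVFrame* result = _result;
        ffmpeg.av_frame_free(&result); // frees the frame and its buffers
        _result = null;
    }
}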
public VideoFrameConverter(Size sourceSize, AVPixelFormat sourcePixelFormat,
                           Size destinationSize, AVPixelFormat destinationPixelFormat)
{
    _srcSize = sourceSize;
    _dstSize = destinationSize;
    _srcPixelFormat = sourcePixelFormat;
    _dstPixelFormat = destinationPixelFormat;

    _pConvertContext = ffmpeg.sws_getContext(
        sourceSize.Width, sourceSize.Height, sourcePixelFormat,
        destinationSize.Width, destinationSize.Height, destinationPixelFormat,
        ffmpeg.SWS_FAST_BILINEAR, null, null, null);
    if (_pConvertContext == null)
    {
        throw new ApplicationException("Could not initialize the conversion context.");
    }

    var convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(destinationPixelFormat,
        destinationSize.Width, destinationSize.Height, 1);
    _convertedFrameBufferPtr = Marshal.AllocHGlobal(convertedFrameBufferSize);
    _dstData = new byte_ptrArray4();
    _dstLinesize = new int_array4();

    ffmpeg.av_image_fill_arrays(ref _dstData, ref _dstLinesize, (byte*)_convertedFrameBufferPtr,
        destinationPixelFormat, destinationSize.Width, destinationSize.Height, 1);

    logger.LogDebug($"Successfully initialised ffmpeg based image converter for {sourceSize}:{sourcePixelFormat}->{_dstSize}:{_dstPixelFormat}.");
}
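// A sketch of the companion Convert method used by this family of converters,
// modeled on the FFmpeg.AutoGen example project; field names follow the
// constructor above. The returned AVFrame borrows the converter's buffer, so
// it is only valid until the next Convert call or until disposal.
public unsafe AVFrame Convert(AVFrame sourceFrame)
{
    ffmpeg.sws_scale(_pConvertContext, sourceFrame.data, sourceFrame.linesize,
        0, sourceFrame.height, _dstData, _dstLinesize);

    var data = new byte_ptrArray8();
    var linesize = new int_array8();
    for (uint i = 0; i < 4; i++)
    {
        data[i] = _dstData[i];
        linesize[i] = _dstLinesize[i];
    }

    return new AVFrame
    {
        data = data,
        linesize = linesize,
        width = _dstSize.Width,
        height = _dstSize.Height
    };
}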
public FrameDecoder(RawFrame sampleSource, int targetWidth, int targetHeight, AVPixelFormat targetFormat,
                    int targetBufferSize, int algorithm = ffmpeg.SWS_BICUBIC, int align = 1)
{
    var actualTargetBufferSize = ffmpeg.av_image_get_buffer_size(targetFormat, targetWidth, targetHeight, align);
    if (actualTargetBufferSize != targetBufferSize)
    {
        throw new ArgumentOutOfRangeException(nameof(targetBufferSize),
            FormattableString.Invariant($"Incorrect buffer size - expected {actualTargetBufferSize}"));
    }

    var format = ImageConvert.ConvertFormat(sampleSource.Format);
    var converterContext = ffmpeg.sws_getContext(
        sampleSource.Width, sampleSource.Height, format,
        targetWidth, targetHeight, targetFormat,
        algorithm, srcFilter: null, dstFilter: null, param: null);
    if (converterContext == null)
    {
        throw new InvalidOperationException("Could not initialize the conversion context.");
    }

    // Plain field assignments cannot throw, so the original try/catch around
    // them (with a compensating sws_freeContext) was dead code and is dropped.
    _converterContext = converterContext;
    _targetBufferSize = targetBufferSize;
    _targetWidth = targetWidth;
    _targetHeight = targetHeight;
    _targetFormat = targetFormat;
    _align = align;
}
public VideoFrameConverter(Size sourceSize, AVPixelFormat sourcePixelFormat,
                           Size destinationSize, AVPixelFormat destinationPixelFormat)
{
    _destinationSize = destinationSize;

    _pConvertContext = ffmpeg.sws_getContext(
        sourceSize.Width, sourceSize.Height, sourcePixelFormat,
        destinationSize.Width, destinationSize.Height, destinationPixelFormat,
        ffmpeg.SWS_FAST_BILINEAR, null, null, null);
    if (_pConvertContext == null)
    {
        // Logging alone is not enough: a null context would crash later in sws_scale.
        Debug.WriteLine("Could not initialize the conversion context.");
        throw new ApplicationException("Could not initialize the conversion context.");
    }

    var convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(
        destinationPixelFormat, destinationSize.Width, destinationSize.Height, 1);
    _convertedFrameBufferPtr = Marshal.AllocHGlobal(convertedFrameBufferSize);
    _dstData = new byte_ptrArray4();
    _dstLinesize = new int_array4();

    ffmpeg.av_image_fill_arrays(
        ref _dstData, ref _dstLinesize, (byte*)_convertedFrameBufferPtr,
        destinationPixelFormat, destinationSize.Width, destinationSize.Height, 1);
}
public VideoFrameConverter(Size sourceSize, AVPixelFormat sourcePixelFormat,
                           Size destinationSize, AVPixelFormat destinationPixelFormat)
{
    if (sourceSize.Width == 0 || sourceSize.Height == 0 || sourcePixelFormat == AVPixelFormat.AV_PIX_FMT_NONE)
    {
        throw new InvalidOperationException($"Invalid source: {sourceSize} [{sourcePixelFormat}]");
    }

    _destinationSize = destinationSize;

    _pConvertContext = ffmpeg.sws_getContext(
        sourceSize.Width, sourceSize.Height, sourcePixelFormat,
        destinationSize.Width, destinationSize.Height, destinationPixelFormat,
        ffmpeg.SWS_FAST_BILINEAR, null, null, null);
    if (_pConvertContext == null)
    {
        throw new ApplicationException("Could not initialize the conversion context.");
    }

    var convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(destinationPixelFormat,
        destinationSize.Width, destinationSize.Height, 1);
    _convertedFrameBufferPtr = Marshal.AllocHGlobal(convertedFrameBufferSize);
    _dstData = new byte_ptrArray4();
    _dstLinesize = new int_array4();

    ffmpeg.av_image_fill_arrays(ref _dstData, ref _dstLinesize, (byte*)_convertedFrameBufferPtr,
        destinationPixelFormat, destinationSize.Width, destinationSize.Height, 1);
}
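// These converters allocate both an unmanaged SwsContext and an HGlobal pixel
// buffer, so a matching Dispose is required. A minimal sketch, using the field
// names from the constructors above:
public void Dispose()
{
    Marshal.FreeHGlobal(_convertedFrameBufferPtr);
    ffmpeg.sws_freeContext(_pConvertContext);
}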
public void ConvertToFormat(AVPixelFormat pixelFormat)
{
    if (codecContext->pix_fmt == pixelFormat)
    {
        return;
    }

    format = (int)pixelFormat;
    convertedFrame = ffmpeg.av_frame_alloc();
    convertedFrame->format = format;
    convertedFrame->width = codecContext->width;
    convertedFrame->height = codecContext->height;
    convertedFrame->channels = 4;

    int swsFlags = 0;
    // fixes weird artifacts at the edges for misaligned dimensions
    if (codecContext->width % 8 != 0 || codecContext->height % 8 != 0)
    {
        swsFlags |= ffmpeg.SWS_ACCURATE_RND;
    }

    swsContext = ffmpeg.sws_getContext(
        codecContext->width, codecContext->height, codecContext->pix_fmt,
        convertedFrame->width, convertedFrame->height, pixelFormat,
        swsFlags, null, null, null);
    if (swsContext == null)
    {
        throw new ApplicationException("Failed to create the conversion context");
    }

    // allocate buffers in frame
    if (ffmpeg.av_frame_get_buffer(convertedFrame, 1) != 0)
    {
        throw new ApplicationException("Failed to allocate buffers for frame");
    }
}
public VideoFrameConverter(Size sourceSize, AVPixelFormat sourcePixelFormat,
                           Size destinationSize, AVPixelFormat destinationPixelFormat)
{
    _destinationSize = destinationSize;

    _pConvertContext = ffmpeg.sws_getContext(
        sourceSize.Width, sourceSize.Height, sourcePixelFormat,
        destinationSize.Width, destinationSize.Height, destinationPixelFormat,
        ffmpeg.SWS_FAST_BILINEAR, null, null, null);
    if (_pConvertContext == null)
    {
        throw new ApplicationException("Could not initialize the conversion context.");
    }

    var convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(destinationPixelFormat,
        destinationSize.Width, destinationSize.Height, 1);
    _convertedFrameBufferPtr = Marshal.AllocHGlobal(convertedFrameBufferSize);

    var dstData = new byte_ptrArray4();
    var dstLinesize = new int_array4();
    ffmpeg.av_image_fill_arrays(ref dstData, ref dstLinesize, (byte*)_convertedFrameBufferPtr,
        destinationPixelFormat, destinationSize.Width, destinationSize.Height, 1);

    // Copy the plane pointers and strides into managed arrays. The locals are
    // plain stack values, so they must not be passed to av_free (the original
    // called ffmpeg.av_free(&dstLinesize), which is undefined behavior); they
    // simply go out of scope here.
    _dstLinesize = dstLinesize.ToArray();
    _dstData = dstData.ToArray();
}
public void Encode(Image<Bgra32> image, long ptsMilliseconds)
{
    _swsContext = ffmpeg.sws_getCachedContext(_swsContext,
        image.Width, image.Height, AVPixelFormat.AV_PIX_FMT_BGRA,
        _pCodecContext->width, _pCodecContext->height, _pCodecContext->pix_fmt,
        ffmpeg.SWS_POINT, null, null, null);
    if (_swsContext == null)
    {
        throw new InvalidOperationException("Failed to create or update the SWS context.");
    }

    if (!image.DangerousTryGetSinglePixelMemory(out var pixelData))
    {
        throw new InvalidOperationException("Cannot get raw pixel data of frame");
    }

    fixed (Bgra32* ptr = pixelData.Span)
    {
        var data = new byte*[4] { (byte*)ptr, null, null, null };
        var linesize = new int[4] { 4 * image.Width, 0, 0, 0 };
        ffmpeg.sws_scale(_swsContext, data, linesize, 0, image.Height, _frame->data, _frame->linesize)
            .ThrowExceptionIfError();
    }

    // Set presentation time
    _frame->pts = ptsMilliseconds;
    Encode(_frame);
}
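// Hypothetical call site for Encode above (the encoder instance and file name
// are illustrative): because the codec time_base is set to 1/1000 in the
// H264Encoder constructor below, pts is simply the frame time in milliseconds.
using (var image = SixLabors.ImageSharp.Image.Load<Bgra32>("frame_0001.png"))
{
    encoder.Encode(image, ptsMilliseconds: 33);
}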
public void OnVideoPacket(byte[] payload)
{
    unsafe
    {
        fixed (byte* payloadBuf = payload)
        {
            // Initialize the packet fully; partially assigning a stack AVPacket
            // leaves the remaining fields undefined (and does not even satisfy
            // C#'s definite-assignment rules).
            var avpkt = new AVPacket
            {
                size = payload.Length,
                data = payloadBuf
            };

            AVFrame* frame = ffmpeg.av_frame_alloc();

            int ret = ffmpeg.avcodec_send_packet(avctx, &avpkt);
            if (ret < 0)
            {
                throw new Exception("send_packet: " + ret);
            }

            while (true)
            {
                ret = ffmpeg.avcodec_receive_frame(avctx, frame);
                if (ret == ffmpeg.AVERROR(ffmpeg.EAGAIN)) // needs more input for more frames
                {
                    break;
                }
                if (ret < 0)
                {
                    throw new Exception("receive_frame: " + ret);
                }

                int stride = 3 * frame->width;
                byte[] imgData = new byte[stride * frame->height];
                fixed (byte* dataPtr = imgData)
                {
                    // Use the decoder-reported pixel format rather than assuming
                    // YUV420P, and free the per-frame context to avoid leaking it.
                    // (Caching it across frames via sws_getCachedContext would be cheaper.)
                    SwsContext* ctx = ffmpeg.sws_getContext(
                        frame->width, frame->height, (AVPixelFormat)frame->format,
                        frame->width, frame->height, AVPixelFormat.AV_PIX_FMT_RGB24,
                        0, null, null, null);
                    ffmpeg.sws_scale(ctx, frame->data, frame->linesize, 0, frame->height,
                        new byte*[] { dataPtr }, new int[] { stride });
                    ffmpeg.sws_freeContext(ctx);
                }

                BitmapSource bitmap = BitmapSource.Create(frame->width, frame->height, 96, 96,
                    PixelFormats.Rgb24, null, imgData, stride);
                bitmap.Freeze();

                using (var fileStream = new FileStream("test.png", FileMode.Create))
                {
                    BitmapEncoder encoder = new PngBitmapEncoder();
                    encoder.Frames.Add(BitmapFrame.Create(bitmap));
                    encoder.Save(fileStream);
                }

                Dispatcher.Invoke(() => { OutputImage.Source = bitmap; });
            }

            // The original also allocated an unused frameRGB here; it has been removed.
            ffmpeg.av_frame_free(&frame);
        }
    }
}
private void InitializeVideo()
{
    // Extract pixel format and codec id
    var inputCodecContext = *(InputVideoStream->codec);
    var inputPixelFormat = inputCodecContext.pix_fmt;
    var inputCodecId = inputCodecContext.codec_id;

    // Populate basic properties
    VideoCodec = inputCodecContext.codec_id.ToString(); // Utils.GetAnsiString(new IntPtr(inputCodecContext.codec_name));
    VideoBitrate = (int)inputCodecContext.bit_rate;
    VideoFrameWidth = inputCodecContext.width;
    VideoFrameHeight = inputCodecContext.height;
    VideoFrameRate = Convert.ToDecimal(Convert.ToDouble(inputCodecContext.framerate.num) /
                                       Convert.ToDouble(inputCodecContext.framerate.den));
    VideoFrameLength = VideoFrameRate > 0M ? 1M / VideoFrameRate : 0M;

    // Get an input decoder for the input codec
    AVCodec* inputDecoder = ffmpeg.avcodec_find_decoder(inputCodecId);
    if (inputDecoder == null)
    {
        throw new Exception("Unsupported video codec");
    }

    // Create a software scaling context -- this allows us to do fast colorspace conversion
    VideoResampler = ffmpeg.sws_getContext(
        VideoFrameWidth, VideoFrameHeight, inputPixelFormat,
        VideoFrameWidth, VideoFrameHeight, Constants.VideoOutputPixelFormat,
        (int)ffmpeg.SWS_BILINEAR, null, null, null);
    if (VideoResampler == null)
    {
        throw new Exception("Could not initialize the output conversion context");
    }

    // Create an output codec context -- we copy the data from the input context
    // and then proceed to adjust some output parameters.
    // Before it said: var outputCodecContext = &inputCodecContext;
    VideoCodecContext = ffmpeg.avcodec_alloc_context3(inputDecoder);
    if (ffmpeg.avcodec_copy_context(VideoCodecContext, &inputCodecContext) != Constants.SuccessCode)
    {
        throw new Exception("Could not create video output codec context from input");
    }

    if ((inputDecoder->capabilities & (int)ffmpeg.AV_CODEC_CAP_TRUNCATED) == (int)ffmpeg.AV_CODEC_CAP_TRUNCATED)
    {
        VideoCodecContext->flags |= (int)ffmpeg.AV_CODEC_FLAG_TRUNCATED;
    }

    if (ffmpeg.avcodec_open2(VideoCodecContext, inputDecoder, null) < Constants.SuccessCode)
    {
        throw new Exception("Could not open codec");
    }

    // All output frames will have the same length and will be held by the same structure: the decoder frame holder.
    DecodedPictureHolder = ffmpeg.av_frame_alloc();
    OutputPictureBufferLength = ffmpeg.avpicture_get_size(Constants.VideoOutputPixelFormat,
        VideoFrameWidth, VideoFrameHeight);
}
/// <summary>
/// Converts decoded, raw frame data in the frame source into a usable frame. <br />
/// The process includes performing picture, samples or text conversions
/// so that the decoded source frame data is easily usable in multimedia applications
/// </summary>
/// <param name="input">The source frame to use as an input.</param>
/// <param name="output">The target frame that will be updated with the source frame. If null is passed the frame will be instantiated.</param>
/// <returns>
/// Returns the updated output frame
/// </returns>
/// <exception cref="System.ArgumentNullException">input</exception>
internal override MediaBlock MaterializeFrame(MediaFrame input, ref MediaBlock output)
{
    if (output == null)
    {
        output = new VideoBlock();
    }

    var source = input as VideoFrame;
    var target = output as VideoBlock;
    if (source == null || target == null)
    {
        throw new ArgumentNullException($"{nameof(input)} and {nameof(output)} are either null or not of a compatible media type '{MediaType}'");
    }

    // Retrieve a suitable scaler or create it on the fly
    Scaler = ffmpeg.sws_getCachedContext(Scaler,
        source.Pointer->width, source.Pointer->height, GetPixelFormat(source.Pointer),
        source.Pointer->width, source.Pointer->height, OutputPixelFormat,
        ScalerFlags, null, null, null);

    // Perform scaling and save the data to our unmanaged buffer pointer
    var targetBufferStride = ffmpeg.av_image_get_linesize(OutputPixelFormat, source.Pointer->width, 0);
    var targetStride = new int[] { targetBufferStride };
    var targetLength = ffmpeg.av_image_get_buffer_size(OutputPixelFormat,
        source.Pointer->width, source.Pointer->height, 1);

    // Ensure proper allocation of the buffer:
    // if there is a size mismatch between the wanted buffer length and the existing one,
    // reallocate the buffer and set the new size (disposing of the existing one, if any).
    if (target.PictureBufferLength != targetLength)
    {
        if (target.PictureBuffer != IntPtr.Zero)
        {
            Marshal.FreeHGlobal(target.PictureBuffer);
        }

        target.PictureBufferLength = targetLength;
        target.PictureBuffer = Marshal.AllocHGlobal(target.PictureBufferLength);
    }

    var targetScan = new byte_ptrArray8();
    targetScan[0] = (byte*)target.PictureBuffer;

    // The scaling is done here
    var outputHeight = ffmpeg.sws_scale(Scaler, source.Pointer->data, source.Pointer->linesize,
        0, source.Pointer->height, targetScan, targetStride);

    // We set the target properties
    target.EndTime = source.EndTime;
    target.StartTime = source.StartTime;
    target.BufferStride = targetStride[0];
    target.Duration = source.Duration;
    target.PixelHeight = source.Pointer->height;
    target.PixelWidth = source.Pointer->width;

    return target;
}
public H264Encoder(string outputPath, Size frameSize)
{
    ffmpeg.RootPath = Path.Join(TestData.SolutionDir, "ffmpeg", "bin");
    Console.WriteLine("FFMPEG version: " + ffmpeg.av_version_info());

    _frameSize = frameSize;

    var codecId = AVCodecID.AV_CODEC_ID_H264;
    _pCodec = ffmpeg.avcodec_find_encoder(codecId);
    if (_pCodec == null)
    {
        throw new InvalidOperationException("Codec not found.");
    }

    _pCodecContext = ffmpeg.avcodec_alloc_context3(_pCodec);
    _pCodecContext->width = frameSize.Width;
    _pCodecContext->height = frameSize.Height;
    _pCodecContext->time_base = new AVRational { num = 1, den = 1000 };
    _pCodecContext->pix_fmt = AVPixelFormat.AV_PIX_FMT_YUV420P;
    ffmpeg.av_opt_set(_pCodecContext->priv_data, "preset", "superfast", 0);

    ffmpeg.avcodec_open2(_pCodecContext, _pCodec, null).ThrowExceptionIfError();

    _linesizeY = frameSize.Width;
    _linesizeU = frameSize.Width / 2;
    _linesizeV = frameSize.Width / 2;
    _ySize = _linesizeY * frameSize.Height;
    _uSize = _linesizeU * frameSize.Height / 2;

    _swsContext = null;

    // Allocate a frame
    _frame = ffmpeg.av_frame_alloc();
    _frame->width = _pCodecContext->width;
    _frame->height = _pCodecContext->height;
    _frame->format = (int)_pCodecContext->pix_fmt;
    ffmpeg.av_frame_get_buffer(_frame, 32);

    // Create output context for mp4
    AVFormatContext* outputContext;
    ffmpeg.avformat_alloc_output_context2(&outputContext, null, "mp4", null).ThrowExceptionIfError();
    _outputContext = outputContext;
    ffmpeg.avio_open2(&_outputContext->pb, outputPath, ffmpeg.AVIO_FLAG_WRITE, null, null)
        .ThrowExceptionIfError(); // the original left this return value unchecked

    // Create video stream in mp4 container
    _stream = ffmpeg.avformat_new_stream(_outputContext, _pCodec);
    ffmpeg.avcodec_parameters_from_context(_stream->codecpar, _pCodecContext)
        .ThrowExceptionIfError();
    _stream->sample_aspect_ratio = _pCodecContext->sample_aspect_ratio;
    _stream->time_base = _pCodecContext->time_base;
    ffmpeg.avformat_write_header(_outputContext, null);
}
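// A hedged teardown sketch to complete this encoder -- not part of the original
// snippet. It assumes all frames have already been submitted; a full
// implementation would first drain the codec (send a null frame and write the
// remaining packets) before writing the trailer.
public void Dispose()
{
    ffmpeg.av_write_trailer(_outputContext); // finalize the mp4 index
    ffmpeg.avio_closep(&_outputContext->pb); // close the output file
    ffmpeg.avformat_free_context(_outputContext);

    ffmpeg.sws_freeContext(_swsContext);
    fixed (AVFrame** frame = &_frame)
    {
        ffmpeg.av_frame_free(frame);
    }
    fixed (AVCodecContext** codecContext = &_pCodecContext)
    {
        ffmpeg.avcodec_free_context(codecContext);
    }
}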
private static void FreeScaler()
{
    if (ScalerCtx != null)
    {
        ffmpeg.sws_freeContext(ScalerCtx);
        ScalerCtx = null;
    }
}
protected override void Dispose(bool disposing)
{
    if (ctx == null)
    {
        return;
    }

    FF.sws_freeContext(ctx);
    ctx = null;
}
public VideoResampler(VideoFormat source, VideoFormat destination, SwsFlags flags = SwsFlags.FastBilinear)
{
    Source = source;
    Destination = destination;
    Flags = flags;

    ctx = FF.sws_getContext(
        source.Width, source.Height, source.PixelFormat,
        destination.Width, destination.Height, destination.PixelFormat,
        flags, null, null, null);
    if (ctx == null)
    {
        throw new InvalidOperationException("Could not initialize the scaling context.");
    }
}
/// <summary>
/// Releases unmanaged and - optionally - managed resources.
/// </summary>
/// <param name="alsoManaged"><c>true</c> to release both managed and unmanaged resources; <c>false</c> to release only unmanaged resources.</param>
protected override void Dispose(bool alsoManaged)
{
    if (Scaler != null)
    {
        RC.Current.Remove(Scaler);
        ffmpeg.sws_freeContext(Scaler);
        Scaler = null;
    }

    DestroyFiltergraph();
    base.Dispose(alsoManaged);
}
// Start is called before the first frame update
void Start()
{
    RegisterFFmpegBinaries();

    // Prepare textures and initial values
    screenWidth = GetComponent<Camera>().pixelWidth;
    screenHeight = GetComponent<Camera>().pixelHeight;
    Debug.Log("Width: " + screenWidth + ", Height: " + screenHeight);

    // Allocate the RTP stream
    this.streamer = new RtpVideoStreamer("rtp://127.0.0.1:9000/test/", screenWidth, screenHeight);

    // Allocate the frame to be sent
    this.srcFrame = ffmpeg.av_frame_alloc();
    this.srcFrame->format = (int)AVPixelFormat.AV_PIX_FMT_YUV420P;
    this.srcFrame->width = screenWidth;
    this.srcFrame->height = screenHeight;
    ffmpeg.av_frame_get_buffer(this.srcFrame, 32);

    // Allocate an RGB24-to-YUV420P conversion context for testing
    this._convertContext = ffmpeg.sws_getContext(
        screenWidth, screenHeight, AVPixelFormat.AV_PIX_FMT_RGB24,
        screenWidth, screenHeight, AVPixelFormat.AV_PIX_FMT_YUV420P,
        ffmpeg.SWS_BICUBIC, null, null, null);

    var convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(AVPixelFormat.AV_PIX_FMT_RGB24,
        screenWidth, screenHeight, 1);
    this._convertedFrameBufferPtr = Marshal.AllocHGlobal(convertedFrameBufferSize);
    this._convertDstData = new byte_ptrArray4();
    this._convertDstLinesize = new int_array4();

    // Set target frame rate (optional)
    Application.targetFrameRate = frameRate;

    tempRenderTexture = new RenderTexture(screenWidth, screenHeight, 0);
    tempTexture2D = new Texture2D(screenWidth, screenHeight, TextureFormat.RGB24, false);
    frameQueue = new Queue<byte[]>();

    frameNumber = 0;
    captureFrameTime = 1.0f / (float)frameRate;
    lastFrameTime = Time.time;

    // Kill the encoder thread if running from a previous execution
    if (encoderThread != null && (threadIsProcessing || encoderThread.IsAlive))
    {
        threadIsProcessing = false;
        encoderThread.Join();
    }

    // Start a new encoder thread
    threadIsProcessing = true;
    encoderThread = new Thread(EncodeAndSave);
    encoderThread.Start();
}
/// <summary>
/// Releases unmanaged and - optionally - managed resources.
/// </summary>
/// <param name="alsoManaged"><c>true</c> to release both managed and unmanaged resources; <c>false</c> to release only unmanaged resources.</param>
protected override void Dispose(bool alsoManaged)
{
    base.Dispose(alsoManaged);

    if (Scaler != null)
    {
        ffmpeg.sws_freeContext(Scaler);
        Scaler = null;
    }

    DestroyFiltergraph();
}
unsafe void InitDecoder()
{
    codec = ffmpeg.avcodec_find_decoder(AVCodecID.AV_CODEC_ID_MPEG4);
    //codec = ffmpeg.avcodec_find_decoder(AVCodecID.AV_CODEC_ID_MPEG2VIDEO);
    // Check the pointer itself; dereferencing codec->name on a null codec would crash.
    if (codec == null)
    {
        Debug.Print("Error finding decoder!");
    }

    c = ffmpeg.avcodec_alloc_context3(codec);
    c->bit_rate = bitrate;
    c->width = vidWidth;
    c->height = vidHeight;
    c->pix_fmt = AVPixelFormat.AV_PIX_FMT_YUV420P;

    int ret = ffmpeg.avcodec_open2(c, codec, null);
    if (ret < 0)
    {
        Debug.Print("Could not open codec!");
    }

    frame = ffmpeg.av_frame_alloc();
    if (frame == null)
    {
        Debug.Print("Could not allocate video frame!");
    }
    frame->format = (int)AVPixelFormat.AV_PIX_FMT_YUV420P;
    frame->width = c->width;
    frame->height = c->height;
    ret = ffmpeg.av_frame_get_buffer(frame, 32);
    if (ret < 0)
    {
        Debug.Print("Could not allocate video frame data!");
    }

    gbrFrame = ffmpeg.av_frame_alloc();
    gbrFrame->format = (int)AVPixelFormat.AV_PIX_FMT_BGR24;
    gbrFrame->width = vidWidth;
    gbrFrame->height = vidHeight;
    ret = ffmpeg.av_frame_get_buffer(gbrFrame, 32);

    gbr_swctx = ffmpeg.sws_getContext(
        frame->width, frame->height, (AVPixelFormat)frame->format,
        gbrFrame->width, gbrFrame->height, (AVPixelFormat)gbrFrame->format,
        ffmpeg.SWS_BICUBIC, // was the magic number 4, which is SWS_BICUBIC
        null, null, null);
    if (gbr_swctx == null)
    {
        Debug.Print("Error getting sws context!");
    }
}
protected void Dispose(bool disposing)
{
    if (m_scalingContext != null)
    {
        FFmpeg.sws_freeContext(m_scalingContext);
        m_scalingContext = null;
    }

    if (m_outPictAllocated)
    {
        FFmpeg.avpicture_free(ref m_outPict);
    }
}
/// <summary>
/// Releases unmanaged and - optionally - managed resources.
/// </summary>
/// <param name="alsoManaged"><c>true</c> to release both managed and unmanaged resources; <c>false</c> to release only unmanaged resources.</param>
protected override void Dispose(bool alsoManaged)
{
    if (Scaler != null)
    {
        RC.Current.Remove(Scaler);
        ffmpeg.sws_freeContext(Scaler);
        Scaler = null;
    }

    DestroyFiltergraph();
    HardwareAccelerator?.DetachDevice(this);
    base.Dispose(alsoManaged);
}
public void Close()
{
    if (decodeThread != null && decodeThread.IsAlive)
    {
        stopThread = true;
        decodeARE.Set();
        Utils.EnsureThreadDone(decodeThread);
        stopThread = false;
    }

    if (status == Status.None)
    {
        return;
    }

    if (demuxer.enabledStreams.Contains(st->index))
    {
        Log($"Closing StreamIndex #{st->index}");
        st->discard = AVDiscard.AVDISCARD_ALL;
        demuxer.enabledStreams.Remove(st->index);
    }

    Flush();

    if (type == MediaType.Video)
    {
        av_buffer_unref(&codecCtx->hw_device_ctx);
        if (swsCtx != null)
        {
            sws_freeContext(swsCtx);
            swsCtx = null;
        }
    }
    //else if (type == MediaType.Audio)
    //{
    //    //fixed (SwrContext** ptr = &swrCtx) swr_free(ptr);
    //}

    avcodec_close(codecCtx);
    if (frame != null)
        fixed (AVFrame** ptr = &frame)
            av_frame_free(ptr);
    if (codecCtx != null)
        fixed (AVCodecContext** ptr = &codecCtx)
            avcodec_free_context(ptr);
    codecCtx = null;

    decodeARE.Reset();
    demuxer = null;
    st = null;
    info = null;
    isEmbedded = false;
    status = Status.None;
}
private void Initialize(int width, int height, AVPixelFormat inFormat)
{
    _initialized = true;

    _pContext = FFmpegInvoke.sws_getContext(width, height, inFormat,
        width, height, _pixelFormat,
        FFmpegInvoke.SWS_FAST_BILINEAR, null, null, null);
    if (_pContext == null)
        throw new VideoConverterException("Could not initialize the conversion context.");

    _pCurrentFrame = FFmpegInvoke.avcodec_alloc_frame();

    int outputDataSize = FFmpegInvoke.avpicture_get_size(_pixelFormat, width, height);
    _outputData = new byte[outputDataSize];

    // NB: the managed buffer is only pinned for the duration of this block;
    // this code relies on _outputData not being relocated by the GC afterwards,
    // which is not guaranteed (a pinned GCHandle would be safer).
    fixed (byte* pOutputData = &_outputData[0])
    {
        FFmpegInvoke.avpicture_fill((AVPicture*)_pCurrentFrame, pOutputData, _pixelFormat, width, height);
    }
}
private void prepareFilters()
{
    // only convert if needed
    if (stream->codec->pix_fmt == AVPixelFormat.AV_PIX_FMT_YUV420P)
    {
        convert = false;
        return;
    }

    // SWS_FAST_BILINEAR == 1
    // https://www.ffmpeg.org/doxygen/3.1/swscale_8h_source.html#l00056
    convCtx = ffmpeg.sws_getContext(
        stream->codec->width, stream->codec->height, stream->codec->pix_fmt,
        stream->codec->width, stream->codec->height, AVPixelFormat.AV_PIX_FMT_YUV420P,
        ffmpeg.SWS_FAST_BILINEAR, null, null, null);
}
public VideoFrame ConvertFrame(MediaFrame srcFrame)
{
    AVFrame* src = srcFrame;
    AVFrame* dst = dstFrame;

    if (pSwsContext == null && !isDisposing)
    {
        pSwsContext = ffmpeg.sws_getContext(
            src->width, src->height, (AVPixelFormat)src->format,
            DstWidth, DstHeight, DstFormat,
            SwsFlag, null, null, null);
    }

    ffmpeg.sws_scale(pSwsContext, src->data, src->linesize, 0, src->height, dst->data, dst->linesize)
        .ThrowIfError();

    return dstFrame as VideoFrame;
}
private unsafe static SwsContext* CreateConversionContext(AVStream* stream, AVPixelFormat destinationPixelFormat)
{
    AVCodecContext* codecContext = stream->codec;

    SwsContext* convertContext = ffmpeg.sws_getContext(
        codecContext->width, codecContext->height, codecContext->pix_fmt,
        codecContext->width, codecContext->height, destinationPixelFormat,
        ffmpeg.SWS_FAST_BILINEAR, null, null, null);
    if (convertContext == null)
        throw new ApplicationException(@"Could not initialize the conversion context");

    return convertContext;
}
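// A minimal companion sketch (not in the original source): every context
// returned by CreateConversionContext must eventually be released, e.g.:
private static unsafe void FreeConversionContext(ref SwsContext* convertContext)
{
    if (convertContext != null)
    {
        ffmpeg.sws_freeContext(convertContext);
        convertContext = null;
    }
}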
public unsafe VideoFrameConverter(Size sourceSize, AVPixelFormat sourcePixelFormat,
                                  Size destinationSize, AVPixelFormat destinationPixelFormat)
{
    this._destinationSize = destinationSize;

    this._pConvertContext = ffmpeg.sws_getContext(
        sourceSize.Width, sourceSize.Height, sourcePixelFormat,
        destinationSize.Width, destinationSize.Height, destinationPixelFormat,
        ffmpeg.SWS_FAST_BILINEAR, // was the literal 1, which is SWS_FAST_BILINEAR
        null, null, null);
    if (this._pConvertContext == null)
    {
        throw new ApplicationException("Could not initialize the conversion context.");
    }

    int convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(destinationPixelFormat,
        destinationSize.Width, destinationSize.Height, 1);
    this._convertedFrameBufferPtr = Marshal.AllocHGlobal(convertedFrameBufferSize);
    this._dstData = default(byte_ptrArray4);
    this._dstLinesize = default(int_array4);

    ffmpeg.av_image_fill_arrays(ref this._dstData, ref this._dstLinesize,
        (byte*)this._convertedFrameBufferPtr, destinationPixelFormat,
        destinationSize.Width, destinationSize.Height, 1);
}
public void Close()
{
    if (status == Status.NOTSET)
    {
        return;
    }

    if (decodeThread.IsAlive)
    {
        forcePause = true;
        Thread.Sleep(20);
        if (decodeThread.IsAlive)
        {
            decodeThread.Abort();
        }
    }

    if (demuxer.enabledStreams.Contains(st->index))
    {
        st->discard = AVDiscard.AVDISCARD_ALL;
        demuxer.enabledStreams.Remove(st->index);
    }

    Flush();

    if (type == Type.Video)
    {
        av_buffer_unref(&codecCtx->hw_device_ctx);
        if (swsCtx != null)
        {
            sws_freeContext(swsCtx);
            swsCtx = null;
        }
    }
    else if (type == Type.Audio)
    {
        //fixed (SwrContext** ptr = &swrCtx) swr_free(ptr);
    }

    avcodec_close(codecCtx);
    if (frame != null)
        fixed (AVFrame** ptr = &frame)
            av_frame_free(ptr);
    if (codecCtx != null)
        fixed (AVCodecContext** ptr = &codecCtx)
            avcodec_free_context(ptr);
    codecCtx = null;

    decodeARE.Reset();
    demuxer = null;
    st = null;
    info = null;
    isEmbedded = false;
    status = Status.NOTSET;
}
public VideoFrameConverter(Size sourceSize, AVPixelFormat sourcePixelFormat,
                           Size destinationSize, AVPixelFormat destinationPixelFormat)
{
    _destinationSize = destinationSize;

    _pConvertContext = ffmpeg.sws_getContext(
        (int)sourceSize.Width, (int)sourceSize.Height, sourcePixelFormat,
        (int)destinationSize.Width, (int)destinationSize.Height, destinationPixelFormat,
        ffmpeg.SWS_FAST_BILINEAR, null, null, null);
    if (_pConvertContext == null)
    {
        throw new ApplicationException("Could not initialize the conversion context.");
    }

    var convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(destinationPixelFormat,
        (int)destinationSize.Width, (int)destinationSize.Height, 1);
    _convertedFrameBufferPtr = Marshal.AllocHGlobal(convertedFrameBufferSize);
    _dstData = new byte_ptrArray4();
    _dstLineSize = new int_array4();

    ffmpeg.av_image_fill_arrays(ref _dstData, ref _dstLineSize, (byte*)_convertedFrameBufferPtr,
        destinationPixelFormat, (int)destinationSize.Width, (int)destinationSize.Height, 1);
}
public VideoFrameConverter(
    int sourceWidth, int sourceHeight, AVPixelFormat sourcePixelFormat,
    int destinationWidth, int destinationHeight,
    AVPixelFormat destinationPixelFormat = AVPixelFormat.AV_PIX_FMT_RGBA)
{
    _destinationWidth = destinationWidth;
    _destinationHeight = destinationHeight;

    _pConvertContext = ffmpeg.sws_getContext(
        sourceWidth, sourceHeight, sourcePixelFormat,
        destinationWidth, destinationHeight, destinationPixelFormat,
        ffmpeg.SWS_FAST_BILINEAR, null, null, null);
    if (_pConvertContext == null)
    {
        throw new FFmpegException("Could not initialize the conversion context.");
    }

    var convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(
        destinationPixelFormat, destinationWidth, destinationHeight, 1);
    _convertedFrameBufferPtr = Marshal.AllocHGlobal(convertedFrameBufferSize);
    _dstData = default;
    _dstLinesize = default;

    ffmpeg.av_image_fill_arrays(
        ref _dstData, ref _dstLinesize, (byte*)_convertedFrameBufferPtr,
        destinationPixelFormat, destinationWidth, destinationHeight, 1);
}
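// Hypothetical usage of the converter above (the decodedFrame variable and the
// target size are illustrative), assuming the class exposes a Convert method
// like the sketch earlier and implements IDisposable:
using var converter = new VideoFrameConverter(
    decodedFrame.width, decodedFrame.height, (AVPixelFormat)decodedFrame.format,
    destinationWidth: 640, destinationHeight: 360); // output defaults to AV_PIX_FMT_RGBA
AVFrame rgbaFrame = converter.Convert(decodedFrame);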
public void Close()
{
    if (!_opened)
        return;

    FFmpegInvoke.av_free(_pConvertedFrame);
    FFmpegInvoke.av_free(_pConvertedFrameBuffer);
    FFmpegInvoke.sws_freeContext(_pConvertContext);
    FFmpegInvoke.av_free(_pDecodedFrame);
    FFmpegInvoke.avcodec_close(_pStream->codec);

    fixed (AVFormatContext** pFormatContext = &_pFormatContext)
    {
        FFmpegInvoke.avformat_close_input(pFormatContext);
    }

    _videoClock = 0;

    _pFormatContext = null;
    _pStream = null;
    _pDecodedFrame = null;
    _pConvertedFrame = null;
    _pConvertedFrameBuffer = null;
    _pConvertContext = null;

    _opened = false;
}
public void Open(string FileName)
{
    DecoderConfig.Init();

    AVFormatContext* pFormatContext = FFmpegInvoke.avformat_alloc_context();
    _pFormatContext = pFormatContext;

    if (FFmpegInvoke.avformat_open_input(&pFormatContext, FileName, null, null) != 0)
        throw new Exception("Could not open file");

    if (FFmpegInvoke.avformat_find_stream_info(pFormatContext, null) != 0)
        throw new Exception("Could not find stream info");

    for (int i = 0; i < pFormatContext->nb_streams; i++)
    {
        if (pFormatContext->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
        {
            _pStream = pFormatContext->streams[i];
            break;
        }
    }
    if (_pStream == null)
        throw new Exception("Could not find video stream");

    AVCodecContext codecContext = *(_pStream->codec);
    codecContext.workaround_bugs = FFmpegInvoke.FF_BUG_AUTODETECT;

    _frameduration = 1 / q2d(_pStream->r_frame_rate);
    FrameCount = _pStream->nb_frames;
    Duration = (float)pFormatContext->duration / FFmpegInvoke.AV_TIME_BASE;
    Width = codecContext.width;
    Height = codecContext.height;

    AVPixelFormat sourcePixFmt = codecContext.pix_fmt;
    AVCodecID codecId = codecContext.codec_id;
    var convertToPixFmt = AVPixelFormat.AV_PIX_FMT_RGB24;

    _pConvertContext = FFmpegInvoke.sws_getContext(Width, Height, sourcePixFmt,
        Width, Height, convertToPixFmt,
        FFmpegInvoke.SWS_FAST_BILINEAR, null, null, null);
    if (_pConvertContext == null)
        throw new Exception("Could not initialize the conversion context");

    _pConvertedFrame = (AVPicture*)FFmpegInvoke.avcodec_alloc_frame();
    int convertedFrameBufferSize = FFmpegInvoke.avpicture_get_size(convertToPixFmt, Width, Height);
    _pConvertedFrameBuffer = (byte*)FFmpegInvoke.av_malloc((uint)convertedFrameBufferSize);
    FFmpegInvoke.avpicture_fill(_pConvertedFrame, _pConvertedFrameBuffer, convertToPixFmt, Width, Height);

    AVCodec* pCodec = FFmpegInvoke.avcodec_find_decoder(codecId);
    if (pCodec == null)
        throw new Exception("Unsupported codec");

    if (FFmpegInvoke.avcodec_open2(_pStream->codec, pCodec, null) < 0)
        throw new Exception("Could not open codec");

    _pDecodedFrame = FFmpegInvoke.avcodec_alloc_frame();

    _packet = new AVPacket();
    fixed (AVPacket* pPacket = &_packet)
    {
        FFmpegInvoke.av_init_packet(pPacket);
    }

    _opened = true;
}
/// <summary>
/// Releases all managed and unmanaged resources
/// </summary>
public void Dispose()
{
    if (IsCancellationPending)
        return;

    this.IsCancellationPending = true;

    this.VideoRenderTimer.Stop();

    if (this.AudioRenderer != null)
    {
        if (this.AudioRenderer.HasInitialized)
            this.AudioRenderer.Stop();
        this.AudioRenderer.Dispose();
        this.AudioRenderer = null;
    }

    if (MediaFrameExtractorThread != null)
    {
        MediaFrameExtractorThread.Join();
        MediaFrameExtractorThread = null;
    }

    if (MediaFramesExtractedDone != null)
    {
        MediaFramesExtractedDone.Dispose();
        MediaFramesExtractedDone = null;
    }

    if (PrimaryFramesCache != null)
    {
        PrimaryFramesCache.Clear();
        PrimaryFramesCache = null;
    }

    if (SecondaryFramesCache != null)
    {
        SecondaryFramesCache.Clear();
        SecondaryFramesCache = null;
    }

    if (VideoCodecContext != null)
    {
        fixed (AVCodecContext** videoCodecContextRef = &VideoCodecContext)
        {
            ffmpeg.avcodec_close(VideoCodecContext);
            ffmpeg.avcodec_free_context(videoCodecContextRef);
            VideoCodecContext = null;
        }
    }

    if (AudioCodecContext != null)
    {
        fixed (AVCodecContext** audioCodecContextRef = &AudioCodecContext)
        {
            ffmpeg.avcodec_close(AudioCodecContext);
            ffmpeg.avcodec_free_context(audioCodecContextRef);
            AudioCodecContext = null;
        }
    }

    if (VideoResampler != null)
    {
        ffmpeg.sws_freeContext(VideoResampler);
        VideoResampler = null;
    }

    if (AudioResampler != null)
    {
        fixed (SwrContext** audioResamplerRef = &AudioResampler)
        {
            ffmpeg.swr_close(AudioResampler);
            ffmpeg.swr_free(audioResamplerRef);
            AudioResampler = null;
        }
    }

    if (InputFormatContext != null)
    {
        fixed (AVFormatContext** inputFormatContextRef = &InputFormatContext)
        {
            ffmpeg.avformat_close_input(inputFormatContextRef);
            ffmpeg.avformat_free_context(InputFormatContext);
            InputFormatContext = null;
        }
    }

    if (DecodedPictureHolder != null)
    {
        ffmpeg.av_free(DecodedPictureHolder);
        DecodedPictureHolder = null;
    }

    if (DecodedWaveHolder != null)
    {
        ffmpeg.av_free(DecodedWaveHolder);
        DecodedWaveHolder = null;
    }
}