Example #1
        public byte[] ConvertToBuffer(byte[] srcData)
        {
            byte_ptrArray4 src       = new byte_ptrArray4();
            int_array4     srcStride = new int_array4();

            fixed(byte *pSrcData = srcData)
            {
                ffmpeg.av_image_fill_arrays(ref src, ref srcStride, pSrcData, _srcPixelFormat, _srcWidth, _srcHeight, 1).ThrowExceptionIfError();

                // Keep srcData pinned while sws_scale reads from it; the original
                // closed the fixed block first, leaving src pointing at unpinned memory.
                ffmpeg.sws_scale(_pConvertContext, src, srcStride, 0, _srcHeight, _dstData, _dstLinesize).ThrowExceptionIfError();
            }

            int outputBufferSize = ffmpeg.av_image_get_buffer_size(_dstPixelFormat, _dstWidth, _dstHeight, 1);

            byte[] outputBuffer = new byte[outputBufferSize];

            fixed(byte *pOutData = outputBuffer)
            {
                ffmpeg.av_image_copy_to_buffer(pOutData, outputBufferSize, _dstData, _dstLinesize, _dstPixelFormat, _dstWidth, _dstHeight, 1)
                .ThrowExceptionIfError();
            }

            return(outputBuffer);
        }
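
None of the converter examples on this page shows teardown, even though they pair sws_getContext with Marshal.AllocHGlobal. A minimal cleanup sketch, assuming the field names used above (_pConvertContext, _convertedFrameBufferPtr) rather than code from the original source:

        public void Dispose()
        {
            // Free the unmanaged destination buffer and the swscale context
            // allocated in the constructor.
            Marshal.FreeHGlobal(_convertedFrameBufferPtr);
            ffmpeg.sws_freeContext(_pConvertContext);
        }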
Example #2
        public CFrameConverter(Size FrameSize, AVPixelFormat pix_fmt)
        {
            this.FrameSize = FrameSize;
            if (pix_fmt != CVPxfmt)
            {
                convert_context = ffmpeg.sws_getContext(
                    FrameSize.Width,
                    FrameSize.Height,
                    pix_fmt,
                    FrameSize.Width,
                    FrameSize.Height,
                    CVPxfmt,
                    ffmpeg.SWS_FAST_BILINEAR, null, null, null);
                if (convert_context == null)
                {
                    throw new ApplicationException("Could not initialize the conversion context.");
                }
                this.IsConvert = true;
            }
            _convertedFrameBufferPtr = Marshal.AllocHGlobal(ffmpeg.av_image_get_buffer_size(CVPxfmt, FrameSize.Width, FrameSize.Height, 1));

            _dstData     = new byte_ptrArray4();
            _dstLinesize = new int_array4();
            ffmpeg.av_image_fill_arrays(ref _dstData, ref _dstLinesize, (byte *)_convertedFrameBufferPtr, CVPxfmt, FrameSize.Width, FrameSize.Height, 1);
        }
Example #3
        public VideoFrameConverter(Size sourceSize, AVPixelFormat sourcePixelFormat, Size destinationSize, AVPixelFormat destinationPixelFormat)
        {
            _destinationSize = destinationSize;

            _pConvertContext = ffmpeg.sws_getContext(sourceSize.Width, sourceSize.Height, sourcePixelFormat, destinationSize.Width, destinationSize.Height, destinationPixelFormat, ffmpeg.SWS_FAST_BILINEAR, null, null, null);

            if (_pConvertContext == null)
            {
                throw new ApplicationException("Could not initialize the conversion context.");
            }

            var convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(destinationPixelFormat, destinationSize.Width, destinationSize.Height, 1);

            _convertedFrameBufferPtr = Marshal.AllocHGlobal(convertedFrameBufferSize);
            var dstData     = new byte_ptrArray4();
            var dstLinesize = new int_array4();

            ffmpeg.av_image_fill_arrays(ref dstData, ref dstLinesize, (byte *)_convertedFrameBufferPtr, destinationPixelFormat, destinationSize.Width, destinationSize.Height, 1);

            _dstLinesize = dstLinesize.ToArray();
            _dstData     = dstData.ToArray();

            // dstData and dstLinesize are plain stack structs filled by
            // av_image_fill_arrays; they must not be passed to av_free (the
            // original did, which corrupts the stack) and need no nulling.
        }
Example #4
        public VideoFrameConverter(Size sourceSize, AVPixelFormat sourcePixelFormat,
                                   Size destinationSize, AVPixelFormat destinationPixelFormat)
        {
            _srcSize        = sourceSize;
            _dstSize        = destinationSize;
            _srcPixelFormat = sourcePixelFormat;
            _dstPixelFormat = destinationPixelFormat;

            _pConvertContext = ffmpeg.sws_getContext(sourceSize.Width, sourceSize.Height, sourcePixelFormat,
                                                     destinationSize.Width,
                                                     destinationSize.Height, destinationPixelFormat,
                                                     ffmpeg.SWS_FAST_BILINEAR, null, null, null);
            if (_pConvertContext == null)
            {
                throw new ApplicationException("Could not initialize the conversion context.");
            }

            var convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(destinationPixelFormat, destinationSize.Width, destinationSize.Height, 1);

            _convertedFrameBufferPtr = Marshal.AllocHGlobal(convertedFrameBufferSize);
            _dstData     = new byte_ptrArray4();
            _dstLinesize = new int_array4();

            ffmpeg.av_image_fill_arrays(ref _dstData, ref _dstLinesize, (byte *)_convertedFrameBufferPtr, destinationPixelFormat, destinationSize.Width, destinationSize.Height, 1);

            logger.LogDebug($"Successfully initialised ffmpeg based image converter for {sourceSize}:{sourcePixelFormat}->{_dstSize}:{_dstPixelFormat}.");
        }
Example #5
        public VideoFrameConverter(Size sourceSize, AVPixelFormat sourcePixelFormat,
                                   Size destinationSize, AVPixelFormat destinationPixelFormat)
        {
            _destinationSize = destinationSize;

            _pConvertContext = ffmpeg.sws_getContext(
                sourceSize.Width, sourceSize.Height, sourcePixelFormat,
                destinationSize.Width, destinationSize.Height, destinationPixelFormat,
                ffmpeg.SWS_FAST_BILINEAR, null, null, null);

            if (_pConvertContext == null)
            {
                // Logging and carrying on (as the original did) leaves later
                // sws_scale calls with a null context; fail fast instead.
                throw new ApplicationException("Could not initialize the conversion context.");
            }

            var convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(
                destinationPixelFormat, destinationSize.Width, destinationSize.Height, 1);

            _convertedFrameBufferPtr = Marshal.AllocHGlobal(convertedFrameBufferSize);

            _dstData     = new byte_ptrArray4();
            _dstLinesize = new int_array4();

            ffmpeg.av_image_fill_arrays(
                ref _dstData, ref _dstLinesize,
                (byte *)_convertedFrameBufferPtr,
                destinationPixelFormat, destinationSize.Width, destinationSize.Height, 1);
        }
Example #6
        public AVFrame Convert(byte *pSrcData)
        {
            byte_ptrArray4 src       = new byte_ptrArray4();
            int_array4     srcStride = new int_array4();

            ffmpeg.av_image_fill_arrays(ref src, ref srcStride, pSrcData, _srcPixelFormat, _srcWidth, _srcHeight, 1).ThrowExceptionIfError();

            ffmpeg.sws_scale(_pConvertContext, src, srcStride, 0, _srcHeight, _dstData, _dstLinesize).ThrowExceptionIfError();

            var data = new byte_ptrArray8();

            data.UpdateFrom(_dstData);
            var linesize = new int_array8();

            linesize.UpdateFrom(_dstLinesize);

            return(new AVFrame
            {
                data = data,
                linesize = linesize,
                width = _dstWidth,
                height = _dstHeight,
                format = (int)_dstPixelFormat
            });
        }
Example #7
        public VideoFrameConverter(Size sourceSize, AVPixelFormat sourcePixelFormat,
                                   Size destinationSize, AVPixelFormat destinationPixelFormat)
        {
            if (sourceSize.Width == 0 || sourceSize.Height == 0 || sourcePixelFormat == AVPixelFormat.AV_PIX_FMT_NONE)
            {
                throw new InvalidOperationException($"Invalid source: {sourceSize} [{sourcePixelFormat}]");
            }
            _destinationSize = destinationSize;

            _pConvertContext = ffmpeg.sws_getContext(sourceSize.Width, sourceSize.Height, sourcePixelFormat,
                                                     destinationSize.Width,
                                                     destinationSize.Height, destinationPixelFormat,
                                                     ffmpeg.SWS_FAST_BILINEAR, null, null, null);
            if (_pConvertContext == null)
            {
                throw new ApplicationException("Could not initialize the conversion context.");
            }

            var convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(destinationPixelFormat, destinationSize.Width, destinationSize.Height, 1);

            _convertedFrameBufferPtr = Marshal.AllocHGlobal(convertedFrameBufferSize);
            _dstData     = new byte_ptrArray4();
            _dstLinesize = new int_array4();

            ffmpeg.av_image_fill_arrays(ref _dstData, ref _dstLinesize, (byte *)_convertedFrameBufferPtr, destinationPixelFormat, destinationSize.Width, destinationSize.Height, 1);
        }
Example #8
        public AVFrame Convert(AVFrame sourceFrame)
        {
            // Scale into the converter's pre-allocated destination planes
            // (_dstData/_dstLinesize, as initialised by the constructors shown
            // elsewhere on this page). The original passed freshly created,
            // unfilled arrays here, making sws_scale write through null
            // pointers, and then discarded the result.
            ffmpeg.sws_scale(_pConvertContext, sourceFrame.data, sourceFrame.linesize, 0, sourceFrame.height, _dstData, _dstLinesize);

            var data = new byte_ptrArray8();
            data.UpdateFrom(_dstData);
            var linesize = new int_array8();
            linesize.UpdateFrom(_dstLinesize);

            return(new AVFrame { data = data, linesize = linesize, width = _destinationSize.Width, height = _destinationSize.Height });
        }
Example #9
 private static byte_ptrArray8 From4(byte_ptrArray4 val)
 {
     return(new byte_ptrArray8
     {
         [0] = val[0],
         [1] = val[1],
         [2] = val[2],
         [3] = val[3]
     });
 }
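
Example #15 below passes an int_array4 to From4 as well, so a matching overload is implied. A sketch of it, assuming the same element-copy pattern (the overload is not shown in the original source):

 private static int_array8 From4(int_array4 val)
 {
     // Copy the four plane strides into the 8-slot AVFrame layout;
     // the remaining slots stay zero.
     return(new int_array8
     {
         [0] = val[0],
         [1] = val[1],
         [2] = val[2],
         [3] = val[3]
     });
 }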
Example #10
        public AVFrame Convert(byte[] srcData)
        {
            byte_ptrArray4 src       = new byte_ptrArray4();
            int_array4     srcStride = new int_array4();

            fixed(byte *pSrcData = srcData)
            {
                ffmpeg.av_image_fill_arrays(ref src, ref srcStride, pSrcData, _srcPixelFormat, _srcWidth, _srcHeight, 1).ThrowExceptionIfError();

                // Keep srcData pinned while sws_scale reads from it; the original
                // scaled after the fixed block had already closed.
                ffmpeg.sws_scale(_pConvertContext, src, srcStride, 0, _srcHeight, _dstData, _dstLinesize).ThrowExceptionIfError();
            }

            var data = new byte_ptrArray8();

            data.UpdateFrom(_dstData);
            var linesize = new int_array8();

            linesize.UpdateFrom(_dstLinesize);

            return(new AVFrame
            {
                data = data,
                linesize = linesize,
                width = _dstWidth,
                height = _dstHeight
            });
        }
Example #11
 public DecodedFrame(byte *buffer, int bufferSize, int width, int height, AVPixelFormat format, byte_ptrArray4 destData, int_array4 destLineSize, bool sharedBuffer)
 {
     Buffer       = buffer;
     BufferSize   = bufferSize;
     Width        = width;
     Height       = height;
     Format       = format;
     DestData     = destData;
     DestLineSize = destLineSize;
     SharedBuffer = sharedBuffer;
 }
Example #12
        public DecodedFrame Convert(RawFrame source, byte *targetBuffer)
        {
            var destData     = new byte_ptrArray4();
            var destLineSize = new int_array4();

            ffmpeg.av_image_fill_arrays(ref destData, ref destLineSize, targetBuffer, _targetFormat, _targetWidth, _targetHeight, _align).ThrowExceptionIfError(nameof(ffmpeg.av_image_fill_arrays));

            var frame = source.Frame;

            ffmpeg.sws_scale(_converterContext, frame->data, frame->linesize, 0, frame->height, destData, destLineSize).ThrowExceptionIfError(nameof(ffmpeg.sws_scale));
            return(new DecodedFrame(targetBuffer, _targetBufferSize, _targetWidth, _targetHeight, _targetFormat, destData, destLineSize, sharedBuffer: true));
        }
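
A possible call pattern for this converter. The converter and frame variables are hypothetical, but the sizing call matches the fields used above:

            // Hypothetical usage: size one reusable target buffer with FFmpeg's
            // own calculation, then convert each decoded frame into it.
            int   size         = ffmpeg.av_image_get_buffer_size(_targetFormat, _targetWidth, _targetHeight, _align);
            byte *targetBuffer = (byte *)ffmpeg.av_malloc((ulong)size);
            DecodedFrame decoded = converter.Convert(rawFrame, targetBuffer);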
Example #13
    // Start is called before the first frame update
    void Start()
    {
        RegisterFFmpegBinaries();

        // Prepare textures and initial values
        screenWidth  = GetComponent <Camera>().pixelWidth;
        screenHeight = GetComponent <Camera>().pixelHeight;
        Debug.Log("Width: " + screenWidth + ", Height: " + screenHeight);

        // Allocate the RTP stream
        this.streamer = new RtpVideoStreamer("rtp://127.0.0.1:9000/test/", screenWidth, screenHeight);
        // Allocate the frame to send
        this.srcFrame         = ffmpeg.av_frame_alloc();
        this.srcFrame->format = (int)AVPixelFormat.AV_PIX_FMT_YUV420P;
        this.srcFrame->width  = screenWidth;
        this.srcFrame->height = screenHeight;
        ffmpeg.av_frame_get_buffer(this.srcFrame, 32);
        // For testing, allocate an RGB24-to-YUV420P conversion context
        this._convertContext = ffmpeg.sws_getContext(
            screenWidth, screenHeight, AVPixelFormat.AV_PIX_FMT_RGB24,
            screenWidth, screenHeight, AVPixelFormat.AV_PIX_FMT_YUV420P,
            ffmpeg.SWS_BICUBIC, null, null, null);

        var convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(AVPixelFormat.AV_PIX_FMT_RGB24, (int)screenWidth, (int)screenHeight, 1);

        this._convertedFrameBufferPtr = Marshal.AllocHGlobal(convertedFrameBufferSize);
        this._convertDstData          = new byte_ptrArray4();
        this._convertDstLinesize      = new int_array4();

        // Set target frame rate (optional)
        Application.targetFrameRate = frameRate;

        tempRenderTexture = new RenderTexture(screenWidth, screenHeight, 0);
        tempTexture2D     = new Texture2D(screenWidth, screenHeight, TextureFormat.RGB24, false);
        frameQueue        = new Queue <byte[]>();

        frameNumber = 0;

        captureFrameTime = 1.0f / (float)frameRate;
        lastFrameTime    = Time.time;

        // Kill the encoder thread if running from a previous execution
        if (encoderThread != null && (threadIsProcessing || encoderThread.IsAlive))
        {
            threadIsProcessing = false;
            encoderThread.Join();
        }

        // Start a new encoder thread
        threadIsProcessing = true;
        encoderThread      = new Thread(EncodeAndSave);
        encoderThread.Start();
    }
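
Note that Start() allocates _convertedFrameBufferPtr but never calls av_image_fill_arrays on _convertDstData/_convertDstLinesize; presumably the encoder thread does that. If not, the wiring would follow the same pattern as the other examples. A sketch, using the fields above:

        // Hypothetical completion of the setup above: point the plane arrays at
        // the RGB24 staging buffer whose size was computed with AV_PIX_FMT_RGB24.
        ffmpeg.av_image_fill_arrays(ref this._convertDstData, ref this._convertDstLinesize,
                                    (byte *)this._convertedFrameBufferPtr,
                                    AVPixelFormat.AV_PIX_FMT_RGB24, screenWidth, screenHeight, 1);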
Example #14
        public unsafe VideoFrameConverter(Size sourceSize, AVPixelFormat sourcePixelFormat, Size destinationSize, AVPixelFormat destinationPixelFormat)
        {
            this._destinationSize = destinationSize;
            this._pConvertContext = ffmpeg.sws_getContext(sourceSize.Width, sourceSize.Height, sourcePixelFormat, destinationSize.Width, destinationSize.Height, destinationPixelFormat, ffmpeg.SWS_FAST_BILINEAR, null, null, null);
            if (this._pConvertContext == null)
            {
                throw new ApplicationException("Could not initialize the conversion context.");
            }
            int convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(destinationPixelFormat, destinationSize.Width, destinationSize.Height, 1);

            this._convertedFrameBufferPtr = Marshal.AllocHGlobal(convertedFrameBufferSize);
            this._dstData     = default(byte_ptrArray4);
            this._dstLinesize = default(int_array4);
            ffmpeg.av_image_fill_arrays(ref this._dstData, ref this._dstLinesize, (byte *)(void *)this._convertedFrameBufferPtr, destinationPixelFormat, destinationSize.Width, destinationSize.Height, 1);
        }
Example #15
        // pass align: 8 for improved vectorization (the default of 1 means no row padding)

        public VideoFrame(PixelFormat pixelFormat, int width, int height, int align = 1)
        {
            if (pixelFormat == default)
            {
                throw new ArgumentException("Must not be Unknown", nameof(pixelFormat));
            }

            if (width <= 0)
            {
                throw new ArgumentException("Must be > 0", nameof(width));
            }

            if (height <= 0)
            {
                throw new ArgumentException("Must be > 0", nameof(height));
            }

            Format = pixelFormat;
            Width  = width;
            Height = height;

            pointer->format = (int)pixelFormat.ToAVFormat();
            pointer->width  = width;
            pointer->height = height;

            Memory = Buffer.Allocate(VideoFormatHelper.GetBufferSize(pixelFormat, width, height, align));

            // Fill the pointers

            planePointers = new byte_ptrArray4();

            var lineSizes = new int_array4();

            ffmpeg.av_image_fill_arrays(
                ref planePointers,
                ref lineSizes,
                src: (byte *)Memory.Pointer,
                pixelFormat.ToAVFormat(),
                width,
                height,
                align
                );

            Strides = GetStrides(lineSizes);

            pointer->data     = From4(planePointers);
            pointer->linesize = From4(lineSizes);
        }
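
GetStrides is not shown in the original. Given that FFmpeg.AutoGen's int_array4 exposes ToArray() (see its use in Example #3 above), a plausible sketch is:

        private static int[] GetStrides(int_array4 lineSizes)
        {
            // Snapshot the per-plane line sizes filled in by av_image_fill_arrays;
            // unused planes simply carry a stride of 0.
            return lineSizes.ToArray();
        }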
Example #16
        public VideoFrameConverter(Size sourceSize, AVPixelFormat sourcePixelFormat, Size destinationSize, AVPixelFormat destinationPixelFormat)
        {
            _destinationSize = destinationSize;
            _pConvertContext = ffmpeg.sws_getContext((int)sourceSize.Width, (int)sourceSize.Height, sourcePixelFormat,
                                                     (int)destinationSize.Width, (int)destinationSize.Height, destinationPixelFormat,
                                                     ffmpeg.SWS_FAST_BILINEAR, null, null, null);
            if (_pConvertContext == null)
            {
                throw new ApplicationException("Could not initialize the conversion context.");
            }

            var convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(destinationPixelFormat, (int)destinationSize.Width, (int)destinationSize.Height, 1);

            _convertedFrameBufferPtr = Marshal.AllocHGlobal(convertedFrameBufferSize);
            _dstData     = new byte_ptrArray4();
            _dstLineSize = new int_array4();

            ffmpeg.av_image_fill_arrays(ref _dstData, ref _dstLineSize, (byte *)_convertedFrameBufferPtr, destinationPixelFormat,
                                        (int)destinationSize.Width, (int)destinationSize.Height, 1);
        }
Example #17
    public VideoFrameConverter(
        int sourceWidth,
        int sourceHeight,
        AVPixelFormat sourcePixelFormat,
        int destinationWidth,
        int destinationHeight,
        AVPixelFormat destinationPixelFormat = AVPixelFormat.AV_PIX_FMT_RGBA)
    {
        _destinationWidth  = destinationWidth;
        _destinationHeight = destinationHeight;

        _pConvertContext = ffmpeg.sws_getContext(
            sourceWidth,
            sourceHeight,
            sourcePixelFormat,
            destinationWidth,
            destinationHeight,
            destinationPixelFormat,
            ffmpeg.SWS_FAST_BILINEAR,
            null,
            null,
            null);
        if (_pConvertContext == null)
        {
            throw new FFmpegException("Could not initialize the conversion context.");
        }

        var convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(
            destinationPixelFormat,
            destinationWidth,
            destinationHeight,
            1);

        _convertedFrameBufferPtr = Marshal.AllocHGlobal(convertedFrameBufferSize);
        _dstData     = default;
        _dstLinesize = default;

        ffmpeg.av_image_fill_arrays(
            ref _dstData,
            ref _dstLinesize,
            (byte *)_convertedFrameBufferPtr,
            destinationPixelFormat,
            destinationWidth,
            destinationHeight,
            1);
    }
Example #18
        public VideoStreamDecoder(string url, AVHWDeviceType HWDeviceType = AVHWDeviceType.AV_HWDEVICE_TYPE_NONE)
        {
            source          = url;
            _pFormatContext = ffmpeg.avformat_alloc_context();
            _receivedFrame  = ffmpeg.av_frame_alloc();
            var pFormatContext = _pFormatContext;

            ffmpeg.avformat_open_input(&pFormatContext, source, null, null).ThrowExceptionIfError();
            ffmpeg.avformat_find_stream_info(_pFormatContext, null).ThrowExceptionIfError();
            AVCodec *codec = null;

            _streamIndex   = ffmpeg.av_find_best_stream(_pFormatContext, AVMediaType.AVMEDIA_TYPE_VIDEO, -1, -1, &codec, 0).ThrowExceptionIfError();
            _pCodecContext = ffmpeg.avcodec_alloc_context3(codec);
            if (HWDeviceType != AVHWDeviceType.AV_HWDEVICE_TYPE_NONE)
            {
                ffmpeg.av_hwdevice_ctx_create(&_pCodecContext->hw_device_ctx, HWDeviceType, null, null, 0).ThrowExceptionIfError();
            }
            pStream = _pFormatContext->streams[_streamIndex];
            ffmpeg.avcodec_parameters_to_context(_pCodecContext, _pFormatContext->streams[_streamIndex]->codecpar).ThrowExceptionIfError();
            ffmpeg.avcodec_open2(_pCodecContext, codec, null).ThrowExceptionIfError();

            CodecName   = ffmpeg.avcodec_get_name(codec->id);
            FrameSize   = new Size(_pCodecContext->width, _pCodecContext->height);
            PixelFormat = _pCodecContext->pix_fmt;


            _pPacket = ffmpeg.av_packet_alloc();
            _pFrame  = ffmpeg.av_frame_alloc();

            PConvertContext = ffmpeg.sws_getContext(_pCodecContext->width, _pCodecContext->height, _pCodecContext->pix_fmt,
                                                    _pCodecContext->width, _pCodecContext->height, AVPixelFormat.AV_PIX_FMT_BGRA,
                                                    ffmpeg.SWS_FAST_BILINEAR, null, null, null);

            if (PConvertContext == null)
            {
                throw new ApplicationException(@"Could not initialize the conversion context.");
            }

            dstData                 = new byte_ptrArray4();
            dstLinesize             = new int_array4();
            convertedFrameBufferPtr = Marshal.AllocHGlobal(ffmpeg.av_image_get_buffer_size(AVPixelFormat.AV_PIX_FMT_BGRA, _pCodecContext->width, _pCodecContext->height, 1));
            ffmpeg.av_image_fill_arrays(ref dstData, ref dstLinesize,
                                        (byte *)convertedFrameBufferPtr,
                                        AVPixelFormat.AV_PIX_FMT_BGRA, _pCodecContext->width, _pCodecContext->height, 1);
        }
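
The decoder above only sets up state. A minimal read/decode step to go with it might look like the following; this is a sketch assuming the fields initialised above (_pPacket, _pFrame, _pFormatContext, _pCodecContext, _streamIndex) and the send/receive API used elsewhere on this page, with the hardware-frame transfer path via _receivedFrame omitted:

        public bool TryDecodeNextFrame(out AVFrame frame)
        {
            ffmpeg.av_frame_unref(_pFrame);
            int error;

            do
            {
                // Pull packets until one belongs to the selected video stream.
                do
                {
                    ffmpeg.av_packet_unref(_pPacket);
                    error = ffmpeg.av_read_frame(_pFormatContext, _pPacket);

                    if (error == ffmpeg.AVERROR_EOF)
                    {
                        frame = *_pFrame;
                        return false;
                    }

                    error.ThrowExceptionIfError();
                } while (_pPacket->stream_index != _streamIndex);

                ffmpeg.avcodec_send_packet(_pCodecContext, _pPacket).ThrowExceptionIfError();
                error = ffmpeg.avcodec_receive_frame(_pCodecContext, _pFrame);
            } while (error == ffmpeg.AVERROR(ffmpeg.EAGAIN)); // decoder needs more input

            error.ThrowExceptionIfError();
            frame = *_pFrame;
            return true;
        }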
Example #19
        public FrameData(AvPixelFormat format, int width, int height)
        {
            Width  = width;
            Height = height;
            Format = format;

            var bufferSize = GetSize(format, width, height, 1);

            buffer = AvBuffer.Allocate(bufferSize);

            dstData     = new byte_ptrArray4();
            dstLinesize = new int_array4();

            unsafe
            {
                ffmpeg.av_image_fill_arrays(ref dstData, ref dstLinesize, (byte *)buffer.Pointer, (AVPixelFormat)format, width, height, 1);
            }
        }
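
GetSize is not an FFmpeg call; presumably it wraps av_image_get_buffer_size. A sketch under that assumption:

        private static int GetSize(AvPixelFormat format, int width, int height, int align)
        {
            // Delegate to FFmpeg so the buffer size agrees with the plane layout
            // produced by av_image_fill_arrays below.
            return ffmpeg.av_image_get_buffer_size((AVPixelFormat)format, width, height, align);
        }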
Example #20
        public Converter(System.Drawing.Size size, AVPixelFormat sourcePixelFormat)
        {
            Size = size;
            const AVPixelFormat targetPixelFormat = AVPixelFormat.AV_PIX_FMT_BGR24;
            var context = ffmpeg.sws_getContext(size.Width, size.Height, sourcePixelFormat, size.Width, size.Height, targetPixelFormat, ffmpeg.SWS_FAST_BILINEAR, null, null, null);

            _convertedBufferSize = ffmpeg.av_image_get_buffer_size(targetPixelFormat, size.Width, size.Height, 1);

            _buffer       = new byte[_convertedBufferSize];
            _bufferHandle = GCHandle.Alloc(_buffer, GCHandleType.Pinned); // must be pinned for UnsafeAddrOfPinnedArrayElement below

            _convertedFrameBuffer = Marshal.UnsafeAddrOfPinnedArrayElement(_buffer, 0);
            _dstData        = new byte_ptrArray4();
            _dstLineSize    = new int_array4();
            _convertContext = context;

            ffmpeg.av_image_fill_arrays(ref _dstData, ref _dstLineSize, (byte *)_convertedFrameBuffer, targetPixelFormat, size.Width, size.Height, 1);
        }
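
Because the buffer is pinned (see the corrected GCHandle.Alloc above), the handle must eventually be released. A minimal cleanup sketch, assuming the fields above:

        public void Dispose()
        {
            // Unpin the managed buffer and free the swscale context.
            if (_bufferHandle.IsAllocated)
            {
                _bufferHandle.Free();
            }
            ffmpeg.sws_freeContext(_convertContext);
        }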
Example #21
        public int ExtractFrames([NotNull] FFmpegStream stream, int count)
        {
            FFmpegUtils.EnsurePlatformSupport();
            if (isDisposed)
            {
                throw new ObjectDisposedException(nameof(FFmpegMedia));
            }
            if (!IsOpen)
            {
                // TODO: log?
                throw new InvalidOperationException(@"Media isn't open.");
            }

            var codecContext = *stream.AVStream->codec;
            var streamInfo   = GetStreamInfo(stream);

            var dstData     = new byte_ptrArray4();
            var dstLinesize = new int_array4();

            ffmpeg.av_image_fill_arrays(ref dstData, ref dstLinesize, (byte *)streamInfo.Image.Buffer, DestinationPixelFormat, codecContext.width, codecContext.height, 1);
            streamInfo.Image.Linesize = dstLinesize[0];

            var extractedFrameCount = 0;

            var packet  = new AVPacket();
            var pPacket = &packet;

            ffmpeg.av_init_packet(pPacket);

            for (int i = 0; i < count; i++)
            {
                var extractionStatus = ExtractNextImage(streamInfo, pPacket, stream.AVStream, dstData, dstLinesize);
                streamInfo.ReachedEnd = extractionStatus == FrameExtractionStatus.ReachEOF;
                if (extractionStatus == FrameExtractionStatus.Succeeded)
                {
                    ++extractedFrameCount;
                }
            }

            return(extractedFrameCount);
        }
Example #22
        public VideoFlipperConverter(int width, int height, AVPixelFormat inputPixelFormat, StreamerSettings settings)
        {
            string filters    = $"buffer=width={width}:height={height}:pix_fmt={(int)inputPixelFormat}:time_base=1/1:pixel_aspect=1/1 [in]; [out] buffersink;[in] format=pix_fmts=0 [in1];";
            int    inputCount = 1;

            if (settings.FlipY)
            {
                filters += $"[in{inputCount}] vflip [in{++inputCount}];";
            }
            if (settings.FlipX)
            {
                filters += $"[in{inputCount}] hflip [in{++inputCount}];";
            }

            filters += $"[in{inputCount}] copy [out]";
            AVFilterInOut *gis = null;
            AVFilterInOut *gos = null;

            _filterGraph = ffmpeg.avfilter_graph_alloc();
            ffmpeg.avfilter_graph_parse2(_filterGraph, filters, &gis, &gos).ThrowExceptionIfError();
            ffmpeg.avfilter_graph_config(_filterGraph, null).ThrowExceptionIfError();

            _filterSourceContext = ffmpeg.avfilter_graph_get_filter(_filterGraph, "Parsed_buffer_0");
            _filterSinkContext   = ffmpeg.avfilter_graph_get_filter(_filterGraph, "Parsed_buffersink_1");
            if (_filterSourceContext == null || _filterSinkContext == null)
            {
                throw new Exception("Failed to create filter sinks");
            }

            _flippedFrame = ffmpeg.av_frame_alloc();
            var flippedFrameBuffer = (byte *)ffmpeg.av_malloc((ulong)ffmpeg.av_image_get_buffer_size(AVPixelFormat.AV_PIX_FMT_YUV420P, width, height, 1));
            var dataArr            = new byte_ptrArray4();

            dataArr.UpdateFrom(_flippedFrame->data);
            var linesizeArr = new int_array4();

            linesizeArr.UpdateFrom(_flippedFrame->linesize);
            ffmpeg.av_image_fill_arrays(ref dataArr, ref linesizeArr, flippedFrameBuffer, AVPixelFormat.AV_PIX_FMT_YUV420P, width, height, 1);
            _flippedFrame->data.UpdateFrom(dataArr);
            _flippedFrame->linesize.UpdateFrom(linesizeArr);
        }
Example #23
        public AVFrame MakeFrame(byte[] i420Buffer, int width, int height)
        {
            AVFrame i420Frame = new AVFrame
            {
                width  = width,
                height = height,
                format = (int)AVPixelFormat.AV_PIX_FMT_YUV420P
            };

            fixed(byte *pSrcData = i420Buffer)
            {
                var data     = new byte_ptrArray4();
                var linesize = new int_array4();

                ffmpeg.av_image_fill_arrays(ref data, ref linesize, pSrcData, AVPixelFormat.AV_PIX_FMT_YUV420P, width, height, 1).ThrowExceptionIfError();

                i420Frame.data.UpdateFrom(data);
                i420Frame.linesize.UpdateFrom(linesize);
            }
            // Caution: the plane pointers copied into i420Frame reference i420Buffer,
            // which is unpinned once the fixed block exits; the frame must be consumed
            // before the GC can move the array.

            return(i420Frame);
        }
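
If the frame must safely outlive the call, one option (a sketch, not from the original source) is to pin the buffer for the frame's lifetime and hand the handle back to the caller:

        // Hypothetical lifetime-safe variant: the caller must call pin.Free()
        // once the returned frame is no longer in use.
        public AVFrame MakePinnedFrame(byte[] i420Buffer, int width, int height, out GCHandle pin)
        {
            pin = GCHandle.Alloc(i420Buffer, GCHandleType.Pinned);

            AVFrame i420Frame = new AVFrame
            {
                width  = width,
                height = height,
                format = (int)AVPixelFormat.AV_PIX_FMT_YUV420P
            };

            var data     = new byte_ptrArray4();
            var linesize = new int_array4();

            ffmpeg.av_image_fill_arrays(ref data, ref linesize, (byte *)pin.AddrOfPinnedObject(),
                                        AVPixelFormat.AV_PIX_FMT_YUV420P, width, height, 1).ThrowExceptionIfError();

            i420Frame.data.UpdateFrom(data);
            i420Frame.linesize.UpdateFrom(linesize);

            return(i420Frame);
        }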
Example #24
        public VideoFrameConverter(int srcWidth, int srcHeight, AVPixelFormat sourcePixelFormat,
                                   int dstWidth, int dstHeight, AVPixelFormat destinationPixelFormat)
        {
            _srcWidth       = srcWidth;
            _srcHeight      = srcHeight;
            _dstWidth       = dstWidth;
            _dstHeight      = dstHeight;
            _srcPixelFormat = sourcePixelFormat;
            _dstPixelFormat = destinationPixelFormat;

            _pConvertContext = ffmpeg.sws_getContext(srcWidth, srcHeight, sourcePixelFormat,
                                                     dstWidth, dstHeight, destinationPixelFormat,
                                                     ffmpeg.SWS_FAST_BILINEAR, null, null, null);
            if (_pConvertContext == null)
            {
                throw new ApplicationException("Could not initialize the conversion context.");
            }

            var convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(destinationPixelFormat, dstWidth, dstHeight, 1).ThrowExceptionIfError();

            _convertedFrameBufferPtr = Marshal.AllocHGlobal(convertedFrameBufferSize);
            _dstData     = new byte_ptrArray4();
            _dstLinesize = new int_array4();

            ffmpeg.av_image_fill_arrays(ref _dstData, ref _dstLinesize, (byte *)_convertedFrameBufferPtr, destinationPixelFormat, dstWidth, dstHeight, 1)
            .ThrowExceptionIfError();

            logger.LogDebug($"Successfully initialised ffmpeg based image converter for {srcWidth}:{srcHeight}:{sourcePixelFormat}->{dstWidth}:{dstHeight}:{_dstPixelFormat}.");


            _dstFrame         = ffmpeg.av_frame_alloc();
            _dstFrame->width  = _dstWidth;
            _dstFrame->height = _dstHeight;
            _dstFrame->data.UpdateFrom(_dstData);
            _dstFrame->linesize.UpdateFrom(_dstLinesize);
            _dstFrame->format = (int)_dstPixelFormat;
        }
Example #25
        public VideoFrameConverter(DecoderConfiguration decoderConfiguration)
        {
            configuration = decoderConfiguration;

            _pConvertContext = ffmpeg.sws_getContext(configuration.inputResolution.width, configuration.inputResolution.height,
                                                     FormatHelper.OT2ToFFmpeg(configuration.inputPixelFormat),
                                                     configuration.outputResolution.width,
                                                     configuration.outputResolution.height, FormatHelper.OT2ToFFmpeg(configuration.outputPixelFormat),
                                                     ffmpeg.SWS_FAST_BILINEAR, null, null, null);
            if (_pConvertContext == null)
            {
                throw new ApplicationException("Could not initialize the conversion context.");
            }

            var convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(FormatHelper.OT2ToFFmpeg(configuration.outputPixelFormat)
                                                                           , configuration.outputResolution.width, configuration.outputResolution.height, 1);

            _convertedFrameBufferPtr = Marshal.AllocHGlobal(convertedFrameBufferSize);
            _dstData     = new byte_ptrArray4();
            _dstLinesize = new int_array4();

            ffmpeg.av_image_fill_arrays(ref _dstData, ref _dstLinesize, (byte *)_convertedFrameBufferPtr, FormatHelper.OT2ToFFmpeg(configuration.outputPixelFormat)
                                        , configuration.outputResolution.width, configuration.outputResolution.height, 1);
        }
Example #26
        private static unsafe void Main(string[] args)
        {
            Console.WriteLine(@"Current directory: " + Environment.CurrentDirectory);
            Console.WriteLine(@"Runnung in {0}-bit mode.", Environment.Is64BitProcess ? @"64" : @"32");

            FFmpegBinariesHelper.RegisterFFmpegBinaries();

            ffmpeg.av_register_all();
            ffmpeg.avcodec_register_all();
            ffmpeg.avformat_network_init();

            Console.WriteLine($"FFmpeg version info: {ffmpeg.av_version_info()}");

            // setup logging
            ffmpeg.av_log_set_level(ffmpeg.AV_LOG_VERBOSE);
            av_log_set_callback_callback logCallback = (p0, level, format, vl) =>
            {
                if (level > ffmpeg.av_log_get_level())
                {
                    return;
                }

                var lineSize    = 1024;
                var lineBuffer  = stackalloc byte[lineSize];
                var printPrefix = 1;
                ffmpeg.av_log_format_line(p0, level, format, vl, lineBuffer, lineSize, &printPrefix);
                var line = Marshal.PtrToStringAnsi((IntPtr)lineBuffer);
                Console.Write(line);
            };

            ffmpeg.av_log_set_callback(logCallback);

            // decode N frames from url or path

            //string url = @"../../sample_mpeg4.mp4";
            var url = @"http://www.quirksmode.org/html5/videos/big_buck_bunny.mp4";

            var pFormatContext = ffmpeg.avformat_alloc_context();

            if (ffmpeg.avformat_open_input(&pFormatContext, url, null, null) != 0)
            {
                throw new ApplicationException(@"Could not open file.");
            }

            if (ffmpeg.avformat_find_stream_info(pFormatContext, null) != 0)
            {
                throw new ApplicationException(@"Could not find stream info");
            }

            AVStream *pStream = null;

            for (var i = 0; i < pFormatContext->nb_streams; i++)
            {
                if (pFormatContext->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
                {
                    pStream = pFormatContext->streams[i];
                    break;
                }
            }
            if (pStream == null)
            {
                throw new ApplicationException(@"Could not found video stream.");
            }


            var codecContext = *pStream->codec;

            Console.WriteLine($"codec name: {ffmpeg.avcodec_get_name(codecContext.codec_id)}");

            var width             = codecContext.width;
            var height            = codecContext.height;
            var sourcePixFmt      = codecContext.pix_fmt;
            var codecId           = codecContext.codec_id;
            var destinationPixFmt = AVPixelFormat.AV_PIX_FMT_BGR24;
            var pConvertContext   = ffmpeg.sws_getContext(width, height, sourcePixFmt,
                                                          width, height, destinationPixFmt,
                                                          ffmpeg.SWS_FAST_BILINEAR, null, null, null);

            if (pConvertContext == null)
            {
                throw new ApplicationException(@"Could not initialize the conversion context.");
            }

            var pConvertedFrame          = ffmpeg.av_frame_alloc();
            var convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(destinationPixFmt, width, height, 1);
            var convertedFrameBufferPtr  = Marshal.AllocHGlobal(convertedFrameBufferSize);
            var dstData     = new byte_ptrArray4();
            var dstLinesize = new int_array4();

            ffmpeg.av_image_fill_arrays(ref dstData, ref dstLinesize, (byte *)convertedFrameBufferPtr, destinationPixFmt, width, height, 1);

            var pCodec = ffmpeg.avcodec_find_decoder(codecId);

            if (pCodec == null)
            {
                throw new ApplicationException(@"Unsupported codec.");
            }

            var pCodecContext = &codecContext;

            if ((pCodec->capabilities & ffmpeg.AV_CODEC_CAP_TRUNCATED) == ffmpeg.AV_CODEC_CAP_TRUNCATED)
            {
                pCodecContext->flags |= ffmpeg.AV_CODEC_FLAG_TRUNCATED;
            }

            if (ffmpeg.avcodec_open2(pCodecContext, pCodec, null) < 0)
            {
                throw new ApplicationException(@"Could not open codec.");
            }

            var pDecodedFrame = ffmpeg.av_frame_alloc();

            var packet  = new AVPacket();
            var pPacket = &packet;

            ffmpeg.av_init_packet(pPacket);

            var frameNumber = 0;

            while (frameNumber < 200)
            {
                try
                {
                    if (ffmpeg.av_read_frame(pFormatContext, pPacket) < 0)
                    {
                        throw new ApplicationException(@"Could not read frame.");
                    }

                    if (pPacket->stream_index != pStream->index)
                    {
                        continue;
                    }

                    if (ffmpeg.avcodec_send_packet(pCodecContext, pPacket) < 0)
                    {
                        throw new ApplicationException($@"Error while sending packet {frameNumber}.");
                    }

                    if (ffmpeg.avcodec_receive_frame(pCodecContext, pDecodedFrame) < 0)
                    {
                        throw new ApplicationException($@"Error while receiving frame {frameNumber}.");
                    }

                    Console.WriteLine($@"frame: {frameNumber}");

                    ffmpeg.sws_scale(pConvertContext, pDecodedFrame->data, pDecodedFrame->linesize, 0, height, dstData, dstLinesize);
                }
                finally
                {
                    ffmpeg.av_packet_unref(pPacket);
                    ffmpeg.av_frame_unref(pDecodedFrame);
                }

#if !NETCOREAPP2_0
                using (var bitmap = new System.Drawing.Bitmap(width, height, dstLinesize[0], System.Drawing.Imaging.PixelFormat.Format24bppRgb, convertedFrameBufferPtr))
                    bitmap.Save(@"frame.buffer.jpg", System.Drawing.Imaging.ImageFormat.Jpeg);
#endif

                frameNumber++;
            }

            Marshal.FreeHGlobal(convertedFrameBufferPtr);
            ffmpeg.av_frame_free(&pConvertedFrame); // frames from av_frame_alloc need av_frame_free, not av_free
            ffmpeg.sws_freeContext(pConvertContext);

            ffmpeg.av_frame_free(&pDecodedFrame);
            ffmpeg.avcodec_close(pCodecContext);
            ffmpeg.avformat_close_input(&pFormatContext);
        }
Example #27
        /// <returns>The frame extraction status; ReachEOF once the end of the stream is reached.</returns>
        private FrameExtractionStatus ExtractNextImage(StreamInfo streamInfo, AVPacket *pPacket, AVStream *pStream, byte_ptrArray4 dstData, int_array4 dstLinesize)
        {
            AVFrame *pFrame;
            var      pCodecContext = streamInfo.Codec.pAVCodecContext;
            var      outputImage   = streamInfo.Image;

            while (true)
            {
                try
                {
                    var ret = ffmpeg.av_read_frame(AVFormatContext, pPacket);
                    if (ret < 0)
                    {
                        if (ret == ffmpeg.AVERROR_EOF)
                        {
                            return(FrameExtractionStatus.ReachEOF);
                        }

                        Logger.Error($"Could not read frame. Error code={ret.ToString("X8")}.");
                        Logger.Error(GetErrorMessage(ret));
                        return(FrameExtractionStatus.Failed);
                    }

                    var packetSideData = ffmpeg.av_packet_get_side_data(pPacket, AVPacketSideDataType.AV_PKT_DATA_STEREO3D, null);

                    // Note: the other stream might be sound (which we will want to process at some point)
                    if (pPacket->stream_index != pStream->index)
                    {
                        continue;
                    }

                    ret = ffmpeg.avcodec_send_packet(pCodecContext, pPacket);
                    if (ret < 0)
                    {
                        Logger.Error($"Error while sending packet. Error code={ret.ToString("X8")}");
                        Logger.Error(GetErrorMessage(ret));
                        return(FrameExtractionStatus.Failed);
                    }

                    ret = ffmpeg.avcodec_receive_frame(pCodecContext, pDecodedFrame);
                    //if (ret == ffmpeg.AVERROR(ffmpeg.EAGAIN)) // don't want to block the execution thread
                    //    continue;

                    if (ret < 0)
                    {
                        Logger.Error($"Error while receiving frame. Error code={ret.ToString("X8")}");
                        Logger.Error(GetErrorMessage(ret));
                        // Might be a bad frame, ignore it.
                        return(FrameExtractionStatus.Failed);
                    }
                    var frameSideData = ffmpeg.av_frame_get_side_data(pDecodedFrame, AVFrameSideDataType.AV_FRAME_DATA_STEREO3D);

                    // copy the decoded frame to the CPU if needed
                    if (streamInfo.Codec.DecoderOutputTexture == null)
                    {
                        if (pDecodedFrame->format == (int)streamInfo.Codec.HardwarePixelFormat)
                        {
                            // the frame is coming from the GPU
                            ret = ffmpeg.av_hwframe_transfer_data(pCpuCopyFrame, pDecodedFrame, 0);
                            if (ret < 0)
                            {
                                throw new ApplicationException("Couldn't transfer frame data from GPU to CPU");
                            }

                            pFrame = pCpuCopyFrame;
                        }
                        else
                        {
                            pFrame = pDecodedFrame;
                        }

                        // Create the convert context for frame format conversion
                        var width        = pCodecContext->width;
                        var height       = pCodecContext->height;
                        var sourcePixFmt = (AVPixelFormat)pFrame->format;
                        pConvertContext = ffmpeg.sws_getCachedContext(pConvertContext, width, height, sourcePixFmt, width, height, DestinationPixelFormat, ffmpeg.SWS_FAST_BILINEAR, null, null, null);
                        if (pConvertContext == null)
                        {
                            throw new ApplicationException("Could not initialize the conversion context.");
                        }

                        ffmpeg.sws_scale(pConvertContext, pFrame->data, pFrame->linesize, 0, outputImage.Height, dstData, dstLinesize);
                        outputImage.Timestamp = pDecodedFrame->pts;
                    }

                    return(FrameExtractionStatus.Succeeded);
                }
                finally
                {
                    ffmpeg.av_packet_unref(pPacket);
                    ffmpeg.av_frame_unref(pDecodedFrame);
                }
            }
        }
Example #28
    void StartVideo()
    {
        //Debug.Log("enable");
        ffmpeg.av_register_all();
        ffmpeg.avcodec_register_all();
        ffmpeg.avformat_network_init();


        UnityEngine.Debug.Log("url:" + url);

        var pFormatContext = ffmpeg.avformat_alloc_context();

        if (ffmpeg.avformat_open_input(&pFormatContext, url, null, null) != 0)
        {
            throw new ApplicationException(@"Could not open file");
        }

        if (ffmpeg.avformat_find_stream_info(pFormatContext, null) != 0)
        {
            throw new ApplicationException(@"Could not find stream info");
        }
        AVStream *pStream = null;

        for (var i = 0; i < pFormatContext->nb_streams; i++)
        {
            if (pFormatContext->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
            {
                pStream = pFormatContext->streams[i];
                break;
            }
        }
        if (pStream == null)
        {
            throw new ApplicationException(@"Could not found video stream");
        }

        var codecContext = *pStream->codec;

        width  = codecContext.width;
        height = codecContext.height;
        var sourcePixFmt      = codecContext.pix_fmt;
        var codecId           = codecContext.codec_id;
        var destinationPixFmt = AVPixelFormat.AV_PIX_FMT_RGB24;
        var pConvertContext   = ffmpeg.sws_getContext(width, height, sourcePixFmt,
                                                      width, height, destinationPixFmt,
                                                      ffmpeg.SWS_FAST_BILINEAR, null, null, null);

        if (pConvertContext == null)
        {
            throw new ApplicationException(@"Could not initialize the conversion context");
        }

        var   pConvertedFrame          = ffmpeg.av_frame_alloc();
        var   convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(destinationPixFmt, width, height, 1);
        var convertedFrameBuffer = new byte[convertedFrameBufferSize];

        // Pin the managed buffer for the lifetime of the decode loop. The original
        // let the pointer escape its fixed block, leaving it dangling as soon as
        // the GC moved the array.
        var   bufferHandle = GCHandle.Alloc(convertedFrameBuffer, GCHandleType.Pinned);
        byte *a            = (byte *)bufferHandle.AddrOfPinnedObject();

        gconvertedFrameBuffer = convertedFrameBuffer;

        var dstData     = new byte_ptrArray4();
        var dstLinesize = new int_array4();

        ffmpeg.av_image_fill_arrays(ref dstData, ref dstLinesize, a, destinationPixFmt, width, height, 1);

        var pCodec = ffmpeg.avcodec_find_decoder(codecId);

        if (pCodec == null)
        {
            throw new ApplicationException(@"Unsupported codec");
        }

        // Reusing the codec context from the stream info. Initially this was:
        // AVCodecContext* pCodecContext = ffmpeg.avcodec_alloc_context3(pCodec); // but that did not work for all kinds of codecs
        var pCodecContext = &codecContext;

        if ((pCodec->capabilities & ffmpeg.AV_CODEC_CAP_TRUNCATED) == ffmpeg.AV_CODEC_CAP_TRUNCATED)
        {
            pCodecContext->flags |= ffmpeg.AV_CODEC_FLAG_TRUNCATED;
        }

        if (ffmpeg.avcodec_open2(pCodecContext, pCodec, null) < 0)
        {
            throw new ApplicationException(@"Could not open codec");
        }

        var pDecodedFrame = ffmpeg.av_frame_alloc();

        AVPacket packet = new AVPacket();

        ffmpeg.av_init_packet(&packet);

        isNewFrameReady = true;
        Boolean finished = false;

        //Stopwatch stopwatch = new Stopwatch();
        //int count = 0;
        while (!finished)
        {
            //stopwatch.Reset();
            //stopwatch.Start();
            var pPacket = &packet;
            if (ffmpeg.av_read_frame(pFormatContext, pPacket) < 0)
            {
                throw new ApplicationException(@"Could not read frame");
            }
            //stopwatch.Stop();
            //UnityEngine.Debug.Log("av_read_frame time:" + stopwatch.ElapsedMilliseconds);

            if (pPacket->stream_index != pStream->index)
            {
                ffmpeg.av_packet_unref(pPacket); // don't leak packets from other streams
                continue;
            }

            //stopwatch.Reset();
            //stopwatch.Start();
            if (ffmpeg.avcodec_send_packet(pCodecContext, pPacket) < 0)
            {
                throw new ApplicationException("Error while sending packet");
            }
            if (ffmpeg.avcodec_receive_frame(pCodecContext, pDecodedFrame) < 0)
            {
                ffmpeg.av_packet_unref(pPacket); // decoder needs more input; release this packet first
                continue;
            }
            //stopwatch.Stop();
            //UnityEngine.Debug.Log("decode time:" + stopwatch.ElapsedMilliseconds);

            //stopwatch.Reset();
            //stopwatch.Start();
            ffmpeg.sws_scale(pConvertContext, pDecodedFrame->data, pDecodedFrame->linesize, 0, height, dstData, dstLinesize);
            //stopwatch.Stop();
            //UnityEngine.Debug.Log("sws_scale time:" + stopwatch.ElapsedMilliseconds);

            ffmpeg.av_packet_unref(pPacket);
            ffmpeg.av_frame_unref(pDecodedFrame);
            //count++;
            //if(count == 5)
            //    finished = true;
        }
        ffmpeg.av_frame_free(&pConvertedFrame); // frames from av_frame_alloc need av_frame_free, not av_free
        ffmpeg.sws_freeContext(pConvertContext);
        ffmpeg.av_frame_free(&pDecodedFrame);
        ffmpeg.avcodec_close(pCodecContext);
        ffmpeg.avformat_close_input(&pFormatContext);
    }
Example #29
        public MediaFile(String path, bool tryFrames = true)
        {
            pFormatCtx = ffmpeg.avformat_alloc_context();

            var formatCtx = pFormatCtx;

            ffmpeg.avformat_open_input(&formatCtx, path, null, null).ThrowExceptionIfError();

            ffmpeg.avformat_find_stream_info(pFormatCtx, null).ThrowExceptionIfError();

            AVStream *pStream = null;

            for (var i = 0; i < pFormatCtx->nb_streams; i++)
            {
                if (pFormatCtx->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
                {
                    pStream = pFormatCtx->streams[i];
                    break;
                }
            }

            if (pStream == null)
            {
                throw new InvalidOperationException("Could not found video stream.");
            }

            streamIndex = pStream->index;
            pCodecCtx   = pStream->codec;

            var codecId = pCodecCtx->codec_id;
            var pCodec  = ffmpeg.avcodec_find_decoder(codecId);

            if (pCodec == null)
            {
                throw new InvalidOperationException("Unsupported codec.");
            }

            filepath = path;

            ffmpeg.avcodec_open2(pCodecCtx, pCodec, null).ThrowExceptionIfError();

            CodecName = ffmpeg.avcodec_get_name(codecId);

            pPacket = ffmpeg.av_packet_alloc();
            pFrame  = ffmpeg.av_frame_alloc();

            width       = pCodecCtx->width;
            height      = pCodecCtx->height;
            totalFrames = tryFrames ? TryGetFrameCount() : pStream->nb_frames;

            var pixFmt = pCodecCtx->pix_fmt;

            if (pixFmt == AVPixelFormat.AV_PIX_FMT_NONE)
            {
                pixFmt = AVPixelFormat.AV_PIX_FMT_YUV420P;
            }

            var destPixFmt = AVPixelFormat.AV_PIX_FMT_BGR24;

            scaleCtx = ffmpeg.sws_getContext(width, height, pixFmt,
                                             width, height, destPixFmt,
                                             ffmpeg.SWS_BICUBIC, null, null, null);

            var rgbBufSize = ffmpeg.av_image_get_buffer_size(destPixFmt, width, height, 1);

            rgbFrameBuffer = Marshal.AllocHGlobal(rgbBufSize);

            rgbBuf      = new byte_ptrArray4();
            dstLinesize = new int_array4();
            ffmpeg.av_image_fill_arrays(ref rgbBuf, ref dstLinesize, (byte *)rgbFrameBuffer, destPixFmt, width, height, 1);
        }
Example #30
        private void ReadFrames()
        {
            pConvertedFrameBuffer = IntPtr.Zero;
            pConvertContext       = null;

            var audioInited = false;
            var videoInited = false;

            byte[] buffer      = null, tbuffer = null;
            var    dstData     = new byte_ptrArray4();
            var    dstLinesize = new int_array4();
            BufferedWaveProvider waveProvider = null;

            sampleChannel = null;
            var packet = new AVPacket();

            do
            {
                ffmpeg.av_init_packet(&packet);
                if (_audioCodecContext != null && buffer == null)
                {
                    buffer  = new byte[_audioCodecContext->sample_rate * 2];
                    tbuffer = new byte[_audioCodecContext->sample_rate * 2];
                }

                if (Log("AV_READ_FRAME", ffmpeg.av_read_frame(_formatContext, &packet)))
                {
                    break;
                }


                if ((packet.flags & ffmpeg.AV_PKT_FLAG_CORRUPT) == ffmpeg.AV_PKT_FLAG_CORRUPT)
                {
                    break;
                }

                var nf = NewFrame;
                var da = DataAvailable;

                _lastPacket = DateTime.UtcNow;

                var ret = -11; //EAGAIN
                if (_audioStream != null && packet.stream_index == _audioStream->index && _audioCodecContext != null && !_ignoreAudio)
                {
                    if (HasAudioStream != null)
                    {
                        HasAudioStream?.Invoke(this, EventArgs.Empty);
                        HasAudioStream = null;
                    }

                    if (da != null)
                    {
                        var s = 0;
                        fixed(byte **outPtrs = new byte *[32])
                        {
                            fixed(byte *bPtr = &tbuffer[0])
                            {
                                outPtrs[0] = bPtr;
                                var af = ffmpeg.av_frame_alloc();

                                ffmpeg.avcodec_send_packet(_audioCodecContext, &packet);
                                do
                                {
                                    ret = ffmpeg.avcodec_receive_frame(_audioCodecContext, af);

                                    if (ret == 0)
                                    {
                                        int numSamplesOut = 0;
                                        try
                                        {
                                            if (_swrContext == null)
                                            {
                                                //need to do this here as send_packet can change channel layout and throw an exception below
                                                initSWR();
                                            }
                                            var dat = af->data[0];

                                            numSamplesOut = ffmpeg.swr_convert(_swrContext,
                                                                               outPtrs,
                                                                               _audioCodecContext->sample_rate,
                                                                               &dat,
                                                                               af->nb_samples);
                                        }
                                        catch (Exception ex)
                                        {
                                            Logger.LogException(ex, "MediaStream - Audio Read");
                                            _ignoreAudio = true;
                                            break;
                                        }

                                        if (numSamplesOut > 0)
                                        {
                                            var l = numSamplesOut * 2 * OutFormat.Channels;
                                            Buffer.BlockCopy(tbuffer, 0, buffer, s, l);
                                            s += l;
                                        }
                                        else
                                        {
                                            ret = numSamplesOut; //(error)
                                        }
                                    }
                                    if (af->decode_error_flags > 0)
                                    {
                                        break;
                                    }
                                } while (ret == 0);
                                ffmpeg.av_frame_free(&af);
                                if (s > 0)
                                {
                                    var ba = new byte[s];
                                    Buffer.BlockCopy(buffer, 0, ba, 0, s);

                                    if (!audioInited)
                                    {
                                        audioInited     = true;
                                        RecordingFormat = new WaveFormat(_audioCodecContext->sample_rate, 16,
                                                                         _audioCodecContext->channels);

                                        waveProvider = new BufferedWaveProvider(RecordingFormat)
                                        {
                                            DiscardOnBufferOverflow = true,
                                            BufferDuration          = TimeSpan.FromMilliseconds(200)
                                        };
                                        sampleChannel = new SampleChannel(waveProvider);

                                        sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;
                                    }


                                    waveProvider.AddSamples(ba, 0, s);

                                    var sampleBuffer = new float[s];
                                    var read         = sampleChannel.Read(sampleBuffer, 0, s);


                                    da(this, new DataAvailableEventArgs(ba, s));


                                    if (Listening)
                                    {
                                        WaveOutProvider?.AddSamples(ba, 0, read);
                                    }
                                }
                            }
                        }
                    }
                }

                if (nf != null && _videoStream != null && packet.stream_index == _videoStream->index &&
                    _videoCodecContext != null)
                {
                    var ef = ShouldEmitFrame;
                    ffmpeg.avcodec_send_packet(_videoCodecContext, &packet);
                    do
                    {
                        var vf = ffmpeg.av_frame_alloc();
                        ret = ffmpeg.avcodec_receive_frame(_videoCodecContext, vf);
                        if (ret == 0 && ef)
                        {
                            AVPixelFormat srcFmt;
                            if (_hwDeviceCtx != null)
                            {
                                srcFmt = AVPixelFormat.AV_PIX_FMT_NV12;
                                var output = ffmpeg.av_frame_alloc();
                                ffmpeg.av_hwframe_transfer_data(output, vf, 0);
                                ffmpeg.av_frame_copy_props(output, vf);
                                ffmpeg.av_frame_free(&vf);
                                vf = output;
                            }
                            else
                            {
                                srcFmt = (AVPixelFormat)vf->format;
                            }

                            if (!videoInited)
                            {
                                videoInited = true;

                                _finalSize = Helper.CalcResizeSize(_source.settings.resize, new Size(_videoCodecContext->width, _videoCodecContext->height), new Size(_source.settings.resizeWidth, _source.settings.resizeHeight));

                                var convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(AVPixelFormat.AV_PIX_FMT_BGR24, _finalSize.Width, _finalSize.Height, 1);
                                pConvertedFrameBuffer = Marshal.AllocHGlobal(convertedFrameBufferSize);
                                ffmpeg.av_image_fill_arrays(ref dstData, ref dstLinesize, (byte *)pConvertedFrameBuffer, AVPixelFormat.AV_PIX_FMT_BGR24, _finalSize.Width, _finalSize.Height, 1);
                                pConvertContext = ffmpeg.sws_getContext(_videoCodecContext->width, _videoCodecContext->height, NormalizePixelFormat(srcFmt), _finalSize.Width, _finalSize.Height, AVPixelFormat.AV_PIX_FMT_BGR24, ffmpeg.SWS_FAST_BILINEAR, null, null, null);
                            }

                            Log("SWS_SCALE", ffmpeg.sws_scale(pConvertContext, vf->data, vf->linesize, 0, _videoCodecContext->height, dstData, dstLinesize));


                            if (vf->decode_error_flags > 0)
                            {
                                ffmpeg.av_frame_free(&vf);
                                break;
                            }

                            using (
                                var mat = new Bitmap(_finalSize.Width, _finalSize.Height, dstLinesize[0],
                                                     PixelFormat.Format24bppRgb, pConvertedFrameBuffer))
                            {
                                var nfe = new NewFrameEventArgs(mat);
                                nf.Invoke(this, nfe);
                            }

                            _lastVideoFrame = DateTime.UtcNow;
                            ffmpeg.av_frame_free(&vf);
                            break;
                        }
                        ffmpeg.av_frame_free(&vf);
                    } while (ret == 0);
                }

                if (nf != null && _videoStream != null)
                {
                    if ((DateTime.UtcNow - _lastVideoFrame).TotalMilliseconds * 1000 > _timeoutMicroSeconds)
                    {
                        _res   = ReasonToFinishPlaying.DeviceLost;
                        _abort = true;
                    }
                }

                ffmpeg.av_packet_unref(&packet);
                if (ret == -11)
                {
                    Thread.Sleep(10);
                }
            } while (!_abort && !MainForm.ShuttingDown);

            NewFrame?.Invoke(this, new NewFrameEventArgs(null));

            CleanUp();
        }