public VideoDecoder()
        {
            AVCodec* codec = FFmpegInvoke.avcodec_find_decoder(CodecId);
            if (codec == null) throw new Exception("Codec not found");

            codec_context = FFmpegInvoke.avcodec_alloc_context3(codec);
            if (codec_context == null) throw new Exception("Could not allocate video codec context");

            if (FFmpegInvoke.avcodec_open2(codec_context, codec, null) < 0) throw new Exception("Could not open codec");

            avFrame = FFmpegInvoke.avcodec_alloc_frame();
            if (avFrame == null) throw new Exception("Could not allocate video frame");
        }
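A decode call to pair with this constructor is not shown. Below is a minimal sketch, assuming the same codec_context/avFrame fields and the legacy avcodec_decode_video2 API exposed by these bindings; DecodeFrame is a hypothetical name.

        public bool DecodeFrame(AVPacket* packet)
        {
            int gotPicture = 0;
            // Feed one compressed packet to the decoder; avFrame receives the picture.
            int len = FFmpegInvoke.avcodec_decode_video2(codec_context, avFrame, &gotPicture, packet);
            if (len < 0) throw new Exception("Error while decoding frame");
            return gotPicture == 1; // true when a complete frame was produced
        }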
        private void Initialize(int width, int height, AVPixelFormat inFormat)
        {
            _initialized = true;
            _pContext = FFmpegInvoke.sws_getContext(width, height, inFormat,
                                                    width, height, _pixelFormat,
                                                    FFmpegInvoke.SWS_FAST_BILINEAR, null, null, null);
            if (_pContext == null)
                throw new VideoConverterException("Could not initialize the conversion context.");

            _pCurrentFrame = FFmpegInvoke.avcodec_alloc_frame();

            int outputDataSize = FFmpegInvoke.avpicture_get_size(_pixelFormat, width, height);
            _outputData = new byte[outputDataSize];

            fixed (byte* pOutputData = &_outputData[0])
            {
                FFmpegInvoke.avpicture_fill((AVPicture*)_pCurrentFrame, pOutputData, _pixelFormat, width, height);
            }
        }
        /// <summary>
        /// Constructs a new VideoDecoderStream over a specific stream of a media file.
        /// </summary>
        /// <param name="file">The media file reader to decode from.</param>
        /// <param name="stream">The stream to decode.</param>
        internal VideoDecoderStream(MediaFileReader file, ref AVStream stream)
            : base(file, ref stream)
        {
            // allocate video frame
            m_avFrame = FFmpeg.avcodec_alloc_frame();
            if (FFmpeg.avpicture_alloc(out m_avPicture, m_avCodecCtx.pix_fmt, m_avCodecCtx.width, m_avCodecCtx.height) != 0)
                throw new DecoderException("Error allocating AVPicture");
            m_avPicture_allocated = true;

            int buffersize = FFmpeg.avpicture_get_size(m_avCodecCtx.pix_fmt, m_avCodecCtx.width, m_avCodecCtx.height);
            if (buffersize <= 0)
                throw new DecoderException("Invalid size returned by avpicture_get_size");

            m_buffer = new byte[buffersize];
        }
Example #4
        public VideoDecoder()
        {
            _pFrame = FFmpegInvoke.av_frame_alloc();

            AVCodec* pCodec = FFmpegInvoke.avcodec_find_decoder(CodecId);

            if (pCodec == null)
                throw new VideoDecoderException("Unsupported codec.");

            _pDecodingContext = FFmpegInvoke.avcodec_alloc_context3(pCodec);


            if (FFmpegInvoke.avcodec_open2(_pDecodingContext, pCodec, null) < 0)
                throw new VideoDecoderException("Could not open codec.");
        }
Example #5
        private void OpenAudio()
        {
            var codec = ffmpeg.avcodec_find_encoder(_audioCodecContext->codec_id);

            if (codec == null)
            {
                throw new Exception("Cannot find audio codec.");
            }

            ffmpeg.av_opt_set(_audioCodecContext->priv_data, "tune", "zerolatency", 0);

            var ret = ffmpeg.avcodec_open2(_audioCodecContext, codec, null);

            if (ret < 0)
            {
                throw new Exception("Cannot open audio codec.");
            }

            ffmpeg.avcodec_parameters_from_context(_audioStream->codecpar, _audioCodecContext);

            _audioFrame = ffmpeg.av_frame_alloc();
        }
        public void WriteFrameToEncoder(AudioEncoder encoder)
        {
            AVFrame *outputFrame = null;

            Util.InitOutputFrame(&outputFrame, encoder, GetBufferSize());
            try
            {
                byte *[] outputDataArray = outputFrame->data;
                fixed(byte **dataPtr = &outputDataArray[0])
                {
                    if (ffmpeg.av_audio_fifo_read(audioFifo, (void **)dataPtr, outputFrame->nb_samples) < outputFrame->nb_samples)
                    {
                        throw new FFmpegException(ffmpeg.AVERROR_UNKNOWN, "Failed to read data from fifo buffer.");
                    }
                }
                encoder.WriteNextAudioFrame(outputFrame);
            }
            finally
            {
                ffmpeg.av_frame_free(&outputFrame);
            }
        }
Example #7
        public void Start(string filename)
        {
            frame = ffmpeg.av_frame_alloc();
            if (frame == null)
            {
                throw new Exception("alloc frame fail");
            }

            frame->format = (int)context->pix_fmt;
            frame->width  = context->width;
            frame->height = context->height;

            ffmpeg.av_frame_get_buffer(frame, 32).ThrowExceptionIfError();
            ffmpeg.av_frame_make_writable(frame).ThrowExceptionIfError();
            ffmpeg.avcodec_open2(context, codec, null).ThrowExceptionIfError();

            packet = ffmpeg.av_packet_alloc();
            if (packet == null)
            {
                throw new Exception("alloc packet fail");
            }

            stream            = ffmpeg.avformat_new_stream(formatContext, codec);
            stream->time_base = new AVRational {
                num = 1, den = frames_per_second
            };
            stream->codecpar->codec_tag = 0;

            ffmpeg.avcodec_parameters_from_context(stream->codecpar, context);
            if ((formatContext->oformat->flags & ffmpeg.AVFMT_GLOBALHEADER) != 0)
            {
                stream->codec->flags |= ffmpeg.AV_CODEC_FLAG_GLOBAL_HEADER;
            }

            ffmpeg.avio_open2(&formatContext->pb, filename, ffmpeg.AVIO_FLAG_READ_WRITE, null, null).ThrowExceptionIfError();
            ffmpeg.avformat_write_header(formatContext, null).ThrowExceptionIfError();
            ioOpened = true;
            pts      = 0;
        }
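A matching teardown is not shown; a minimal sketch, assuming the same fields and a hypothetical Stop() name, flushes the muxer and closes the output opened in Start():

        public void Stop()
        {
            if (!ioOpened)
            {
                return;
            }

            // Write the trailer and close the AVIO handle opened in Start().
            ffmpeg.av_write_trailer(formatContext).ThrowExceptionIfError();
            ffmpeg.avio_closep(&formatContext->pb).ThrowExceptionIfError();
            ioOpened = false;
        }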
        protected virtual void Dispose(bool disposing)
        {
            if (disposing)
            {
                //if (m_StreamIO != null)
                //{
                //    m_StreamIO.Dispose();
                //    m_StreamIO = null;
                //}
                if (m_stream != null)
                {
                    m_stream.Dispose();
                    m_stream = null;
                }
            }

            AVFormatContext *fmt_ctx1 = fmt_ctx;

            ffmpeg.avformat_close_input(&fmt_ctx1);
            fmt_ctx = null;

            AVCodecContext *c1 = c;

            ffmpeg.avcodec_free_context(&c1);
            c = c1;
            //c = null;

            AVFrame *decoded_frame1 = decoded_frame;

            ffmpeg.av_frame_free(&decoded_frame1);
            decoded_frame = decoded_frame1;
            //decoded_frame = null;

            AVPacket *pkt1 = pkt;

            ffmpeg.av_packet_free(&pkt1);
            pkt = pkt1;
            //pkt = null;
        }
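The public half of the dispose pattern normally accompanies this method; a standard sketch, assuming the class implements IDisposable:

        public void Dispose()
        {
            Dispose(true);
            GC.SuppressFinalize(this);
        }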
Example #9
        public byte[] ConvertFrame(AVFrame *pFrame)
        {
            if (_initialized == false)
            {
                Initialize(pFrame->width, pFrame->height, (AVPixelFormat)pFrame->format);
            }

            // _pCurrentFrame was wired to _outputData in Initialize, so scaling
            // into it fills the managed output buffer.
            ffmpeg.sws_scale(_pContext, pFrame->data, pFrame->linesize, 0, pFrame->height,
                             _pCurrentFrame->data, _pCurrentFrame->linesize);

            return(_outputData);
        }
Example #10
        private void SwrCheckInit(MediaFrame srcFrame)
        {
            unsafe
            {
                if (pSwrContext == null && !isDisposing)
                {
                    AVFrame *src = srcFrame;
                    AVFrame *dst = dstFrame;
                    ulong    srcChannelLayout = src->channel_layout;
                    if (srcChannelLayout == 0)
                    {
                        srcChannelLayout = FFmpegHelper.GetChannelLayout(src->channels);
                    }

                    pSwrContext = ffmpeg.swr_alloc_set_opts(null,
                                                            (long)DstChannelLayout, DstFormat, DstSampleRate == 0 ? src->sample_rate : DstSampleRate,
                                                            (long)srcChannelLayout, (AVSampleFormat)src->format, src->sample_rate,
                                                            0, null);
                    ffmpeg.swr_init(pSwrContext).ThrowExceptionIfError();
                }
            }
        }
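Once pSwrContext is initialised, the conversion step that typically follows is a single call. A sketch using the swr_convert_frame binding declared further down this page (src and dst obtained from the same MediaFrame fields as above):

            AVFrame *src = srcFrame;
            AVFrame *dst = dstFrame;
            ffmpeg.swr_convert_frame(pSwrContext, dst, src).ThrowExceptionIfError();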
        public VideoEncoder(int width, int height, int fps)
        {
            _converter = new VideoConverter(CODEC_PIXEL_FORMAT);

            AVCodec* codec = FFmpegInvoke.avcodec_find_encoder(CODEC_ID);
            if (codec == null) throw new Exception("Codec not found");

            _codec_context = FFmpegInvoke.avcodec_alloc_context3(codec);
            if (_codec_context == null) throw new Exception("Could not allocate video codec context");

            _codec_context->bit_rate = 50000;
            _codec_context->width = width;
            _codec_context->height = height;
            _codec_context->time_base = new AVRational() { num = 1, den = fps };
            _codec_context->gop_size = 10; // emit one intra frame every ten frames
            _codec_context->max_b_frames = 1;
            _codec_context->pix_fmt = CODEC_PIXEL_FORMAT;
            FFmpegInvoke.av_opt_set(_codec_context->priv_data, "preset", "fast", 0);
            if (FFmpegInvoke.avcodec_open2(_codec_context, codec, null) < 0) throw new Exception("Could not open codec");

            _avFrameYUV = FFmpegInvoke.avcodec_alloc_frame();
            if (_avFrameYUV == null) throw new Exception("Could not allocate video frame");
            _avFrameYUV->format = (int)CODEC_PIXEL_FORMAT;
            _avFrameYUV->width = width;
            _avFrameYUV->height = height;

            var ret1 = FFmpegInvoke.av_image_alloc(&_avFrameYUV->data_0, _avFrameYUV->linesize, width, height, CODEC_PIXEL_FORMAT, 32);
            if (ret1 < 0) throw new Exception("Could not allocate raw picture buffer");

            _avFrameBGR = FFmpegInvoke.avcodec_alloc_frame();
            if (_avFrameBGR == null) throw new Exception("Could not allocate video frame");
            _avFrameBGR->format = (int)INPUT_PIXEL_FORMAT;
            _avFrameBGR->width = width;
            _avFrameBGR->height = height;

            var ret2 = FFmpegInvoke.av_image_alloc(&_avFrameBGR->data_0, _avFrameBGR->linesize, width, height, INPUT_PIXEL_FORMAT, 32);
            if (ret2 < 0) throw new Exception("Could not allocate raw picture buffer");
        }
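An encode step to pair with this constructor might look like the sketch below. EncodeFrame is a hypothetical name; it assumes the legacy avcodec_encode_video2 API of these bindings plus using System.Runtime.InteropServices for Marshal.Copy.

        public byte[] EncodeFrame(AVFrame* frame)
        {
            var packet = new AVPacket();
            AVPacket* pPacket = &packet;
            FFmpegInvoke.av_init_packet(pPacket);
            pPacket->data = null; // the encoder allocates the output buffer
            pPacket->size = 0;

            int gotOutput = 0;
            if (FFmpegInvoke.avcodec_encode_video2(_codec_context, pPacket, frame, &gotOutput) < 0)
                throw new Exception("Error encoding frame");
            if (gotOutput == 0)
                return null; // encoder buffered the frame; no packet yet

            var result = new byte[pPacket->size];
            Marshal.Copy(new IntPtr(pPacket->data), result, 0, pPacket->size);
            FFmpegInvoke.av_free_packet(pPacket);
            return result;
        }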
        /// <summary>
        /// Downloads the frame from the hardware into a software frame if possible.
        /// The input hardware frame gets freed and the return value will point to the new software frame
        /// </summary>
        /// <param name="codecContext">The codec context.</param>
        /// <param name="input">The input.</param>
        /// <param name="comesFromHardware">if set to <c>true</c> [comes from hardware] otherwise, hardware decoding was not perfomred.</param>
        /// <returns>
        /// The frame downloaded from the device into RAM
        /// </returns>
        /// <exception cref="Exception">Failed to transfer data to output frame</exception>
        public AVFrame *ExchangeFrame(AVCodecContext *codecContext, AVFrame *input, out bool comesFromHardware)
        {
            comesFromHardware = false;

            if (codecContext->hw_device_ctx == null)
            {
                return(input);
            }

            comesFromHardware = true;

            if (input->format != (int)PixelFormat)
            {
                return(input);
            }

            if (RequiresTransfer == false)
            {
                return(input);
            }

            var output = ffmpeg.av_frame_alloc();

            var result = ffmpeg.av_hwframe_transfer_data(output, input, 0);
            if (result < 0)
            {
                ffmpeg.av_frame_free(&output);
                throw new Exception("Failed to transfer data to output frame");
            }

            ffmpeg.av_frame_copy_props(output, input);

            // Remove the tracking entry before av_frame_free nulls the pointer.
            RC.Current.Remove((IntPtr)input);
            ffmpeg.av_frame_free(&input);
            RC.Current.Add(output, $"86: {nameof(HardwareAccelerator)}[{PixelFormat}].{nameof(ExchangeFrame)}()");

            return(output);
        }
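ExchangeFrame is typically called right after a frame is decoded; a sketch of the call site (variable names assumed):

            var usableFrame = accelerator.ExchangeFrame(codecContext, decodedFrame, out var downloaded);
            // usableFrame is either the original software frame or the RAM copy downloaded from the device.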
Example #13
        private static unsafe void Encode(AVCodecContext *ctx, AVFrame *frame, AVPacket *pkt, FileStream outfile)
        {
            var ret = ffmpeg.avcodec_send_frame(ctx, frame);

            if (ret < 0)
            {
                throw new ApplicationException("Error sending frame for encoding\n");
            }

            while (ret >= 0)
            {
                ret = ffmpeg.avcodec_receive_packet(ctx, pkt);

                if (ret == AVERROR_EAGAIN || ret == AVERROR_EOF)
                {
                    break;
                }
                if (ret < 0)
                {
                    throw new ApplicationException("Error during encoding\n");
                }

                if (pkt->size <= 0)
                {
                    Console.WriteLine($"Skipping empty packet for stream {pkt->stream_index}.");
                    ffmpeg.av_packet_unref(pkt);
                    continue;
                }

                var size   = pkt->size;
                var target = new byte[size];
                for (var z = 0; z < size; ++z)
                {
                    target[z] = pkt->data[z];
                }

                outfile.Write(target, 0, size);
                ffmpeg.av_packet_unref(pkt);
            }
        }
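avcodec_send_frame buffers frames internally, so the encoder must be drained at end of stream; the receive loop above already stops on AVERROR_EOF. A usage sketch (ctx, frame, pkt, and outfile are assumed to exist at the call site):

            Encode(ctx, frame, pkt, outfile);    // once per input frame
            Encode(ctx, null, pkt, outfile);     // end of stream: a null frame flushes buffered packets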
        /// <summary>
        /// Initializes a new instance of the <see cref="AudioFrame" /> class.
        /// </summary>
        /// <param name="frame">The frame.</param>
        /// <param name="component">The component.</param>
        internal AudioFrame(AVFrame *frame, MediaComponent component)
            : base(frame, component)
        {
            m_Pointer = (AVFrame *)InternalPointer;

            // Compute the timespans.
            // For audio frames we do not use: frame->pts = ffmpeg.av_frame_get_best_effort_timestamp(frame);
            StartTime = frame->pts == FFmpegEx.AV_NOPTS ?
                        TimeSpan.FromTicks(component.Container.MediaStartTimeOffset.Ticks) :
                        TimeSpan.FromTicks(frame->pts.ToTimeSpan(StreamTimeBase).Ticks - component.Container.MediaStartTimeOffset.Ticks);

            // Compute the audio frame duration
            if (frame->pkt_duration != 0)
            {
                Duration = frame->pkt_duration.ToTimeSpan(StreamTimeBase);
            }
            else
            {
                Duration = TimeSpan.FromTicks((long)Math.Round(TimeSpan.TicksPerMillisecond * 1000d * frame->nb_samples / frame->sample_rate, 0));
            }

            EndTime = TimeSpan.FromTicks(StartTime.Ticks + Duration.Ticks);
        }
        /// <summary>
        /// Initializes a new instance of the <see cref="AudioFrame" /> class.
        /// </summary>
        /// <param name="frame">The frame.</param>
        /// <param name="component">The component.</param>
        internal AudioFrame(AVFrame *frame, MediaComponent component)
            : base(frame, component, MediaType.Audio)
        {
            // Compute the start time.
            frame->pts        = frame->best_effort_timestamp;
            HasValidStartTime = frame->pts != ffmpeg.AV_NOPTS_VALUE;
            StartTime         = frame->pts == ffmpeg.AV_NOPTS_VALUE ?
                                TimeSpan.FromTicks(0) :
                                TimeSpan.FromTicks(frame->pts.ToTimeSpan(StreamTimeBase).Ticks - component.Container.MediaStartTimeOffset.Ticks);

            // Compute the audio frame duration
            if (frame->pkt_duration != 0)
            {
                Duration = frame->pkt_duration.ToTimeSpan(StreamTimeBase);
            }
            else
            {
                Duration = TimeSpan.FromTicks(Convert.ToInt64(TimeSpan.TicksPerMillisecond * 1000d * frame->nb_samples / frame->sample_rate));
            }

            // Compute the audio frame end time
            EndTime = TimeSpan.FromTicks(StartTime.Ticks + Duration.Ticks);
        }
Example #16
        public bool GetNextAudioFrame(AVFrame *frame)
        {
            if (ReceiveFrame(frame))
            {
                return(true);
            }

            // No next frame available --> send a new input packet
            AVPacket packet;

            Util.InitPacket(&packet);
            try
            {
                Input.ReadFramePacket(&packet);
                SendPacket(&packet);
            }
            finally
            {
                ffmpeg.av_packet_unref(&packet);
            }
            // Packet sent, try again to read a frame
            return(ReceiveFrame(frame));
        }
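The ReceiveFrame and SendPacket helpers used above are not part of this example. A minimal sketch over the modern send/receive API, assuming a _codecContext field and the ThrowExceptionIfError extension seen elsewhere on this page:

        private bool ReceiveFrame(AVFrame *frame)
        {
            int ret = ffmpeg.avcodec_receive_frame(_codecContext, frame);
            if (ret == ffmpeg.AVERROR(ffmpeg.EAGAIN) || ret == ffmpeg.AVERROR_EOF)
            {
                return false; // decoder needs more input (or is done)
            }

            ret.ThrowExceptionIfError();
            return true;
        }

        private void SendPacket(AVPacket *packet)
        {
            ffmpeg.avcodec_send_packet(_codecContext, packet).ThrowExceptionIfError();
        }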
Example #17
        private void Initialize(int width, int height, AVPixelFormat inFormat)
        {
            _initialized = true;

            _pContext = FFmpegInvoke.sws_getContext(width, height, inFormat,
                                                    width, height, _pixelFormat,
                                                    FFmpegInvoke.SWS_FAST_BILINEAR, null, null, null);
            if (_pContext == null)
            {
                throw new VideoConverterException("Could not initialize the conversion context.");
            }

            _pCurrentFrame = FFmpegInvoke.avcodec_alloc_frame();

            int outputDataSize = FFmpegInvoke.avpicture_get_size(_pixelFormat, width, height);

            _outputData = new byte[outputDataSize];

            fixed(byte *pOutputData = &_outputData[0])
            {
                FFmpegInvoke.avpicture_fill((AVPicture *)_pCurrentFrame, pOutputData, _pixelFormat, width, height);
            }
        }
        public VideoFrameConverter(int srcWidth, int srcHeight, AVPixelFormat sourcePixelFormat,
                                   int dstWidth, int dstHeight, AVPixelFormat destinationPixelFormat)
        {
            _srcWidth       = srcWidth;
            _srcHeight      = srcHeight;
            _dstWidth       = dstWidth;
            _dstHeight      = dstHeight;
            _srcPixelFormat = sourcePixelFormat;
            _dstPixelFormat = destinationPixelFormat;

            _pConvertContext = ffmpeg.sws_getContext(srcWidth, srcHeight, sourcePixelFormat,
                                                     dstWidth, dstHeight, destinationPixelFormat,
                                                     ffmpeg.SWS_FAST_BILINEAR, null, null, null);
            if (_pConvertContext == null)
            {
                throw new ApplicationException("Could not initialize the conversion context.");
            }

            var convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(destinationPixelFormat, dstWidth, dstHeight, 1).ThrowExceptionIfError();

            _convertedFrameBufferPtr = Marshal.AllocHGlobal(convertedFrameBufferSize);
            _dstData     = new byte_ptrArray4();
            _dstLinesize = new int_array4();

            ffmpeg.av_image_fill_arrays(ref _dstData, ref _dstLinesize, (byte *)_convertedFrameBufferPtr, destinationPixelFormat, dstWidth, dstHeight, 1)
            .ThrowExceptionIfError();

            logger.LogDebug($"Successfully initialised ffmpeg based image converter for {srcWidth}:{srcHeight}:{sourcePixelFormat}->{dstWidth}:{dstHeight}:{_dstPixelFormat}.");

            _dstFrame         = ffmpeg.av_frame_alloc();
            _dstFrame->width  = _dstWidth;
            _dstFrame->height = _dstHeight;
            _dstFrame->data.UpdateFrom(_dstData);
            _dstFrame->linesize.UpdateFrom(_dstLinesize);
            _dstFrame->format = (int)_dstPixelFormat;
        }
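A Convert method usually pairs with this constructor; a minimal sketch over the same fields, matching the sws_scale call pattern used elsewhere on this page:

        public AVFrame Convert(AVFrame sourceFrame)
        {
            // Scale/convert the source into the buffer wired up to _dstFrame.
            ffmpeg.sws_scale(_pConvertContext,
                             sourceFrame.data, sourceFrame.linesize, 0, sourceFrame.height,
                             _dstData, _dstLinesize);
            return *_dstFrame;
        }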
        public void StoreFrameFromDecoder(AudioDecoder decoder, Resampler resampler = null)
        {
            AVFrame *         inputFrame      = null;
            AudioSampleBuffer buffer          = null;
            AudioSampleBuffer resampledBuffer = null;

            try
            {
                Util.InitInputFrame(&inputFrame);
                if (decoder.GetNextAudioFrame(inputFrame))
                {
                    buffer = new AudioSampleBuffer(inputFrame);
                    if (resampler != null)
                    {
                        resampler.Resample(buffer, out resampledBuffer, inputFrame->nb_samples);
                        AddSamples(resampledBuffer, inputFrame->nb_samples);
                    }
                    else
                    {
                        AddSamples(buffer, inputFrame->nb_samples);
                    }
                }
            }
            finally
            {
                if (buffer != null)
                {
                    buffer.Dispose();
                }
                if (resampledBuffer != null)
                {
                    resampledBuffer.Dispose();
                }
                ffmpeg.av_frame_free(&inputFrame);
            }
        }
Example #20
    public void WriteFrame(AVFrame *srcFrame)
    {
        // Make the frame writable
        ffmpeg.av_frame_make_writable(this._frame);

        // Convert the pixel format
        ffmpeg.sws_scale(this._convertContext,
                         srcFrame->data, srcFrame->linesize, 0, this.videoHeight,
                         this._frame->data, this._frame->linesize);

        // Set the frame index (pts)
        this._frame->pts = this._frameIndex++;

        // Encode
        ffmpeg.avcodec_send_frame(this._codecContext, this._frame);
        ffmpeg.avcodec_receive_packet(this._codecContext, this._packet);

        // Send the packet to the stream
        ffmpeg.av_packet_rescale_ts(this._packet, this._codecContext->time_base, this._avStream->time_base);
        this._packet->stream_index = this._avStream->index;
        ffmpeg.av_write_frame(this._formatContext, this._packet);

        ffmpeg.av_packet_unref(this._packet);
    }
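avcodec_receive_packet can legitimately return EAGAIN while the encoder is still buffering, in which case the single send/receive pair above silently drops output, and both return codes are ignored. A more defensive sketch of the same body (same fields assumed):

        ffmpeg.avcodec_send_frame(this._codecContext, this._frame);
        while (ffmpeg.avcodec_receive_packet(this._codecContext, this._packet) == 0)
        {
            ffmpeg.av_packet_rescale_ts(this._packet, this._codecContext->time_base, this._avStream->time_base);
            this._packet->stream_index = this._avStream->index;
            ffmpeg.av_write_frame(this._formatContext, this._packet);
            ffmpeg.av_packet_unref(this._packet);
        }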
    public VideoStreamDecoder(string fileName, AVHWDeviceType HWDeviceType = AVHWDeviceType.AV_HWDEVICE_TYPE_NONE)
    {
        _pFormatContext = ffmpeg.avformat_alloc_context();

        if (_pFormatContext == null)
        {
            throw new Exception("Could not allocate the format context");
        }

        _receivedFrame = ffmpeg.av_frame_alloc();
        var pFormatContext = _pFormatContext;

        ffmpeg.avformat_open_input(&pFormatContext, fileName, null, null);
        ffmpeg.avformat_find_stream_info(_pFormatContext, null);

        AVCodec *codec = null;

        _streamIndex =
            ffmpeg.av_find_best_stream(_pFormatContext, AVMediaType.AVMEDIA_TYPE_VIDEO, -1, -1, &codec, 0);
        _pCodecContext = ffmpeg.avcodec_alloc_context3(codec);

        if (HWDeviceType != AVHWDeviceType.AV_HWDEVICE_TYPE_NONE)
        {
            ffmpeg.av_hwdevice_ctx_create(&_pCodecContext->hw_device_ctx, HWDeviceType, null, null, 0);
        }

        ffmpeg.avcodec_parameters_to_context(_pCodecContext, _pFormatContext->streams[_streamIndex]->codecpar);
        ffmpeg.avcodec_open2(_pCodecContext, codec, null);

        CodecName   = ffmpeg.avcodec_get_name(codec->id);
        FrameSize   = new Size(_pCodecContext->width, _pCodecContext->height);
        PixelFormat = _pCodecContext->pix_fmt;

        _pPacket = ffmpeg.av_packet_alloc();
        _pFrame  = ffmpeg.av_frame_alloc();
    }
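The decode loop that accompanies this constructor in the FFmpeg.AutoGen samples is not shown here; it looks roughly like this (same fields, ThrowExceptionIfError as used elsewhere on this page):

    public bool TryDecodeNextFrame(out AVFrame frame)
    {
        ffmpeg.av_frame_unref(_pFrame);
        ffmpeg.av_frame_unref(_receivedFrame);
        int error;

        do
        {
            try
            {
                // Keep reading packets until one belongs to the chosen video stream.
                do
                {
                    ffmpeg.av_packet_unref(_pPacket);
                    error = ffmpeg.av_read_frame(_pFormatContext, _pPacket);
                    if (error == ffmpeg.AVERROR_EOF)
                    {
                        frame = *_pFrame;
                        return false;
                    }
                    error.ThrowExceptionIfError();
                } while (_pPacket->stream_index != _streamIndex);

                ffmpeg.avcodec_send_packet(_pCodecContext, _pPacket).ThrowExceptionIfError();
            }
            finally
            {
                ffmpeg.av_packet_unref(_pPacket);
            }

            error = ffmpeg.avcodec_receive_frame(_pCodecContext, _pFrame);
        } while (error == ffmpeg.AVERROR(ffmpeg.EAGAIN));

        error.ThrowExceptionIfError();

        if (_pCodecContext->hw_device_ctx != null)
        {
            // Hardware path: download the decoded surface into _receivedFrame.
            ffmpeg.av_hwframe_transfer_data(_receivedFrame, _pFrame, 0).ThrowExceptionIfError();
            frame = *_receivedFrame;
        }
        else
        {
            frame = *_pFrame;
        }

        return true;
    }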
        public AVFrame *Convert(AVFrame *framep)
        {
            if (this.IsConvert)
            {
                ffmpeg.sws_scale(convert_context, framep->data, framep->linesize, 0, framep->height, _dstData, _dstLinesize);

                AVFrame *tmp = ffmpeg.av_frame_alloc();
                tmp->best_effort_timestamp = framep->best_effort_timestamp;
                tmp->width  = FrameSize.Width;
                tmp->height = FrameSize.Height;
                tmp->data   = new byte_ptrArray8();
                tmp->data.UpdateFrom(_dstData);
                tmp->linesize = new int_array8();
                tmp->linesize.UpdateFrom(_dstLinesize);

                ffmpeg.av_frame_unref(framep);
                return(tmp);
            }
            else
            {
                return(framep);
            }
        }
Example #23
        internal H264Decoder()
        {
            try
            {
                RegisterFFmpegBinaries();
                var pCodec = ffmpeg.avcodec_find_decoder(AVCodecID.AV_CODEC_ID_H264);

                _pCodecCtx               = ffmpeg.avcodec_alloc_context3(pCodec);
                _pCodecCtx->qcompress    = 1F;
                _pCodecCtx->frame_number = 1;
                _pCodecCtx->codec_type   = AVMediaType.AVMEDIA_TYPE_VIDEO;
                _pCodecParserCtx         = ffmpeg.av_parser_init((int)AVCodecID.AV_CODEC_ID_H264);
                if (null == _pCodecParserCtx)
                {
                    throw new Exception("_pCodecParserCtx is null");
                }

                if ((pCodec->capabilities & ffmpeg.AV_CODEC_CAP_TRUNCATED) != 0)
                {
                    _pCodecCtx->flags |= ffmpeg.AV_CODEC_FLAG_TRUNCATED;
                }


                var ret = ffmpeg.avcodec_open2(_pCodecCtx, pCodec, null);
                if (ret < 0)
                {
                    throw new Exception($"avcodec_open2 failed with error code {ret}");
                }

                _pFrame = ffmpeg.av_frame_alloc();
            }
            catch (Exception ex)
            {
                throw new Exception("Failed to initialize the H264 decoder", ex);
            }
        }
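The constructor above only prepares the parser and the codec. A hypothetical DecodeBuffer sketch over the same fields, using av_parser_parse2 to split the raw Annex-B stream and the send/receive API to decode (method name and hand-off point are assumptions):

        public void DecodeBuffer(byte* data, int size)
        {
            while (size > 0)
            {
                byte* outBuf = null;
                int outSize = 0;

                // Split the raw byte stream into complete, decodable packets.
                int consumed = ffmpeg.av_parser_parse2(_pCodecParserCtx, _pCodecCtx,
                                                       &outBuf, &outSize, data, size,
                                                       ffmpeg.AV_NOPTS_VALUE, ffmpeg.AV_NOPTS_VALUE, 0);
                data += consumed;
                size -= consumed;

                if (outSize <= 0)
                {
                    continue; // parser needs more input
                }

                var packet = ffmpeg.av_packet_alloc();
                packet->data = outBuf;
                packet->size = outSize;

                if (ffmpeg.avcodec_send_packet(_pCodecCtx, packet) == 0)
                {
                    while (ffmpeg.avcodec_receive_frame(_pCodecCtx, _pFrame) == 0)
                    {
                        // _pFrame now holds a decoded picture; hand it off here.
                    }
                }

                ffmpeg.av_packet_free(&packet);
            }
        }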
 public static extern int swr_convert_frame(SwrContext * @swr, AVFrame * @output, AVFrame * @input);
Example #25
 /// <summary>
 /// Adds the specified frame.
 /// </summary>
 /// <param name="frame">The frame.</param>
 /// <param name="location">The location.</param>
 public void Add(AVFrame *frame, string location)
 {
     Add(UnmanagedType.Frame, new IntPtr(frame), location);
 }
Example #26
        private static unsafe void Main(string[] args)
        {
            Console.WriteLine("Runnung in {0}-bit mode.", Environment.Is64BitProcess ? "64" : "32");

            // register path to ffmpeg
            switch (Environment.OSVersion.Platform)
            {
            case PlatformID.Win32NT:
            case PlatformID.Win32S:
            case PlatformID.Win32Windows:
                string ffmpegPath = string.Format(@"../../../FFmpeg/bin/windows/{0}", Environment.Is64BitProcess ? "x64" : "x86");
                InteropHelper.RegisterLibrariesSearchPath(ffmpegPath);
                break;

            case PlatformID.Unix:
            case PlatformID.MacOSX:
                string libraryPath = Environment.GetEnvironmentVariable(InteropHelper.LD_LIBRARY_PATH);
                InteropHelper.RegisterLibrariesSearchPath(libraryPath);
                break;
            }

            // decode 100 frame from url or path

            //string url = @"../../sample_mpeg4.mp4";
            string url = @"http://hubblesource.stsci.edu/sources/video/clips/details/images/centaur_1.mpg";

            FFmpegInvoke.av_register_all();
            FFmpegInvoke.avcodec_register_all();
            FFmpegInvoke.avformat_network_init();


            AVFormatContext *pFormatContext = FFmpegInvoke.avformat_alloc_context();

            if (FFmpegInvoke.avformat_open_input(&pFormatContext, url, null, null) != 0)
            {
                throw new Exception("Could not open file");
            }

            if (FFmpegInvoke.avformat_find_stream_info(pFormatContext, null) != 0)
            {
                throw new Exception("Could not find stream info");
            }

            AVStream *pStream = null;

            for (int i = 0; i < pFormatContext->nb_streams; i++)
            {
                if (pFormatContext->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
                {
                    pStream = pFormatContext->streams[i];
                    break;
                }
            }
            if (pStream == null)
            {
                throw new Exception("Could not find video stream");
            }

            AVCodecContext codecContext    = *(pStream->codec);
            int            width           = codecContext.width;
            int            height          = codecContext.height;
            AVPixelFormat  sourcePixFmt    = codecContext.pix_fmt;
            AVCodecID      codecId         = codecContext.codec_id;
            var            convertToPixFmt = AVPixelFormat.PIX_FMT_BGR24;
            SwsContext *   pConvertContext = FFmpegInvoke.sws_getContext(width, height, sourcePixFmt,
                                                                         width, height, convertToPixFmt,
                                                                         FFmpegInvoke.SWS_FAST_BILINEAR, null, null, null);

            if (pConvertContext == null)
            {
                throw new Exception("Could not initialize the conversion context");
            }

            var pConvertedFrame          = (AVPicture *)FFmpegInvoke.avcodec_alloc_frame();
            int convertedFrameBufferSize = FFmpegInvoke.avpicture_get_size(convertToPixFmt, width, height);
            var pConvertedFrameBuffer    = (byte *)FFmpegInvoke.av_malloc((uint)convertedFrameBufferSize);

            FFmpegInvoke.avpicture_fill(pConvertedFrame, pConvertedFrameBuffer, convertToPixFmt, width, height);

            AVCodec *pCodec = FFmpegInvoke.avcodec_find_decoder(codecId);

            if (pCodec == null)
            {
                throw new Exception("Unsupported codec");
            }

            // Reusing the codec context from stream info;
            // alternatively it could be allocated like this (but that does not work for all codecs):
            // AVCodecContext* pCodecContext = FFmpegInvoke.avcodec_alloc_context3(pCodec);
            AVCodecContext *pCodecContext = &codecContext;

            if ((pCodec->capabilities & FFmpegInvoke.CODEC_CAP_TRUNCATED) == FFmpegInvoke.CODEC_CAP_TRUNCATED)
            {
                pCodecContext->flags |= FFmpegInvoke.CODEC_FLAG_TRUNCATED;
            }

            if (FFmpegInvoke.avcodec_open2(pCodecContext, pCodec, null) < 0)
            {
                throw new Exception("Could not open codec");
            }

            AVFrame *pDecodedFrame = FFmpegInvoke.avcodec_alloc_frame();

            var       packet  = new AVPacket();
            AVPacket *pPacket = &packet;

            FFmpegInvoke.av_init_packet(pPacket);

            int frameNumber = 0;

            while (frameNumber < 100)
            {
                if (FFmpegInvoke.av_read_frame(pFormatContext, pPacket) < 0)
                {
                    throw new Exception("Could not read frame");
                }

                if (pPacket->stream_index != pStream->index)
                {
                    FFmpegInvoke.av_free_packet(pPacket); // discard packets from other streams
                    continue;
                }

                int gotPicture = 0;
                int size       = FFmpegInvoke.avcodec_decode_video2(pCodecContext, pDecodedFrame, &gotPicture, pPacket);
                if (size < 0)
                {
                    throw new Exception(string.Format("Error while decoding frame {0}", frameNumber));
                }

                if (gotPicture == 1)
                {
                    Console.WriteLine("frame: {0}", frameNumber);

                    byte **src = &pDecodedFrame->data_0;
                    byte **dst = &pConvertedFrame->data_0;
                    FFmpegInvoke.sws_scale(pConvertContext, src, pDecodedFrame->linesize, 0,
                                           height, dst, pConvertedFrame->linesize);

                    byte *convertedFrameAddress = pConvertedFrame->data_0;

                    var imageBufferPtr = new IntPtr(convertedFrameAddress);

                    int linesize = pConvertedFrame->linesize[0];
                    using (var bitmap = new Bitmap(width, height, linesize, PixelFormat.Format24bppRgb, imageBufferPtr))
                    {
                        bitmap.Save(@"frame.buffer.jpg", ImageFormat.Jpeg);
                    }

                    frameNumber++;

                    System.Threading.Thread.Sleep(1000);
                }
            }

            FFmpegInvoke.av_free(pConvertedFrame);
            FFmpegInvoke.av_free(pConvertedFrameBuffer);
            FFmpegInvoke.sws_freeContext(pConvertContext);

            FFmpegInvoke.av_free(pDecodedFrame);
            FFmpegInvoke.avcodec_close(pCodecContext);
            FFmpegInvoke.avformat_close_input(&pFormatContext);
        }
Example #27
        static void Main(string[] args)
        {
            FFmpegBinariesHelper.RegisterFFmpegBinaries();

            int      ret;
            var      packet = new AVPacket(); // starts zeroed: data = null, size = 0
            AVFrame *frame = null;

            AVMediaType type;
            int         stream_index;
            int         i;

            //ffmpeg.av_register_all();
            //ffmpeg.avfilter_register_all();
            if ((ret = OpenInputFile("rtsp://113.136.42.40:554/PLTV/88888888/224/3221226090/10000100000000060000000001759099_0.smil")) < 0)
            {
                goto end;
            }
            if ((ret = OpenOutputFile("E:\\hls\\out.m3u8")) < 0)
            {
                goto end;
            }
            //var avBitStreamFilter = ffmpeg.av_bsf_get_by_name("h264_mp4toannexb");
            //fixed (AVBSFContext** ctx = &absCtx)
            //ffmpeg.av_bsf_alloc(avBitStreamFilter, ctx);
            //ffmpeg.av_bsf_init(absCtx);
            /* read all packets */
            int count = 0;
            int flag  = 1;

            while (true)
            {
                if ((ret = ffmpeg.av_read_frame(ifmt_ctx, &packet)) < 0)
                {
                    break;
                }
                stream_index = packet.stream_index;
                type         = ifmt_ctx->streams[packet.stream_index]->codec->codec_type;
                ffmpeg.av_log(null, ffmpeg.AV_LOG_DEBUG, "Demuxer gave frame of stream_index %u\n");

                ffmpeg.av_log(null, ffmpeg.AV_LOG_DEBUG, "Going to reencode&filter the frame\n");
                frame = ffmpeg.av_frame_alloc();
                if (null == frame)
                {
                    ret = ffmpeg.AVERROR(12); // AVERROR(ENOMEM)
                    break;
                }
                ffmpeg.av_packet_rescale_ts(&packet,
                                            ifmt_ctx->streams[stream_index]->time_base,
                                            ifmt_ctx->streams[stream_index]->codec->time_base);

                ret = dec_func(ifmt_ctx->streams[stream_index]->codec, frame, &packet);
                if (ret < 0)
                {
                    ffmpeg.av_frame_free(&frame);
                    ffmpeg.av_log(null, ffmpeg.AV_LOG_ERROR, "Decoding failed\n");
                    break;
                }
                //if (got_frame == 0)
                //{
                frame->pts = frame->pkt_pts;
                // frame->pts = av_frame_get_best_effort_timestamp(frame);
                // frame->pts=count;
                if (type == AVMediaType.AVMEDIA_TYPE_VIDEO)
                {
                    ret = encode_write_frame(frame, stream_index, null);
                }
                else
                {
                    if (flag != 0)
                    {
                        InitSwr(stream_index);
                        flag = 0;
                    }

                    AVFrame *frame_out = ffmpeg.av_frame_alloc();
                    if (0 != TransSample(frame, frame_out, stream_index))
                    {
                        ffmpeg.av_log(null, ffmpeg.AV_LOG_ERROR, "convert audio failed\n");
                        ret = -1;
                    }
                    // frame_out->pts = frame->pkt_pts;
                    ret = encode_write_frame(frame_out, stream_index, null);
                    ffmpeg.av_frame_free(&frame_out);
                }
                ffmpeg.av_frame_free(&frame);
                if (ret < 0)
                {
                    goto end;
                }
                //}
                //else
                //{
                //    ffmpeg.av_frame_free(&frame);
                //}

                ffmpeg.av_packet_unref(&packet);
                ++count;
            }
            /* flush  encoders */
            // for (i = 0; i < ifmt_ctx->nb_streams; i++) {
            // ret = flush_encoder(i);
            // if (ret < 0) {
            // av_log(NULL, AV_LOG_ERROR, "Flushing encoder failed\n");
            // goto end;
            // }
            // }
            ffmpeg.av_log(null, ffmpeg.AV_LOG_ERROR, "Flushing encoder failed\n");
            ffmpeg.av_write_trailer(ofmt_ctx);
end:
            ffmpeg.av_packet_unref(&packet);
            ffmpeg.av_frame_free(&frame);
            //fixed (AVBSFContext** ctx = &absCtx)
            //ffmpeg.av_bsf_free(ctx);
            for (i = 0; i < ifmt_ctx->nb_streams; i++)
            {
                ffmpeg.avcodec_close(ifmt_ctx->streams[i]->codec);
                if (ofmt_ctx != null && ofmt_ctx->nb_streams > i && ofmt_ctx->streams[i] != null && ofmt_ctx->streams[i]->codec != null)
                {
                    ffmpeg.avcodec_close(ofmt_ctx->streams[i]->codec);
                }
            }
            // av_free(filter_ctx);
            fixed(AVFormatContext **ss = &ifmt_ctx)
            ffmpeg.avformat_close_input(ss);

            if (ofmt_ctx != null && (ofmt_ctx->oformat->flags & ffmpeg.AVFMT_NOFILE) == 0)
            {
                ffmpeg.avio_closep(&ofmt_ctx->pb);
            }
            ffmpeg.avformat_free_context(ofmt_ctx);

            // if (ret < 0)
            // av_log(NULL, AV_LOG_ERROR, "Error occurred: %s\n", av_err2str(ret)); //av_err2str(ret));
        }
Example #28
        /// <summary>
        /// Initializes a new instance of the <see cref="VideoFrame" /> class.
        /// </summary>
        /// <param name="frame">The frame.</param>
        /// <param name="component">The component.</param>
        internal VideoFrame(AVFrame *frame, MediaComponent component)
            : base(frame, component)
        {
            const int AV_TIMECODE_STR_SIZE = 16 + 1;

            m_Pointer = (AVFrame *)InternalPointer;

            var repeatFactor = 1d + (0.5d * frame->repeat_pict);
            var timeBase     = ffmpeg.av_guess_frame_rate(component.Container.InputContext, component.Stream, frame);

            Duration = repeatFactor.ToTimeSpan(new AVRational {
                num = timeBase.den, den = timeBase.num
            });

            // for video frames, we always get the best effort timestamp as dts and pts might
            // contain different times.
            frame->pts = ffmpeg.av_frame_get_best_effort_timestamp(frame);

            HasValidStartTime = frame->pts != FFmpegEx.AV_NOPTS;
            StartTime         = frame->pts == FFmpegEx.AV_NOPTS ?
                                TimeSpan.FromTicks(component.Container.MediaStartTimeOffset.Ticks) :
                                TimeSpan.FromTicks(frame->pts.ToTimeSpan(StreamTimeBase).Ticks - component.Container.MediaStartTimeOffset.Ticks);

            EndTime = TimeSpan.FromTicks(StartTime.Ticks + Duration.Ticks);

            DisplayPictureNumber = frame->display_picture_number == 0 ?
                                   (int)Math.Round((double)StartTime.Ticks / Duration.Ticks, 0) : frame->display_picture_number;

            CodedPictureNumber = frame->coded_picture_number;

            // SMPTE timecode calculation
            var timeCodeInfo     = (AVTimecode *)ffmpeg.av_malloc((ulong)Marshal.SizeOf(typeof(AVTimecode)));
            var startFrameNumber = (int)Math.Round((double)component.StartTimeOffset.Ticks / Duration.Ticks, 0);

            ffmpeg.av_timecode_init(timeCodeInfo, timeBase, 0, startFrameNumber, null);
            var isNtsc      = timeBase.num == 30000 && timeBase.den == 1001;
            var frameNumber = isNtsc ?
                              ffmpeg.av_timecode_adjust_ntsc_framenum2(DisplayPictureNumber, (int)timeCodeInfo->fps) :
                              DisplayPictureNumber;

            var timeCode       = ffmpeg.av_timecode_get_smpte_from_framenum(timeCodeInfo, frameNumber);
            var timeCodeBuffer = (byte *)ffmpeg.av_malloc(AV_TIMECODE_STR_SIZE);

            ffmpeg.av_timecode_make_smpte_tc_string(timeCodeBuffer, timeCode, 1);
            SmtpeTimecode = Marshal.PtrToStringAnsi(new IntPtr(timeCodeBuffer));

            ffmpeg.av_free(timeCodeInfo);
            ffmpeg.av_free(timeCodeBuffer);

            // Process side data such as CC packets
            for (var i = 0; i < frame->nb_side_data; i++)
            {
                var sideData = frame->side_data[i];

                // Get the Closed-Caption packets
                if (sideData->type == AVFrameSideDataType.AV_FRAME_DATA_A53_CC)
                {
                    // Parse 3 bytes at a time
                    for (var p = 0; p < sideData->size; p += 3)
                    {
                        var packet = new ClosedCaptionPacket(StartTime, sideData->data[p + 0], sideData->data[p + 1], sideData->data[p + 2]);
                        if (packet.PacketType == CCPacketType.NullPad || packet.PacketType == CCPacketType.Unrecognized)
                        {
                            continue;
                        }

                        // at this point, we have valid CC data
                        ClosedCaptions.Add(packet);
                    }

                    continue;
                }
            }
        }
        public void Open(string FileName)
        {
            DecoderConfig.Init();

            AVFormatContext* pFormatContext = FFmpegInvoke.avformat_alloc_context();
            _pFormatContext = pFormatContext;

            if (FFmpegInvoke.avformat_open_input(&pFormatContext, FileName, null, null) != 0)
                throw new Exception("Could not open file");

            if (FFmpegInvoke.avformat_find_stream_info(pFormatContext, null) != 0)
                throw new Exception("Could not find stream info");

            for (int i = 0; i < pFormatContext->nb_streams; i++)
            {
                if (pFormatContext->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
                {
                    _pStream = pFormatContext->streams[i];
                    break;
                }
            }

            if (_pStream == null)
                throw new Exception("Could not find video stream");
            AVCodecContext codecContext = *(_pStream->codec);
            _pStream->codec->workaround_bugs = FFmpegInvoke.FF_BUG_AUTODETECT; // set on the live context, not on the local copy

            _frameduration = 1 / q2d(_pStream->r_frame_rate);
            FrameCount = _pStream->nb_frames;
            Duration = (float)pFormatContext->duration / FFmpegInvoke.AV_TIME_BASE;
            Width = codecContext.width;
            Height = codecContext.height;

            AVPixelFormat sourcePixFmt = codecContext.pix_fmt;
            AVCodecID codecId = codecContext.codec_id;
            var convertToPixFmt = AVPixelFormat.AV_PIX_FMT_RGB24;
            _pConvertContext = FFmpegInvoke.sws_getContext(Width, Height, sourcePixFmt,
                                                                       Width, Height, convertToPixFmt,
                                                                       FFmpegInvoke.SWS_FAST_BILINEAR, null, null, null);

            if (_pConvertContext == null)
                throw new Exception("Could not initialize the conversion context");

            _pConvertedFrame = (AVPicture*)FFmpegInvoke.avcodec_alloc_frame();
            int convertedFrameBufferSize = FFmpegInvoke.avpicture_get_size(convertToPixFmt, Width, Height);
            _pConvertedFrameBuffer = (byte*)FFmpegInvoke.av_malloc((uint)convertedFrameBufferSize);
            FFmpegInvoke.avpicture_fill(_pConvertedFrame, _pConvertedFrameBuffer, convertToPixFmt, Width, Height);

            AVCodec* pCodec = FFmpegInvoke.avcodec_find_decoder(codecId);
            if (pCodec == null)
                throw new Exception("Unsupported codec");

            if (FFmpegInvoke.avcodec_open2(_pStream->codec, pCodec, null) < 0)
                throw new Exception("Could not open codec");

            _pDecodedFrame = FFmpegInvoke.avcodec_alloc_frame();

            _packet = new AVPacket();

            fixed (AVPacket* pPacket = &_packet)
            {
                FFmpegInvoke.av_init_packet(pPacket);
            }

            _opened = true;
        }
 /// <inheritdoc />
 public unsafe void OnVideoFrameDecoded(AVFrame *videoFrame, AVFormatContext *context) =>
 Parent?.RaiseVideoFrameDecodedEvent(videoFrame, context);
Example #31
        private void ReadFrames()
        {
            AVFrame *   pConvertedFrame       = null;
            sbyte *     pConvertedFrameBuffer = null;
            SwsContext *pConvertContext       = null;

            BufferedWaveProvider waveProvider  = null;
            SampleChannel        sampleChannel = null;

            bool audioInited = false;
            bool videoInited = false;
            var  packet      = new AVPacket();

            do
            {
                ffmpeg.av_init_packet(&packet);

                AVFrame *frame = ffmpeg.av_frame_alloc();
                ffmpeg.av_frame_unref(frame);

                if (ffmpeg.av_read_frame(_formatContext, &packet) < 0)
                {
                    _stopReadingFrames = true;
                    _res = ReasonToFinishPlaying.VideoSourceError;
                    break;
                }

                if ((packet.flags & ffmpeg.AV_PKT_FLAG_CORRUPT) == ffmpeg.AV_PKT_FLAG_CORRUPT)
                {
                    break;
                }

                AVPacket packetTemp = packet;
                var      nf         = NewFrame;
                var      da         = DataAvailable;

                _lastPacket = DateTime.UtcNow;
                if (_audioStream != null && packetTemp.stream_index == _audioStream->index)
                {
                    if (HasAudioStream != null)
                    {
                        HasAudioStream?.Invoke(this, EventArgs.Empty);
                        HasAudioStream = null;
                    }
                    if (da != null)
                    {
                        int  s       = 0;
                        var  buffer  = new sbyte[_audioCodecContext->sample_rate * 2];
                        var  tbuffer = new sbyte[_audioCodecContext->sample_rate * 2];
                        bool b       = false;

                        fixed(sbyte **outPtrs = new sbyte *[32])
                        {
                            fixed(sbyte *bPtr = &tbuffer[0])
                            {
                                outPtrs[0] = bPtr;
                                do
                                {
                                    int gotFrame = 0;
                                    int inUsed   = ffmpeg.avcodec_decode_audio4(_audioCodecContext, frame, &gotFrame,
                                                                                &packetTemp);

                                    if (inUsed < 0 || gotFrame == 0)
                                    {
                                        b = true;
                                        break;
                                    }

                                    int numSamplesOut = ffmpeg.swr_convert(_swrContext,
                                                                           outPtrs,
                                                                           _audioCodecContext->sample_rate,
                                                                           &frame->data0,
                                                                           frame->nb_samples);

                                    var l = numSamplesOut * 2 * _audioCodecContext->channels;
                                    Buffer.BlockCopy(tbuffer, 0, buffer, s, l);
                                    s += l;


                                    packetTemp.data += inUsed;
                                    packetTemp.size -= inUsed;
                                } while (packetTemp.size > 0);
                            }
                        }

                        if (b)
                        {
                            break;
                        }

                        ffmpeg.av_free_packet(&packet);
                        ffmpeg.av_frame_free(&frame);


                        if (!audioInited)
                        {
                            audioInited     = true;
                            RecordingFormat = new WaveFormat(_audioCodecContext->sample_rate, 16,
                                                             _audioCodecContext->channels);
                            waveProvider = new BufferedWaveProvider(RecordingFormat)
                            {
                                DiscardOnBufferOverflow = true,
                                BufferDuration          =
                                    TimeSpan.FromMilliseconds(500)
                            };
                            sampleChannel = new SampleChannel(waveProvider);

                            sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;
                        }

                        byte[] ba = new byte[s];
                        Buffer.BlockCopy(buffer, 0, ba, 0, s);


                        waveProvider.AddSamples(ba, 0, s);

                        var sampleBuffer = new float[s];
                        int read         = sampleChannel.Read(sampleBuffer, 0, s);


                        da(this, new DataAvailableEventArgs(ba, read));


                        if (Listening)
                        {
                            WaveOutProvider?.AddSamples(ba, 0, read);
                        }
                    }
                }

                if (nf != null && _videoStream != null && packet.stream_index == _videoStream->index)
                {
                    int frameFinished = 0;
                    //decode video frame

                    int ret = ffmpeg.avcodec_decode_video2(_codecContext, frame, &frameFinished, &packetTemp);
                    if (ret < 0)
                    {
                        ffmpeg.av_free_packet(&packet);
                        ffmpeg.av_frame_free(&frame);
                        break;
                    }

                    if (frameFinished == 1)
                    {
                        if (!videoInited)
                        {
                            videoInited     = true;
                            pConvertedFrame = ffmpeg.av_frame_alloc();
                            var convertedFrameBufferSize = ffmpeg.avpicture_get_size(AVPixelFormat.AV_PIX_FMT_BGR24,
                                                                                     _codecContext->width, _codecContext->height);

                            pConvertedFrameBuffer = (sbyte *)ffmpeg.av_malloc((ulong)convertedFrameBufferSize);

                            ffmpeg.avpicture_fill((AVPicture *)pConvertedFrame, pConvertedFrameBuffer,
                                                  AVPixelFormat.AV_PIX_FMT_BGR24, _codecContext->width, _codecContext->height);

                            pConvertContext = ffmpeg.sws_getContext(_codecContext->width, _codecContext->height,
                                                                    _codecContext->pix_fmt, _codecContext->width, _codecContext->height,
                                                                    AVPixelFormat.AV_PIX_FMT_BGR24, ffmpeg.SWS_FAST_BILINEAR, null, null, null);
                        }
                        var src       = &frame->data0;
                        var dst       = &pConvertedFrame->data0;
                        var srcStride = frame->linesize;
                        var dstStride = pConvertedFrame->linesize;
                        ffmpeg.sws_scale(pConvertContext, src, srcStride, 0, _codecContext->height, dst, dstStride);

                        var convertedFrameAddress = pConvertedFrame->data0;
                        if (convertedFrameAddress != null)
                        {
                            var imageBufferPtr = new IntPtr(convertedFrameAddress);

                            var linesize = dstStride[0];

                            if (frame->decode_error_flags > 0)
                            {
                                ffmpeg.av_free_packet(&packet);
                                ffmpeg.av_frame_free(&frame);
                                break;
                            }

                            using (
                                var mat = new Bitmap(_codecContext->width, _codecContext->height, linesize,
                                                     PixelFormat.Format24bppRgb, imageBufferPtr))
                            {
                                var nfe = new NewFrameEventArgs((Bitmap)mat.Clone());
                                nf.Invoke(this, nfe);
                            }

                            _lastVideoFrame = DateTime.UtcNow;
                        }
                    }
                }

                if (_videoStream != null)
                {
                    if ((DateTime.UtcNow - _lastVideoFrame).TotalMilliseconds > _timeout)
                    {
                        _res = ReasonToFinishPlaying.DeviceLost;
                        _stopReadingFrames = true;
                    }
                }

                ffmpeg.av_free_packet(&packet);
                ffmpeg.av_frame_free(&frame);
            } while (!_stopReadingFrames && !MainForm.ShuttingDown);


            try
            {
                Program.FfmpegMutex.WaitOne();

                if (pConvertedFrame != null)
                {
                    ffmpeg.av_free(pConvertedFrame);
                }

                if (pConvertedFrameBuffer != null)
                {
                    ffmpeg.av_free(pConvertedFrameBuffer);
                }

                if (_formatContext != null)
                {
                    if (_formatContext->streams != null)
                    {
                        int j = (int)_formatContext->nb_streams;
                        for (var i = j - 1; i >= 0; i--)
                        {
                            AVStream *stream = _formatContext->streams[i];

                            if (stream != null && stream->codec != null && stream->codec->codec != null)
                            {
                                stream->discard = AVDiscard.AVDISCARD_ALL;
                                ffmpeg.avcodec_close(stream->codec);
                            }
                        }
                    }
                    fixed(AVFormatContext **f = &_formatContext)
                    {
                        ffmpeg.avformat_close_input(f);
                    }
                    _formatContext = null;
                }

                _videoStream       = null;
                _audioStream       = null;
                _audioCodecContext = null;
                _codecContext      = null;

                if (_swrContext != null)
                {
                    fixed(SwrContext **s = &_swrContext)
                    {
                        ffmpeg.swr_free(s);
                    }
                    _swrContext = null;
                }

                if (pConvertContext != null)
                {
                    ffmpeg.sws_freeContext(pConvertContext);
                }

                if (sampleChannel != null)
                {
                    sampleChannel.PreVolumeMeter -= SampleChannelPreVolumeMeter;
                    sampleChannel = null;
                }
            }
            catch (Exception ex)
            {
                Logger.LogException(ex, "Media Stream (close)");
            }
            finally
            {
                try
                {
                    Program.FfmpegMutex.ReleaseMutex();
                }
                catch
                {
                }
            }

            PlayingFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
            AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
        }
        /// <summary>
        /// Initializes the audio.
        /// </summary>
        /// <exception cref="System.Exception">
        /// Unsupported audio codec
        /// or
        /// Could not create audio output codec context from input
        /// or
        /// Could not open codec
        /// </exception>
        /// <exception cref="System.InvalidOperationException">Could not load media file</exception>
        private void InitializeAudio()
        {
            // Extract wave sample format and codec id
            var inputCodecContext = *(InputAudioStream->codec);
            var inputCodecId = inputCodecContext.codec_id;

            // Get an input decoder for the input codec
            AVCodec* inputDecoder = ffmpeg.avcodec_find_decoder(inputCodecId);
            if (inputDecoder == null)
                throw new Exception("Unsupported audio codec");

            // Create an output codec context: copy the parameters from the input context,
            // then adjust the output-specific settings below.
            // (An earlier revision aliased the input directly: var outputCodecContext = &inputCodecContext;)
            AudioCodecContext = ffmpeg.avcodec_alloc_context3(inputDecoder);
            if (ffmpeg.avcodec_copy_context(AudioCodecContext, &inputCodecContext) != Constants.SuccessCode)
                throw new Exception("Could not create audio output codec context from input");

            if ((inputDecoder->capabilities & (int)ffmpeg.AV_CODEC_CAP_TRUNCATED) == (int)ffmpeg.AV_CODEC_CAP_TRUNCATED)
                AudioCodecContext->flags |= (int)ffmpeg.AV_CODEC_FLAG_TRUNCATED;

            if (ffmpeg.avcodec_open2(AudioCodecContext, inputDecoder, null) < Constants.SuccessCode)
                throw new Exception("Could not open codec");

            // setup basic properties
            AudioBytesPerSample = ffmpeg.av_get_bytes_per_sample(AudioCodecContext->sample_fmt);
            AudioCodec = inputCodecContext.codec_id.ToString();
            AudioChannels = inputCodecContext.channels;
            AudioBitrate = (int)inputCodecContext.bit_rate;
            AudioOutputBitsPerSample = ffmpeg.av_get_bytes_per_sample(Constants.AudioOutputSampleFormat) * 8;
            AudioSampleRate = inputCodecContext.sample_rate;
            AudioOutputSampleRate = AudioSampleRate > 44100 ? 44100 : AudioSampleRate; // Cap the output at 44.1 kHz to save CPU; higher rates add cost with little audible benefit.

            // Reference: http://www.ffmpeg.org/doxygen/2.0/group__lswr.html
            // Used Example: https://github.com/FFmpeg/FFmpeg/blob/7206b94fb893c63b187bcdfe26422b4e026a3ea0/doc/examples/resampling_audio.c
            AudioResampler = ffmpeg.swr_alloc();
            ffmpeg.av_opt_set_int(AudioResampler, "in_channel_layout", (long)AudioCodecContext->channel_layout, 0);
            ffmpeg.av_opt_set_int(AudioResampler, "out_channel_layout", (long)(ffmpeg.AV_CH_FRONT_LEFT | ffmpeg.AV_CH_FRONT_RIGHT), 0);
            ffmpeg.av_opt_set_int(AudioResampler, "in_sample_rate", AudioSampleRate, 0);
            ffmpeg.av_opt_set_int(AudioResampler, "out_sample_rate", AudioOutputSampleRate, 0);
            ffmpeg.av_opt_set_sample_fmt(AudioResampler, "in_sample_fmt", AudioCodecContext->sample_fmt, 0);
            ffmpeg.av_opt_set_sample_fmt(AudioResampler, "out_sample_fmt", Constants.AudioOutputSampleFormat, 0);
            ffmpeg.swr_init(AudioResampler);

            // All output frames will have the same length and will be held by the same structure: the decoder frame holder.
            DecodedWaveHolder = ffmpeg.av_frame_alloc();

            // Ensure proper audio properties
            if (AudioOutputBitsPerSample <= 0 || AudioSampleRate <= 0)
                throw new InvalidOperationException("Could not load media file");
        }
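For reference, here is a minimal sketch of how the resampler configured above might be driven once a packet has been decoded into DecodedWaveHolder. The ResampleFrame helper, its buffer sizing, and the stereo output assumption are illustrative additions, not part of the original example; exact binding signatures (byte** vs. byte*[]) vary between FFmpeg.AutoGen releases.

        private unsafe byte[] ResampleFrame()
        {
            const int outputChannels = 2; // front-left + front-right, matching the layout configured above

            // Worst-case output sample count after the sample-rate conversion.
            var maxOutSamples = (int)ffmpeg.av_rescale_rnd(
                DecodedWaveHolder->nb_samples, AudioOutputSampleRate, AudioSampleRate, AVRounding.AV_ROUND_UP);

            var bytesPerSample = ffmpeg.av_get_bytes_per_sample(Constants.AudioOutputSampleFormat);
            var buffer = new byte[maxOutSamples * outputChannels * bytesPerSample];

            int convertedSamples;
            fixed (byte* pBuffer = buffer)
            {
                var outPlanes = stackalloc byte*[1];
                outPlanes[0] = pBuffer; // interleaved output formats use a single plane

                convertedSamples = ffmpeg.swr_convert(
                    AudioResampler, outPlanes, maxOutSamples,
                    DecodedWaveHolder->extended_data, DecodedWaveHolder->nb_samples);
            }

            if (convertedSamples < 0)
                throw new Exception("Could not resample audio frame");

            Array.Resize(ref buffer, convertedSamples * outputChannels * bytesPerSample);
            return buffer;
        }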
        private void InitializeVideo()
        {
            // Extract pixel format and codec id
            var inputCodecContext = *(InputVideoStream->codec);
            var inputPixelFormat = inputCodecContext.pix_fmt;
            var inputCodecId = inputCodecContext.codec_id;

            // Populate basic properties
            VideoCodec = inputCodecContext.codec_id.ToString(); // Utils.GetAnsiString(new IntPtr(inputCodecContext.codec_name));
            VideoBitrate = (int)inputCodecContext.bit_rate;
            VideoFrameWidth = inputCodecContext.width;
            VideoFrameHeight = inputCodecContext.height;

            // Guard against an unset frame rate (den == 0), which would otherwise
            // produce NaN and make Convert.ToDecimal throw.
            VideoFrameRate = inputCodecContext.framerate.den > 0
                ? Convert.ToDecimal(Convert.ToDouble(inputCodecContext.framerate.num) / Convert.ToDouble(inputCodecContext.framerate.den))
                : 0M;
            VideoFrameLength = VideoFrameRate > 0M ? 1M / VideoFrameRate : 0M;

            // Get an input decoder for the input codec
            AVCodec* inputDecoder = ffmpeg.avcodec_find_decoder(inputCodecId);
            if (inputDecoder == null)
                throw new Exception("Unsupported video codec");

            // Create a software scaling context -- this allows us to do fast colorspace conversion
            VideoResampler = ffmpeg.sws_getContext(
                VideoFrameWidth, VideoFrameHeight, inputPixelFormat,
                VideoFrameWidth, VideoFrameHeight, Constants.VideoOutputPixelFormat,
                (int)ffmpeg.SWS_BILINEAR, null, null, null);

            if (VideoResampler == null)
                throw new Exception("Could not initialize the output conversion context");

            // Create an output codec context: copy the parameters from the input context,
            // then adjust the output-specific settings below.
            // (An earlier revision aliased the input directly: var outputCodecContext = &inputCodecContext;)
            VideoCodecContext = ffmpeg.avcodec_alloc_context3(inputDecoder);
            if (ffmpeg.avcodec_copy_context(VideoCodecContext, &inputCodecContext) != Constants.SuccessCode)
                throw new Exception("Could not create video output codec context from input");

            if ((inputDecoder->capabilities & (int)ffmpeg.AV_CODEC_CAP_TRUNCATED) == (int)ffmpeg.AV_CODEC_CAP_TRUNCATED)
                VideoCodecContext->flags |= (int)ffmpeg.AV_CODEC_FLAG_TRUNCATED;

            if (ffmpeg.avcodec_open2(VideoCodecContext, inputDecoder, null) < Constants.SuccessCode)
                throw new Exception("Could not open codec");

            // All output frames will have the same length and will be held by the same structure: the decoder frame holder.
            DecodedPictureHolder = ffmpeg.av_frame_alloc();
            OutputPictureBufferLength = ffmpeg.avpicture_get_size(Constants.VideoOutputPixelFormat, VideoFrameWidth, VideoFrameHeight);
        }
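A matching sketch for the video side: converting the decoded picture into the output pixel format through the VideoResampler built above. The ConvertFrame helper and the caller-supplied buffer of OutputPictureBufferLength bytes are assumptions for illustration only.

        private unsafe void ConvertFrame(byte[] targetBuffer)
        {
            var outputFrame = ffmpeg.av_frame_alloc();
            try
            {
                fixed (byte* pTarget = targetBuffer)
                {
                    // Wire the flat output buffer into the frame's data/linesize arrays.
                    ffmpeg.avpicture_fill((AVPicture*)outputFrame, pTarget,
                        Constants.VideoOutputPixelFormat, VideoFrameWidth, VideoFrameHeight);

                    // Colorspace-convert the decoded picture into the target buffer.
                    ffmpeg.sws_scale(VideoResampler,
                        DecodedPictureHolder->data, DecodedPictureHolder->linesize,
                        0, VideoFrameHeight,
                        outputFrame->data, outputFrame->linesize);
                }
            }
            finally
            {
                ffmpeg.av_frame_free(&outputFrame);
            }
        }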
        /// <summary>
        /// Releases all managed and unmanaged resources
        /// </summary>
        public void Dispose()
        {
            if (IsCancellationPending)
                return;

            this.IsCancellationPending = true;

            this.VideoRenderTimer.Stop();

            if (this.AudioRenderer != null)
            {
                if (this.AudioRenderer.HasInitialized)
                    this.AudioRenderer.Stop();

                this.AudioRenderer.Dispose();
                this.AudioRenderer = null;
            }

            if (MediaFrameExtractorThread != null)
            {
                MediaFrameExtractorThread.Join();
                MediaFrameExtractorThread = null;
            }

            if (MediaFramesExtractedDone != null)
            {
                MediaFramesExtractedDone.Dispose();
                MediaFramesExtractedDone = null;
            }

            if (PrimaryFramesCache != null)
            {
                PrimaryFramesCache.Clear();
                PrimaryFramesCache = null;
            }

            if (SecondaryFramesCache != null)
            {
                SecondaryFramesCache.Clear();
                SecondaryFramesCache = null;
            }

            if (VideoCodecContext != null)
            {
                fixed (AVCodecContext** videoCodecContextRef = &VideoCodecContext)
                {
                    ffmpeg.avcodec_close(VideoCodecContext);
                    ffmpeg.avcodec_free_context(videoCodecContextRef);
                    VideoCodecContext = null;
                }
            }

            if (AudioCodecContext != null)
            {
                fixed (AVCodecContext** audioCodecContextRef = &AudioCodecContext)
                {
                    ffmpeg.avcodec_close(AudioCodecContext);
                    ffmpeg.avcodec_free_context(audioCodecContextRef);
                    AudioCodecContext = null;
                }
            }

            if (VideoResampler != null)
            {
                ffmpeg.sws_freeContext(VideoResampler);
                VideoResampler = null;
            }

            if (AudioResampler != null)
            {
                fixed (SwrContext** audioResamplerRef = &AudioResampler)
                {
                    ffmpeg.swr_close(AudioResampler);
                    ffmpeg.swr_free(audioResamplerRef);
                    AudioResampler = null;
                }
            }

            if (InputFormatContext != null)
            {
                fixed (AVFormatContext** inputFormatContextRef = &InputFormatContext)
                {
                    // avformat_close_input frees the format context and nulls the pointer,
                    // so a separate avformat_free_context call is not needed here.
                    ffmpeg.avformat_close_input(inputFormatContextRef);
                }
                InputFormatContext = null;
            }

            if (DecodedPictureHolder != null)
            {
                // Frames from av_frame_alloc must be released with av_frame_free so that
                // any buffers still referenced by the frame are freed as well.
                fixed (AVFrame** pictureHolderRef = &DecodedPictureHolder)
                {
                    ffmpeg.av_frame_free(pictureHolderRef);
                }
                DecodedPictureHolder = null;
            }

            if (DecodedWaveHolder != null)
            {
                fixed (AVFrame** waveHolderRef = &DecodedWaveHolder)
                {
                    ffmpeg.av_frame_free(waveHolderRef);
                }
                DecodedWaveHolder = null;
            }

        }
        protected override void Dispose(bool disposing)
        {
            if (m_avFrame != null)
            {
                FFmpeg.av_free(m_avFrame);
                m_avFrame = null;
            }

            if (m_avPicture_allocated)
            {
                FFmpeg.avpicture_free(ref m_avPicture);
                m_avPicture_allocated = false;
            }

            base.Dispose(disposing);
        }
 public static extern int swr_config_frame(SwrContext * @swr, AVFrame * @out, AVFrame * @in);
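A minimal sketch of where swr_config_frame fits: it (re)configures a resampler from the formats already set on a pair of frames, after which swr_init prepares the context for conversion. The CreateResamplerFor helper, its frame preconditions, and the FFmpegInvoke wrapper calls are assumptions, not code from the examples above.

 public static unsafe SwrContext* CreateResamplerFor(AVFrame* inFrame, AVFrame* outFrame)
 {
     SwrContext* swr = FFmpegInvoke.swr_alloc();
     if (swr == null)
         throw new Exception("Could not allocate resampler");

     // outFrame must already carry the desired output sample format, channel layout
     // and sample rate; inFrame is a decoded frame describing the input.
     if (FFmpegInvoke.swr_config_frame(swr, outFrame, inFrame) < 0)
         throw new Exception("Could not configure resampler from frames");

     if (FFmpegInvoke.swr_init(swr) < 0)
         throw new Exception("Could not initialize resampler");

     return swr;
 }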
Example #37
 public Initialised(AVFormatContext *inputContext, int streamIndex, AVCodecContext *decoderContext, AVPacket *packet, AVFrame *spareFrame)
 {
     this.inputContext   = inputContext;
     this.streamIndex    = streamIndex;
     this.decoderContext = decoderContext;
     this.packet         = packet;
     this.spareFrame     = spareFrame;
 }
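For context, one way an Initialised bundle like this is typically produced: open the input, pick the best stream, set up a decoder, and allocate a reusable packet and spare frame. The Open helper below is a sketch under those assumptions and is not part of the original example.

 public static unsafe Initialised Open(string url)
 {
     AVFormatContext* inputContext = null;
     if (ffmpeg.avformat_open_input(&inputContext, url, null, null) < 0)
         throw new Exception("Could not open input");

     if (ffmpeg.avformat_find_stream_info(inputContext, null) < 0)
         throw new Exception("Could not read stream information");

     AVCodec* decoder = null;
     var streamIndex = ffmpeg.av_find_best_stream(inputContext, AVMediaType.AVMEDIA_TYPE_VIDEO, -1, -1, &decoder, 0);
     if (streamIndex < 0)
         throw new Exception("No video stream found");

     var decoderContext = ffmpeg.avcodec_alloc_context3(decoder);
     ffmpeg.avcodec_parameters_to_context(decoderContext, inputContext->streams[streamIndex]->codecpar);
     if (ffmpeg.avcodec_open2(decoderContext, decoder, null) < 0)
         throw new Exception("Could not open decoder");

     return new Initialised(inputContext, streamIndex, decoderContext,
                            ffmpeg.av_packet_alloc(), ffmpeg.av_frame_alloc());
 }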
        /// <summary>
        /// If necessary, disposes the existing filtergraph and creates a new one based on the frame arguments.
        /// </summary>
        /// <param name="frame">The frame.</param>
        /// <exception cref="MediaContainerException">
        /// avfilter_graph_create_filter
        /// or
        /// avfilter_graph_create_filter
        /// or
        /// avfilter_link
        /// or
        /// avfilter_graph_parse
        /// or
        /// avfilter_graph_config
        /// </exception>
        private void InitializeFilterGraph(AVFrame *frame)
        {
            /*
             * References:
             * https://www.ffmpeg.org/doxygen/2.0/doc_2examples_2filtering_audio_8c-example.html
             */

            var frameArguments = ComputeFilterArguments(frame);

            if (string.IsNullOrWhiteSpace(CurrentFilterArguments) || frameArguments.Equals(CurrentFilterArguments) == false)
            {
                DestroyFiltergraph();
            }
            else
            {
                return;
            }

            FilterGraph = ffmpeg.avfilter_graph_alloc();
            RC.Current.Add(FilterGraph, $"264: {nameof(AudioComponent)}.{nameof(InitializeFilterGraph)}()");
            CurrentFilterArguments = frameArguments;

            try
            {
                var result = 0;

                fixed(AVFilterContext **source = &SourceFilter)
                fixed(AVFilterContext **sink = &SinkFilter)
                {
                    result = ffmpeg.avfilter_graph_create_filter(source, ffmpeg.avfilter_get_by_name("abuffer"), "audio_buffer", CurrentFilterArguments, null, FilterGraph);
                    if (result != 0)
                    {
                        throw new MediaContainerException(
                                  $"{nameof(ffmpeg.avfilter_graph_create_filter)} (audio_buffer) failed. Error {result}: {FFmpegEx.GetErrorMessage(result)}");
                    }

                    result = ffmpeg.avfilter_graph_create_filter(sink, ffmpeg.avfilter_get_by_name("abuffersink"), "audio_buffersink", null, null, FilterGraph);
                    if (result != 0)
                    {
                        throw new MediaContainerException(
                                  $"{nameof(ffmpeg.avfilter_graph_create_filter)} (audio_buffersink) failed. Error {result}: {FFmpegEx.GetErrorMessage(result)}");
                    }
                }

                if (string.IsNullOrWhiteSpace(FilterString))
                {
                    result = ffmpeg.avfilter_link(SourceFilter, 0, SinkFilter, 0);
                    if (result != 0)
                    {
                        throw new MediaContainerException($"{nameof(ffmpeg.avfilter_link)} failed. Error {result}: {FFmpegEx.GetErrorMessage(result)}");
                    }
                }
                else
                {
                    var initFilterCount = FilterGraph->nb_filters;

                    SourceOutput             = ffmpeg.avfilter_inout_alloc();
                    SourceOutput->name       = ffmpeg.av_strdup("in");
                    SourceOutput->filter_ctx = SourceFilter;
                    SourceOutput->pad_idx    = 0;
                    SourceOutput->next       = null;

                    SinkInput             = ffmpeg.avfilter_inout_alloc();
                    SinkInput->name       = ffmpeg.av_strdup("out");
                    SinkInput->filter_ctx = SinkFilter;
                    SinkInput->pad_idx    = 0;
                    SinkInput->next       = null;

                    result = ffmpeg.avfilter_graph_parse(FilterGraph, FilterString, SinkInput, SourceOutput, null);
                    if (result != 0)
                    {
                        throw new MediaContainerException($"{nameof(ffmpeg.avfilter_graph_parse)} failed. Error {result}: {FFmpegEx.GetErrorMessage(result)}");
                    }

                    // Reorder the filters to ensure that inputs of the custom filters are merged first
                    for (var i = 0; i < FilterGraph->nb_filters - initFilterCount; i++)
                    {
                        var sourceAddress = FilterGraph->filters[i];
                        var targetAddress = FilterGraph->filters[i + initFilterCount];
                        FilterGraph->filters[i] = targetAddress;
                        FilterGraph->filters[i + initFilterCount] = sourceAddress;
                    }
                }

                result = ffmpeg.avfilter_graph_config(FilterGraph, null);
                if (result != 0)
                {
                    throw new MediaContainerException($"{nameof(ffmpeg.avfilter_graph_config)} failed. Error {result}: {FFmpegEx.GetErrorMessage(result)}");
                }
            }
            catch (Exception ex)
            {
                Container.Logger?.Log(MediaLogMessageType.Error, $"Audio filter graph could not be built: {FilterString}.\r\n{ex.Message}");
                DestroyFiltergraph();
            }
        }
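Once the graph is configured, frames are pushed into the source filter and drained from the sink. The FilterFrame helper below sketches that loop against the same SourceFilter/SinkFilter fields; it is an illustrative assumption, not part of the original example.

        private unsafe void FilterFrame(AVFrame* inputFrame, AVFrame* outputFrame)
        {
            // Push the decoded frame into the graph's source.
            var result = ffmpeg.av_buffersrc_add_frame(SourceFilter, inputFrame);
            if (result != 0)
            {
                throw new MediaContainerException($"{nameof(ffmpeg.av_buffersrc_add_frame)} failed. Error {result}: {FFmpegEx.GetErrorMessage(result)}");
            }

            // Drain every filtered frame currently available at the sink.
            while (true)
            {
                result = ffmpeg.av_buffersink_get_frame(SinkFilter, outputFrame);
                if (result == ffmpeg.AVERROR(ffmpeg.EAGAIN) || result == ffmpeg.AVERROR_EOF)
                    break; // the graph needs more input, or has been fully flushed

                if (result < 0)
                {
                    throw new MediaContainerException($"{nameof(ffmpeg.av_buffersink_get_frame)} failed. Error {result}: {FFmpegEx.GetErrorMessage(result)}");
                }

                // ... hand outputFrame to the consumer here ...
                ffmpeg.av_frame_unref(outputFrame);
            }
        }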
Example #39
 /// <summary>
 /// Creates a frame source object given the raw FFmpeg frame reference.
 /// </summary>
 /// <param name="frame">The raw FFmpeg frame pointer.</param>
 /// <returns>The media frame</returns>
 protected virtual MediaFrame CreateFrameSource(ref AVFrame *frame)
 {
     return null;
 }
 /// <inheritdoc />
 public unsafe void OnAudioFrameDecoded(AVFrame *audioFrame, AVFormatContext *context) =>
 Parent?.RaiseAudioFrameDecodedEvent(audioFrame, context);
        public void Close()
        {
            if (!_opened)
                return;
            FFmpegInvoke.av_free(_pConvertedFrame);
            FFmpegInvoke.av_free(_pConvertedFrameBuffer);
            FFmpegInvoke.sws_freeContext(_pConvertContext);

            FFmpegInvoke.av_free(_pDecodedFrame);
            FFmpegInvoke.avcodec_close(_pStream->codec);
            fixed (AVFormatContext** pFormatContext = &_pFormatContext)
            {
                FFmpegInvoke.avformat_close_input(pFormatContext);
            }

            _videoClock = 0;
            _pFormatContext = null;
            _pStream = null;
            _pDecodedFrame = null;
            _pConvertedFrame = null;
            _pConvertedFrameBuffer = null;
            _pConvertContext = null;
            _opened = false;
        }