コード例 #1
0
        /// <summary>
        /// Opens the video stream with the specified index in the media container.
        /// </summary>
        /// <param name="container">The media container.</param>
        /// <param name="options">The media options.</param>
        /// <param name="stream">The stream.</param>
        /// <returns>The opened <see cref="Decoder"/>, or <see langword="null"/> if no decoder was found.</returns>
        internal static Decoder OpenStream(InputContainer container, MediaOptions options, AVStream *stream)
        {
            var      format = container.Pointer;
            AVCodec *codec  = null;

            // Ask FFmpeg for the decoder matching this specific stream.
            var index = ffmpeg.av_find_best_stream(format, stream->codec->codec_type, stream->index, -1, &codec, 0);

            index.IfError(ffmpeg.AVERROR_DECODER_NOT_FOUND, "Cannot find a codec for the specified stream.");
            if (index < 0)
            {
                return(null);
            }

            var codecContext = ffmpeg.avcodec_alloc_context3(codec);

            try
            {
                ffmpeg.avcodec_parameters_to_context(codecContext, stream->codecpar)
                .ThrowIfError("Cannot open the stream codec!");
                codecContext->pkt_timebase = stream->time_base;

                var dict = new FFDictionary(options.DecoderOptions, false).Pointer;

                ffmpeg.avcodec_open2(codecContext, codec, &dict)
                .ThrowIfError("Cannot open the stream codec!");
            }
            catch
            {
                // BUG FIX: the allocated codec context leaked whenever configuration
                // or opening threw; free it before propagating the error.
                ffmpeg.avcodec_free_context(&codecContext);
                throw;
            }

            return(new Decoder(codecContext, stream, container));
        }
コード例 #2
0
    /// <summary>
    /// Encodes a single frame with the PNG encoder and returns the encoded bytes.
    /// </summary>
    /// <param name="frame">The frame to encode (expected to be RGB24 at the source dimensions).</param>
    /// <param name="pngData">Receives the PNG bytes; empty on failure.</param>
    /// <returns><see langword="true"/> on success, otherwise <see langword="false"/>.</returns>
    public bool AvFrameToImageByteArray(AVFrame frame, out byte[] pngData)
    {
        AVCodec *       outCodec    = ffmpeg.avcodec_find_encoder(AVCodecID.AV_CODEC_ID_PNG);
        AVCodecContext *outCodecCtx = ffmpeg.avcodec_alloc_context3(outCodec);

        // Mirror the source decoder's geometry/time base; PNG expects packed RGB24.
        outCodecCtx->width         = _pCodecContext->width;
        outCodecCtx->height        = _pCodecContext->height;
        outCodecCtx->pix_fmt       = AVPixelFormat.AV_PIX_FMT_RGB24;
        outCodecCtx->codec_type    = AVMediaType.AVMEDIA_TYPE_VIDEO;
        outCodecCtx->time_base.num = _pCodecContext->time_base.num;
        outCodecCtx->time_base.den = _pCodecContext->time_base.den;

        if (ffmpeg.avcodec_open2(outCodecCtx, outCodec, null) < 0)
        {
            pngData = new byte[] { };
            ffmpeg.avcodec_free_context(&outCodecCtx);
            return(false);
        }

        AVPacket outPacket = new AVPacket();

        ffmpeg.av_init_packet(&outPacket);
        outPacket.size = 0;
        outPacket.data = null;

        try
        {
            // BUG FIX: both calls can fail; the original ignored their results and
            // then Marshal.Copy'd from a potentially null packet buffer.
            if (ffmpeg.avcodec_send_frame(outCodecCtx, &frame) < 0 ||
                ffmpeg.avcodec_receive_packet(outCodecCtx, &outPacket) < 0)
            {
                pngData = new byte[] { };
                return(false);
            }

            pngData = new byte[outPacket.size];
            Marshal.Copy((IntPtr)outPacket.data, pngData, 0, outPacket.size);
            return(true);
        }
        finally
        {
            // BUG FIX: the packet buffer and codec context previously leaked on
            // every call.
            ffmpeg.av_packet_unref(&outPacket);
            ffmpeg.avcodec_free_context(&outCodecCtx);
        }
    }
コード例 #3
0
        //public AVFormatContext* PFormatContext
        //{
        //    get
        //    {
        //        return _pFormatContext;
        //    }
        //}

        /// <summary>
        /// Opens the given URL, locates its best video stream and prepares a low-delay
        /// decoder for it, optionally backed by a hardware device.
        /// </summary>
        /// <param name="url">Path or URL of the media to open.</param>
        /// <param name="HWDeviceType">Hardware device type; NONE selects software decoding.</param>
        public VideoStreamDecoder(string url, AVHWDeviceType HWDeviceType = AVHWDeviceType.AV_HWDEVICE_TYPE_NONE)
        {
            _pFormatContext = ffmpeg.avformat_alloc_context();
            _receivedFrame  = ffmpeg.av_frame_alloc();
            var pFormatContext = _pFormatContext;

            ffmpeg.avformat_open_input(&pFormatContext, url, null, null).ThrowExceptionIfError();
            ffmpeg.avformat_find_stream_info(_pFormatContext, null).ThrowExceptionIfError();
            AVCodec *codec = null;

            // BUG FIX: the last argument of av_find_best_stream is a reserved "flags"
            // parameter (currently unused by FFmpeg), so passing AV_CODEC_FLAG_LOW_DELAY
            // there was silently ignored — which is what the old TODO was asking about.
            // The flag belongs on the codec context, set before avcodec_open2.
            _streamIndex = ffmpeg.av_find_best_stream(_pFormatContext, AVMediaType.AVMEDIA_TYPE_VIDEO,
                                                      -1, -1, &codec, 0).ThrowExceptionIfError();
            _pCodecContext = ffmpeg.avcodec_alloc_context3(codec);

            if (HWDeviceType != AVHWDeviceType.AV_HWDEVICE_TYPE_NONE)
            {
                ffmpeg.av_hwdevice_ctx_create(&_pCodecContext->hw_device_ctx, HWDeviceType, null, null, 0).ThrowExceptionIfError();
            }
            ffmpeg.avcodec_parameters_to_context(_pCodecContext, _pFormatContext->streams[_streamIndex]->codecpar).ThrowExceptionIfError();
            _pCodecContext->flags |= ffmpeg.AV_CODEC_FLAG_LOW_DELAY;
            ffmpeg.avcodec_open2(_pCodecContext, codec, null).ThrowExceptionIfError();

            CodecName            = ffmpeg.avcodec_get_name(codec->id);
            FrameSize            = new Size(_pCodecContext->width, _pCodecContext->height);
            PixelFormat          = _pCodecContext->pix_fmt;
            // NOTE(review): AVFormatContext.duration is in AV_TIME_BASE units (µs),
            // not milliseconds — confirm what consumers of this property expect.
            DurationMilliseconds = _pFormatContext->duration;
            Framerate            = 25;// TODO: derive from the stream's avg_frame_rate instead of hard-coding.

            _pPacket = ffmpeg.av_packet_alloc();
            _pFrame  = ffmpeg.av_frame_alloc();
        }
コード例 #4
0
        /// <summary>
        /// Initializes a decoder stream over the given media file stream and opens the decoding codec.
        /// </summary>
        /// <param name="file">The media file that owns the stream.</param>
        /// <param name="stream">The FFmpeg stream to decode.</param>
        /// <exception cref="DecoderException">
        /// Thrown when no decoder exists for the stream's codec or the codec cannot be opened.
        /// </exception>
        public DecoderStream(MediaFile file, ref AVStream stream)
        {
            // Initialize instance variables
            m_disposed = false;
            m_position = m_bufferUsedSize = 0;
            m_file     = file;
            m_avStream = stream;

            // NOTE(review): this copies the AVCodecContext struct BY VALUE from the
            // deprecated AVStream.codec field; avcodec_open below then operates on the
            // local copy, not on the stream's own context. Confirm the rest of the
            // class consistently uses m_avCodecCtx and never the stream's context.
            m_avCodecCtx = *m_avStream.codec;

            // Open the decoding codec
            AVCodec *avCodec = FFmpeg.avcodec_find_decoder(m_avCodecCtx.codec_id);

            if (avCodec == null)
            {
                throw new DecoderException("No decoder found");
            }

            if (FFmpeg.avcodec_open(ref m_avCodecCtx, avCodec) < 0)
            {
                throw new DecoderException("Error opening codec");
            }

            // Remember that the codec was opened so Dispose can close it.
            m_codecOpen = true;
        }
コード例 #5
0
ファイル: StreamDecoder.cs プロジェクト: EYHN/Anything
    /// <summary>
    /// Creates a decoder for the specified stream, optionally backed by a hardware device.
    /// </summary>
    /// <param name="formatContext">The demuxer context that owns the stream.</param>
    /// <param name="streamIndex">Index of the stream to decode.</param>
    /// <param name="codec">The codec to open for the stream.</param>
    /// <param name="hwDeviceType">Hardware device type; NONE selects software decoding.</param>
    public StreamDecoder(
        AVFormatContext *formatContext,
        int streamIndex,
        AVCodec *codec,
        AVHWDeviceType hwDeviceType = AVHWDeviceType.AV_HWDEVICE_TYPE_NONE)
        : base(null)
    {
        _streamIndex   = streamIndex;
        _formatContext = formatContext;
        _stream        = formatContext->streams[streamIndex];

        _codecContext = ffmpeg.avcodec_alloc_context3(codec);
        if (hwDeviceType != AVHWDeviceType.AV_HWDEVICE_TYPE_NONE)
        {
            ffmpeg.av_hwdevice_ctx_create(&_codecContext->hw_device_ctx, hwDeviceType, null, null, 0)
            .ThrowExceptionIfError();
        }

        ffmpeg.avcodec_parameters_to_context(_codecContext, _stream->codecpar)
        .ThrowExceptionIfError();
        // FIX: set the packet time base before opening the codec so it is visible
        // during decoder initialization (the original assigned it after avcodec_open2).
        _codecContext->pkt_timebase = _stream->time_base;
        ffmpeg.avcodec_open2(_codecContext, codec, null).ThrowExceptionIfError();

        _codec = codec;

        // Reusable packet/frame buffers for the decode loop.
        _packet        = ffmpeg.av_packet_alloc();
        _frame         = ffmpeg.av_frame_alloc();
        _receivedFrame = ffmpeg.av_frame_alloc();
    }
コード例 #6
0
 /// <summary>
 /// Wraps a raw FFmpeg stream together with its owning format context.
 /// </summary>
 /// <param name="formatContext">The demuxer context that owns the stream.</param>
 /// <param name="streamIndex">Index of the stream within the container.</param>
 /// <param name="rawStream">Pointer to the underlying stream.</param>
 /// <param name="decodeCodec">Optional pre-resolved decoder; may be null.</param>
 public MediaStream(AVFormatContext *formatContext, int streamIndex, AVStream *rawStream, AVCodec *decodeCodec = null)
 {
     StreamIndex           = streamIndex;
     RawStream             = rawStream;
     RawFormatContext      = formatContext;
     _cachedRawDecodeCodec = decodeCodec;
 }
コード例 #7
0
        /// <summary>
        /// Locates the best video stream in the media container and opens a decoder for it.
        /// </summary>
        /// <param name="container">The media container.</param>
        /// <param name="options">The media options.</param>
        /// <returns>The opened <see cref="Decoder{TFrame}"/>.</returns>
        internal static Decoder <VideoFrame> OpenVideo(InputContainer container, MediaOptions options)
        {
            AVCodec *decoder       = null;
            var      formatContext = container.Pointer;

            var streamIndex = ffmpeg.av_find_best_stream(formatContext, AVMediaType.AVMEDIA_TYPE_VIDEO, -1, -1, &decoder, 0);

            streamIndex.IfError(ffmpeg.AVERROR_DECODER_NOT_FOUND, "Cannot find a codec for the video stream.");
            if (streamIndex < 0)
            {
                return(null);
            }

            var videoStream = formatContext->streams[streamIndex];
            var context     = ffmpeg.avcodec_alloc_context3(decoder);

            // Copy the demuxer's codec parameters into the fresh context.
            ffmpeg.avcodec_parameters_to_context(context, videoStream->codecpar)
            .ThrowIfError("Cannot open the video codec!");
            context->pkt_timebase = videoStream->time_base;

            var decoderOptions = new FFDictionary(options.DecoderOptions, false).Pointer;

            ffmpeg.avcodec_open2(context, decoder, &decoderOptions)
            .ThrowIfError("Cannot open the video codec");

            return(new Decoder <VideoFrame>(context, videoStream, container));
        }
コード例 #8
0
        /// <summary>
        /// Selects the best stream of the given media type, opens a decoder for it and
        /// publishes the resulting stream/codec properties.
        /// </summary>
        /// <param name="type">The media type (audio or video) to select.</param>
        public void SelectStream(AVMediaType type)
        {
            AVCodec *avCodec = null;

            _streamIndex    = ffmpeg.av_find_best_stream(_avFormatContext, type, -1, -1, &avCodec, 0).ThrowOnError();
            _avCodecContext = ffmpeg.avcodec_alloc_context3(avCodec);
            var stream = _avFormatContext->streams[_streamIndex];

            if (HardwareDevice != AVHWDeviceType.AV_HWDEVICE_TYPE_NONE)
            {
                ffmpeg.av_hwdevice_ctx_create(&_avCodecContext->hw_device_ctx, HardwareDevice, null, null, 0).ThrowOnError();
            }

            ffmpeg.avcodec_parameters_to_context(_avCodecContext, stream->codecpar).ThrowOnError();
            ffmpeg.avcodec_open2(_avCodecContext, avCodec, null).ThrowOnError();

            // Publish stream/codec information for consumers.
            CodecId        = avCodec->id;
            CodecName      = ffmpeg.avcodec_get_name(CodecId);
            FrameSize      = new Size(_avCodecContext->width, _avCodecContext->height);
            AudioFrameSize = _avCodecContext->frame_size; // was followed by a stray ';;'
            PixelFormat    = HardwareDevice == AVHWDeviceType.AV_HWDEVICE_TYPE_NONE ? _avCodecContext->pix_fmt : GetHWPixelFormat(HardwareDevice);
            BitRate        = _avCodecContext->bit_rate;
            FrameRate      = _avCodecContext->framerate;
            TimeBase       = stream->time_base;
        }
コード例 #9
0
        /// <summary>
        /// Creates a decoding context for the given codec id and allocates the packet
        /// used during decoding.
        /// NOTE(review): on any failure below the constructor returns early, leaving the
        /// instance partially initialized — callers appear expected to tolerate null
        /// fields; confirm against the rest of the class.
        /// </summary>
        /// <param name="codecId">The codec to look up in the FFmpeg installation.</param>
        public FFmpegContext(AVCodecID codecId)
        {
            _codec = ffmpeg.avcodec_find_decoder(codecId);
            if (_codec == null)
            {
                Logger.Error?.PrintMsg(LogClass.FFmpeg, $"Codec wasn't found. Make sure you have the {codecId} codec present in your FFmpeg installation.");

                return;
            }

            _context = ffmpeg.avcodec_alloc_context3(_codec);
            if (_context == null)
            {
                Logger.Error?.PrintMsg(LogClass.FFmpeg, "Codec context couldn't be allocated.");

                return;
            }

            if (ffmpeg.avcodec_open2(_context, _codec, null) != 0)
            {
                Logger.Error?.PrintMsg(LogClass.FFmpeg, "Codec couldn't be opened.");

                return;
            }

            _packet = ffmpeg.av_packet_alloc();
            if (_packet == null)
            {
                Logger.Error?.PrintMsg(LogClass.FFmpeg, "Packet couldn't be allocated.");

                return;
            }

            // Grab the codec's decode entry point so frames can be decoded via a direct
            // function-pointer call. NOTE(review): AVCodec.decode is an internal FFmpeg
            // field, so this is tightly coupled to the specific FFmpeg build shipped.
            _decodeFrame = Marshal.GetDelegateForFunctionPointer <AVCodec_decode>(_codec->decode.Pointer);
        }
コード例 #10
0
        /// <summary>
        /// Creates an H.264 encoder that writes encoded output to the given stream.
        /// </summary>
        /// <param name="stream">Destination stream for the encoded data.</param>
        /// <param name="fps">Target frames per second (encoder time base is 1/fps).</param>
        /// <param name="frameSize">Frame dimensions in pixels.</param>
        /// <exception cref="InvalidOperationException">Thrown when the H.264 encoder is unavailable.</exception>
        public H264VideoStreamEncoder(Stream stream, int fps, Size frameSize)
        {
            _stream    = stream;
            _frameSize = frameSize;

            _pCodec = ffmpeg.avcodec_find_encoder(AVCodecID.AV_CODEC_ID_H264);
            if (_pCodec == null)
            {
                throw new InvalidOperationException("Codec not found.");
            }

            _pCodecContext            = ffmpeg.avcodec_alloc_context3(_pCodec);
            _pCodecContext->width     = frameSize.Width;
            _pCodecContext->height    = frameSize.Height;
            _pCodecContext->time_base = new AVRational { num = 1, den = fps };
            _pCodecContext->pix_fmt   = AVPixelFormat.AV_PIX_FMT_YUV420P;
            ffmpeg.av_opt_set(_pCodecContext->priv_data, "preset", "veryslow", 0);

            ffmpeg.avcodec_open2(_pCodecContext, _pCodec, null).ThrowExceptionIfError();

            // Planar YUV 4:2:0: chroma planes are half the luma width, and the chroma
            // plane sizes are a quarter of the luma plane size.
            _linesizeY = frameSize.Width;
            _linesizeU = frameSize.Width / 2;
            _linesizeV = frameSize.Width / 2;

            _ySize = _linesizeY * frameSize.Height;
            _uSize = _linesizeU * frameSize.Height / 2;
        }
コード例 #11
0
        /// <summary>
        /// Creates a decoding context for the H.264 codec.
        /// </summary>
        /// <exception cref="InvalidOperationException">
        /// Thrown when the decoder cannot be found, its context cannot be allocated,
        /// or the codec cannot be opened.
        /// </exception>
        public FFmpegContext()
        {
            // BUG FIX: all three calls below could fail and were silently ignored,
            // deferring the failure to a null-pointer crash at first use.
            _codec = ffmpeg.avcodec_find_decoder(AVCodecID.AV_CODEC_ID_H264);
            if (_codec == null)
            {
                throw new InvalidOperationException("H264 decoder was not found in the FFmpeg installation.");
            }

            _context = ffmpeg.avcodec_alloc_context3(_codec);
            if (_context == null)
            {
                throw new InvalidOperationException("Could not allocate the codec context.");
            }

            if (ffmpeg.avcodec_open2(_context, _codec, null) < 0)
            {
                throw new InvalidOperationException("Could not open the H264 codec.");
            }
        }
コード例 #12
0
        //@"D:\cshapdemo\ConsoleApp1\会不会.mp3"
        /// <summary>
        /// Opens a media file (e.g. an MP3), finds its first audio stream and opens a
        /// decoder for it, then dumps the container layout to the log.
        /// </summary>
        /// <param name="url">Path or URL of the media to open.</param>
        public void open(String url)
        {
            //var codecId = AVCodecID.AV_CODEC_ID_MP3;
            //_pCodecContext = ffmpeg.avcodec_alloc_context3(_pCodec);
            //        _pCodec = ffmpeg.avcodec_find_encoder(codecId);
            //_pCodecContext = ffmpeg.avcodec_alloc_context3(_pCodec);
            //_pCodecContext->time_base = new AVRational { num = 1, den = fps };
            //ret = ffmpeg.avcodec_open2(_pCodecContext, _pCodec, null);

            // NOTE(review): every return code collected in `ret` below is ignored, so
            // open/probe/decoder failures go unnoticed until a later crash.
            int ret = 0;

            _pFormatContext = ffmpeg.avformat_alloc_context();
            var pFormatContext = _pFormatContext;

            ret = ffmpeg.avformat_open_input(&pFormatContext, url, null, null);
            ret = ffmpeg.avformat_find_stream_info(pFormatContext, null);
            // Pick the first audio stream in the container.
            for (int i = 0; i < pFormatContext->nb_streams; i++)
            {
                if (pFormatContext->streams[i]->codecpar->codec_type == AVMediaType.AVMEDIA_TYPE_AUDIO)
                {
                    _streamIndex = i;
                    break;
                }
            }
            // NOTE(review): uses the deprecated AVStream.codec context rather than
            // allocating one from codecpar.
            _pCodecContext = pFormatContext->streams[_streamIndex]->codec;
            AVCodec *codec = ffmpeg.avcodec_find_decoder(_pCodecContext->codec_id);

            ret = ffmpeg.avcodec_open2(_pCodecContext, codec, null);// open the decoder
            ffmpeg.av_dump_format(pFormatContext, _streamIndex, url, 0);
        }
コード例 #13
0
        /// <summary>
        /// Creates a video encoder for the given codec and frame geometry.
        /// </summary>
        /// <param name="codecID">The encoder codec to use.</param>
        /// <param name="frameWidth">Frame width in pixels.</param>
        /// <param name="frameHeight">Frame height in pixels.</param>
        /// <param name="framesPerSecond">Target frame rate; defines the encoder time base (1/fps).</param>
        /// <exception cref="ApplicationException">
        /// Thrown when the encoder cannot be found or its context cannot be allocated.
        /// </exception>
        public VideoEncoder(AVCodecID codecID, int frameWidth, int frameHeight, int framesPerSecond)
        {
            _frameWidth  = frameWidth;
            _frameHeight = frameHeight;

            _videoCodec = ffmpeg.avcodec_find_encoder(codecID);
            if (_videoCodec == null)
            {
                throw new ApplicationException($"Codec encoder could not be found for {codecID}.");
            }

            _videoCodecContext = ffmpeg.avcodec_alloc_context3(_videoCodec);
            if (_videoCodecContext == null)
            {
                throw new ApplicationException("Failed to allocated codec context.");
            }

            _videoCodecContext->width         = frameWidth;
            _videoCodecContext->height        = frameHeight;
            // BUG FIX: the time-base denominator was hard-coded to 30, silently
            // ignoring the framesPerSecond constructor parameter.
            _videoCodecContext->time_base.den = framesPerSecond;
            _videoCodecContext->time_base.num = 1;
            _videoCodecContext->pix_fmt       = AVPixelFormat.AV_PIX_FMT_YUV420P;

            ffmpeg.avcodec_open2(_videoCodecContext, _videoCodec, null).ThrowExceptionIfError();
        }
コード例 #14
0
        /// <summary>
        /// Looks up the decoder for <c>CodecId</c>, allocates and opens its context,
        /// and allocates a reusable decode frame.
        /// </summary>
        /// <exception cref="Exception">
        /// Thrown when the codec cannot be found, allocated or opened, or when the
        /// frame cannot be allocated.
        /// </exception>
        public VideoDecoder()
        {
            AVCodec *decoder = FFmpegInvoke.avcodec_find_decoder(CodecId);
            if (decoder == null)
            {
                throw new Exception("Codec not found");
            }

            codec_context = FFmpegInvoke.avcodec_alloc_context3(decoder);
            if (codec_context == null)
            {
                throw new Exception("Could not allocate video codec context");
            }

            if (FFmpegInvoke.avcodec_open2(codec_context, decoder, null) < 0)
            {
                throw new Exception("Could not open codec");
            }

            // Reusable frame for decoded pictures.
            avFrame = FFmpegInvoke.avcodec_alloc_frame();
            if (avFrame == null)
            {
                throw new Exception("Could not allocate video frame");
            }
        }
コード例 #15
0
        /// <summary>
        /// Opens the given URL, locates its best video stream and prepares a decoder
        /// for it, optionally backed by a hardware device.
        /// </summary>
        /// <param name="url">Path or URL of the media to open.</param>
        /// <param name="HWDeviceType">Hardware device type; NONE selects software decoding.</param>
        public VideoStreamDecoder(string url, AVHWDeviceType HWDeviceType = AVHWDeviceType.AV_HWDEVICE_TYPE_NONE)
        {
            _pFormatContext = ffmpeg.avformat_alloc_context();
            _receivedFrame  = ffmpeg.av_frame_alloc();

            // avformat_open_input may replace the pointer, so hand it a local copy.
            var formatCtx = _pFormatContext;
            ffmpeg.avformat_open_input(&formatCtx, url, null, null).ThrowExceptionIfError();
            ffmpeg.avformat_find_stream_info(_pFormatContext, null).ThrowExceptionIfError();

            AVCodec *videoCodec = null;
            _streamIndex = ffmpeg
                           .av_find_best_stream(_pFormatContext, AVMediaType.AVMEDIA_TYPE_VIDEO, -1, -1, &videoCodec, 0)
                           .ThrowExceptionIfError();

            _pCodecContext = ffmpeg.avcodec_alloc_context3(videoCodec);
            if (HWDeviceType != AVHWDeviceType.AV_HWDEVICE_TYPE_NONE)
            {
                ffmpeg.av_hwdevice_ctx_create(&_pCodecContext->hw_device_ctx, HWDeviceType, null, null, 0)
                .ThrowExceptionIfError();
            }

            ffmpeg.avcodec_parameters_to_context(_pCodecContext, _pFormatContext->streams[_streamIndex]->codecpar)
            .ThrowExceptionIfError();
            ffmpeg.avcodec_open2(_pCodecContext, videoCodec, null).ThrowExceptionIfError();

            CodecName   = ffmpeg.avcodec_get_name(videoCodec->id);
            FrameSize   = new Size(_pCodecContext->width, _pCodecContext->height);
            PixelFormat = _pCodecContext->pix_fmt;

            // Reusable packet/frame buffers for the decode loop.
            _pPacket = ffmpeg.av_packet_alloc();
            _pFrame  = ffmpeg.av_frame_alloc();
        }
コード例 #16
0
        /// <summary>
        /// Finds the best stream of the specified type in the file.
        /// </summary>
        /// <param name="container">The media container.</param>
        /// <param name="type">Type of the stream to find.</param>
        /// <param name="relStream">Optional. Index of the related stream.</param>
        /// <returns>Index of the found stream, otherwise <see langword="null"/>.</returns>
        private static int?FindBestStream(AVFormatContext *container, AVMediaType type, int relStream = -1)
        {
            AVCodec *codec = null;

            var streamId = ffmpeg.av_find_best_stream(container, type, -1, relStream, &codec, 0);

            // Negative values are FFmpeg error codes (stream not found / no decoder).
            if (streamId < 0)
            {
                return(null);
            }

            return(streamId);
        }
コード例 #17
0
        /// <summary>
        /// Creates a decoding context for the given stream by copying its codec
        /// parameters into a freshly allocated codec context and opening the decoder.
        /// </summary>
        /// <param name="stream">The stream to decode.</param>
        /// <param name="source">The decoder that owns this stream context.</param>
        /// <exception cref="NotSupportedException">Thrown when no decoder exists for the stream's codec.</exception>
        /// <exception cref="Exception">Thrown when the parameters cannot be copied or the codec cannot be opened.</exception>
        internal StreamContext(AVStream *stream, FFMPEGDecoder source)
        {
            _stream = stream;
            _source = source;
            var origCtx = _stream->codec;

            //find the corresponding codec
            _codec = ffmpeg.avcodec_find_decoder(origCtx->codec_id);
            if (_codec == null)
            {
                throw new NotSupportedException("This " + ffmpeg.av_get_media_type_string(origCtx->codec_type) +
                                                " codec is not supported by the current ffmpeg binaries!");
            }

            //copy the context from ffmpeg (required because we don't own the other one)
            _codecCtx = ffmpeg.avcodec_alloc_context3(_codec);
            if (ffmpeg.avcodec_parameters_to_context(_codecCtx, _stream->codecpar) != 0)
            {
                throw new Exception("Couldn't copy stream parameters!");
            }

            if (ffmpeg.avcodec_open2(_codecCtx, _codec, null) != 0)
            {
                // BUG FIX: this previously reported "Couldn't copy the codec!",
                // misattributing an open failure to the parameter copy above.
                throw new Exception("Couldn't open the codec!");
            }


            _decoded = ffmpeg.av_frame_alloc();
        }
コード例 #18
0
ファイル: ffAS.cs プロジェクト: HAN-IBS/ffmpeg4tc
        /// <summary>
        /// Adds a stereo audio stream with the given codec and sample rate to the output
        /// format context and pre-configures its codec context.
        /// </summary>
        /// <param name="oc">The output format context to add the stream to.</param>
        /// <param name="codec_id">The audio encoder codec to use.</param>
        /// <param name="sample_rate">Sample rate in Hz (default 44100).</param>
        /// <returns>The newly created stream.</returns>
        private static unsafe AVStream *add_audio_stream(AVFormatContext *oc, AVCodecID codec_id, int sample_rate = 44100)
        {
            AVCodecContext *c;
            AVCodec *       encoder = ffmpeg.avcodec_find_encoder(codec_id);
            AVStream *      st      = ffmpeg.avformat_new_stream(oc, encoder);

            if (st == null)
            {
                die("av_new_stream");
            }

            // NOTE(review): AVStream.codec is deprecated; newer FFmpeg expects stream
            // configuration through st->codecpar instead — confirm the FFmpeg version in use.
            c             = st->codec;
            c->codec_id   = codec_id;
            c->codec_type = AVMediaType.AVMEDIA_TYPE_AUDIO;

            /* put sample parameters */
            c->bit_rate       = 64000;
            c->sample_rate    = sample_rate;
            c->channels       = 2;
            c->sample_fmt     = encoder->sample_fmts[0]; // first sample format the encoder supports
            c->channel_layout = ffmpeg.AV_CH_LAYOUT_STEREO;

            // some formats want stream headers to be separate
            if ((oc->oformat->flags & ffmpeg.AVFMT_GLOBALHEADER) != 0)
            {
                c->flags |= ffmpeg.AV_CODEC_FLAG_GLOBAL_HEADER;
            }

            return(st);
        }
コード例 #19
0
        /// <summary>
        /// Wraps a native codec pointer and lazily enumerates the hardware device types
        /// its decoder configurations support.
        /// </summary>
        /// <param name="ffmpeg">The FFmpeg function table to query.</param>
        /// <param name="codec">The native codec to wrap.</param>
        public FFmpegCodec(FFmpegFuncs ffmpeg, AVCodec *codec)
        {
            this.ffmpeg = ffmpeg;

            Pointer = codec;
            // Evaluated on first access: walk the codec's hardware configs until
            // avcodec_get_hw_config returns null.
            SupportedHwDeviceTypes = new Lazy <IReadOnlyList <AVHWDeviceType> >(() =>
            {
                var deviceTypes = new List <AVHWDeviceType>();

                for (int configIndex = 0; ; configIndex++)
                {
                    var config = ffmpeg.avcodec_get_hw_config(codec, configIndex);
                    if (config == null)
                    {
                        break;
                    }

                    deviceTypes.Add(config->device_type);
                }

                return(deviceTypes);
            });
        }
コード例 #20
0
        /// <summary>
        /// Enumerates every decoder registered with FFmpeg, together with the pixel
        /// formats each one supports.
        /// </summary>
        /// <returns>All available decoders.</returns>
        public static unsafe Codec[] GetAllDecoders()
        {
            var decoders = new List <Codec>();

            void *   iter    = null;
            AVCodec *current = null;

            while ((current = av_codec_iterate(&iter)) != null)
            {
                if (av_codec_is_decoder(current) == 0)
                {
                    continue;
                }

                var longName  = Marshal.PtrToStringAnsi((IntPtr)current->long_name);
                var shortName = Marshal.PtrToStringAnsi((IntPtr)current->name);

                // pix_fmts is a null-terminated array ending with AV_PIX_FMT_NONE;
                // it may itself be null for codecs that don't advertise formats.
                var pixelFormats = new List <AVPixelFormat>();
                for (var fmt = current->pix_fmts; fmt != null && *fmt != AVPixelFormat.AV_PIX_FMT_NONE; fmt++)
                {
                    pixelFormats.Add(*fmt);
                }

                decoders.Add(new Codec(shortName, longName, pixelFormats.ToArray()));
            }

            return(decoders.ToArray());
        }
コード例 #21
0
        /// <summary>
        /// Opens the given URL and prepares decoders for both its best video stream and
        /// its best audio stream, optionally using a hardware device for video decoding.
        /// </summary>
        /// <param name="url">Path or URL of the media to open.</param>
        /// <param name="HWDeviceType">Hardware device type; NONE selects software decoding.</param>
        public AudioStreamDecoder(string url, AVHWDeviceType HWDeviceType = AVHWDeviceType.AV_HWDEVICE_TYPE_NONE)
        {
            _pFormatContext = ffmpeg.avformat_alloc_context();
            _receivedFrame  = ffmpeg.av_frame_alloc();
            var pFormatContext = _pFormatContext;

            ffmpeg.avformat_open_input(&pFormatContext, url, null, null).ThrowExceptionIfError();
            ffmpeg.avformat_find_stream_info(_pFormatContext, null).ThrowExceptionIfError();
            AVCodec *videoCodec = null;

            _streamVideoIndex   = ffmpeg.av_find_best_stream(_pFormatContext, AVMediaType.AVMEDIA_TYPE_VIDEO, -1, -1, &videoCodec, 0).ThrowExceptionIfError();
            _pVideoCodecContext = ffmpeg.avcodec_alloc_context3(videoCodec);
            if (HWDeviceType != AVHWDeviceType.AV_HWDEVICE_TYPE_NONE)
            {
                ffmpeg.av_hwdevice_ctx_create(&_pVideoCodecContext->hw_device_ctx, HWDeviceType, null, null, 0).ThrowExceptionIfError();
            }
            ffmpeg.avcodec_parameters_to_context(_pVideoCodecContext, _pFormatContext->streams[_streamVideoIndex]->codecpar).ThrowExceptionIfError();
            // Derive FPS from the stream's average frame rate; fall back to 25 when unknown.
            if (_pFormatContext->streams[_streamVideoIndex]->avg_frame_rate.den != 0)
            {
                Fps = _pFormatContext->streams[_streamVideoIndex]->avg_frame_rate.num / _pFormatContext->streams[_streamVideoIndex]->avg_frame_rate.den;
                Console.WriteLine("计算得到FPS");
            }
            else
            {
                Console.WriteLine("默认FPS");
                Fps = 25;
            }
            ffmpeg.avcodec_open2(_pVideoCodecContext, videoCodec, null).ThrowExceptionIfError();

            CodecName   = ffmpeg.avcodec_get_name(videoCodec->id);
            FrameSize   = new Size(_pVideoCodecContext->width, _pVideoCodecContext->height);
            PixelFormat = _pVideoCodecContext->pix_fmt;

            // Reusable packet/frame buffers for the decode loop.
            _pPacket = ffmpeg.av_packet_alloc();
            _pFrame  = ffmpeg.av_frame_alloc();



            // Locate and open the audio stream's decoder.
            AVCodec *audioCodec = null;

            _streamAudioIndex   = ffmpeg.av_find_best_stream(_pFormatContext, AVMediaType.AVMEDIA_TYPE_AUDIO, -1, -1, &audioCodec, 0).ThrowExceptionIfError();
            _pAudioCodecContext = ffmpeg.avcodec_alloc_context3(audioCodec);
            ffmpeg.avcodec_parameters_to_context(_pAudioCodecContext, _pFormatContext->streams[_streamAudioIndex]->codecpar).ThrowExceptionIfError();
            ffmpeg.avcodec_open2(_pAudioCodecContext, audioCodec, null).ThrowExceptionIfError();
            // NOTE(review): this skips an audio stream at index 0 — looks like it was
            // meant to be >= 0; confirm before changing.
            if (_streamAudioIndex > 0)
            {
                AVStream *avs = _pFormatContext->streams[_streamAudioIndex];
                Console.WriteLine($"codec_id:{avs->codecpar->codec_id}");
                Console.WriteLine($"format:{avs->codecpar->format}");
                Console.WriteLine($"sample_rate:{avs->codecpar->sample_rate}");
                Console.WriteLine($"channels:{avs->codecpar->channels}");
                Console.WriteLine($"frame_size:{avs->codecpar->frame_size}");
                in_sample_fmt  = _pAudioCodecContext->sample_fmt;
                in_sample_rate = _pAudioCodecContext->sample_rate;    // input sample rate
                in_ch_layout   = _pAudioCodecContext->channel_layout; // input channel layout
                in_channels    = _pAudioCodecContext->channels;
                in_start_time  = avs->start_time;
            }
        }
コード例 #22
0
        /// <summary>
        /// Reads the input video stream's codec parameters, publishes the basic video
        /// properties, creates a software-scaling (colorspace conversion) context and
        /// opens a decoding codec context for the stream.
        /// </summary>
        /// <exception cref="Exception">
        /// Thrown when the codec is unsupported, the scaler cannot be created, the codec
        /// context cannot be copied, or the codec cannot be opened.
        /// </exception>
        private void InitializeVideo()
        {
            // Extract pixel format and codec id
            var inputCodecContext = *(InputVideoStream->codec);
            var inputPixelFormat  = inputCodecContext.pix_fmt;
            var inputCodecId      = inputCodecContext.codec_id;

            // Populate basic properties
            VideoCodec       = inputCodecContext.codec_id.ToString(); // Utils.GetAnsiString(new IntPtr(inputCodecContext.codec_name));
            VideoBitrate     = (int)inputCodecContext.bit_rate;
            VideoFrameWidth  = inputCodecContext.width;
            VideoFrameHeight = inputCodecContext.height;

            // Frame rate as a decimal, plus the duration of a single frame (0 when unknown).
            VideoFrameRate   = Convert.ToDecimal(Convert.ToDouble(inputCodecContext.framerate.num) / Convert.ToDouble(inputCodecContext.framerate.den));
            VideoFrameLength = VideoFrameRate > 0M ? 1M / VideoFrameRate : 0M;

            // Get an input decoder for the input codec
            AVCodec *inputDecoder = ffmpeg.avcodec_find_decoder(inputCodecId);

            if (inputDecoder == null)
            {
                throw new Exception("Unsupported video codec");
            }

            // Create a Software Scaling context -- this allows us to do fast colorspace conversion
            VideoResampler = ffmpeg.sws_getContext(
                VideoFrameWidth, VideoFrameHeight, inputPixelFormat,
                VideoFrameWidth, VideoFrameHeight, Constants.VideoOutputPixelFormat,
                (int)ffmpeg.SWS_BILINEAR, null, null, null);

            if (VideoResampler == null)
            {
                throw new Exception("Could not initialize the output conversion context");
            }

            //Create an output codec context. -- We copy the data from the input context and we
            //then proceed to adjust some output parameters.
            // Before it said: var outputCodecContext = &inputCodecContext;
            // NOTE(review): avcodec_copy_context and the AV_CODEC_CAP/FLAG_TRUNCATED pair
            // below are deprecated FFmpeg APIs — confirm against the bundled FFmpeg version.
            VideoCodecContext = ffmpeg.avcodec_alloc_context3(inputDecoder);
            if (ffmpeg.avcodec_copy_context(VideoCodecContext, &inputCodecContext) != Constants.SuccessCode)
            {
                throw new Exception("Could not create video output codec context from input");
            }

            if ((inputDecoder->capabilities & (int)ffmpeg.AV_CODEC_CAP_TRUNCATED) == (int)ffmpeg.AV_CODEC_CAP_TRUNCATED)
            {
                VideoCodecContext->flags |= (int)ffmpeg.AV_CODEC_FLAG_TRUNCATED;
            }

            if (ffmpeg.avcodec_open2(VideoCodecContext, inputDecoder, null) < Constants.SuccessCode)
            {
                throw new Exception("Could not open codec");
            }

            // All output frames will have the same length and will be held by the same structure; the Decoder frame holder.
            DecodedPictureHolder      = ffmpeg.av_frame_alloc();
            OutputPictureBufferLength = ffmpeg.avpicture_get_size(Constants.VideoOutputPixelFormat, VideoFrameWidth, VideoFrameHeight);
        }
コード例 #23
0
        /// <summary>
        /// Configures an H.264 encoder and an FLV output format context for
        /// width×height video at the given frame rate.
        /// </summary>
        /// <param name="width">Frame width in pixels.</param>
        /// <param name="height">Frame height in pixels.</param>
        /// <param name="frames_per_second">Target frame rate (time base is 1/fps).</param>
        /// <exception cref="Exception">
        /// Thrown when the output context cannot be allocated, the encoder cannot be
        /// found, or the encoder context cannot be allocated.
        /// </exception>
        public void Initialize(int width, int height, int frames_per_second)
        {
            Reset();

            ffmpeg.avformat_network_init();

            fixed(AVFormatContext **c = &formatContext)
            {
                if (ffmpeg.avformat_alloc_output_context2(c, null, "flv", null) < 0)
                {
                    throw new Exception("Could not allocate output format context!");
                }
            }

            codec = ffmpeg.avcodec_find_encoder(AVCodecID.AV_CODEC_ID_H264);
            if (codec == null)
            {
                throw new Exception("codec not found!");
            }

            context = ffmpeg.avcodec_alloc_context3(codec);
            if (context == null)
            {
                throw new Exception("alloc context fail");
            }

            context->codec_id   = codec->id;
            context->codec_type = AVMediaType.AVMEDIA_TYPE_VIDEO;
            context->pix_fmt    = AVPixelFormat.AV_PIX_FMT_YUV420P;
            context->bit_rate   = 400000;
            context->width      = width;
            context->height     = height;
            context->time_base  = new AVRational {
                num = 1, den = frames_per_second
            };
            context->framerate = new AVRational {
                num = frames_per_second, den = 1
            };
            // Encoder tuning parameters (GOP length, quantizer range, H.264 level, refs).
            context->gop_size     = 50;
            context->max_b_frames = 1;
            context->qmin         = 10;
            context->qmax         = 50;
            context->level        = 41;
            context->refs         = 1;
            // context->max_b_frames = 0; // drop B-frames, keeping only I and P frames
            this.frames_per_second = frames_per_second;

            if (codec->id == AVCodecID.AV_CODEC_ID_H264)
            {
                ffmpeg.av_opt_set(context->priv_data, "preset", "slow", 0);
                // ffmpeg.av_opt_set(context->priv_data, "tune", "zerolatency", 0); // zero latency
            }

            // Some formats require stream headers to be separate from the payload.
            if ((formatContext->oformat->flags & ffmpeg.AVFMT_GLOBALHEADER) != 0)
            {
                context->flags |= ffmpeg.AV_CODEC_FLAG_GLOBAL_HEADER;
            }
        }
コード例 #24
0
    /// <summary>
    /// Creates an H.264 encoder that writes an MP4 file at the given path.
    /// </summary>
    /// <param name="outputPath">Path of the MP4 file to create.</param>
    /// <param name="frameSize">Frame dimensions in pixels.</param>
    /// <exception cref="InvalidOperationException">Thrown when the H.264 encoder is unavailable.</exception>
    public H264Encoder(string outputPath, Size frameSize)
    {
        ffmpeg.RootPath = Path.Join(TestData.SolutionDir, "ffmpeg", "bin");
        Console.WriteLine("FFMPEG version: " + ffmpeg.av_version_info());

        _frameSize = frameSize;

        var codecId = AVCodecID.AV_CODEC_ID_H264;

        _pCodec = ffmpeg.avcodec_find_encoder(codecId);
        if (_pCodec == null)
        {
            throw new InvalidOperationException("Codec not found.");
        }

        _pCodecContext            = ffmpeg.avcodec_alloc_context3(_pCodec);
        _pCodecContext->width     = frameSize.Width;
        _pCodecContext->height    = frameSize.Height;
        _pCodecContext->time_base = new AVRational {
            num = 1, den = 1000
        };
        _pCodecContext->pix_fmt = AVPixelFormat.AV_PIX_FMT_YUV420P;
        ffmpeg.av_opt_set(_pCodecContext->priv_data, "preset", "superfast", 0);

        ffmpeg.avcodec_open2(_pCodecContext, _pCodec, null).ThrowExceptionIfError();

        // Planar YUV 4:2:0 line sizes: chroma planes are half the luma width.
        _linesizeY = frameSize.Width;
        _linesizeU = frameSize.Width / 2;
        _linesizeV = frameSize.Width / 2;

        _ySize = _linesizeY * frameSize.Height;
        _uSize = _linesizeU * frameSize.Height / 2;

        _swsContext = null;

        // Allocate a reusable frame matching the encoder geometry/format.
        _frame         = ffmpeg.av_frame_alloc();
        _frame->width  = _pCodecContext->width;
        _frame->height = _pCodecContext->height;
        _frame->format = (int)_pCodecContext->pix_fmt;
        // BUG FIX: this allocation result was previously ignored.
        ffmpeg.av_frame_get_buffer(_frame, 32).ThrowExceptionIfError();

        // Create output context for mp4
        AVFormatContext *outputContext;

        ffmpeg.avformat_alloc_output_context2(&outputContext, null, "mp4", null).ThrowExceptionIfError();
        _outputContext = outputContext;
        // BUG FIX: an unchecked avio_open2 silently produced no output when the
        // destination file could not be opened.
        ffmpeg.avio_open2(&_outputContext->pb, outputPath, ffmpeg.AVIO_FLAG_WRITE, null, null).ThrowExceptionIfError();

        // Create video stream in mp4 container
        _stream = ffmpeg.avformat_new_stream(_outputContext, _pCodec);
        ffmpeg.avcodec_parameters_from_context(_stream->codecpar, _pCodecContext)
        .ThrowExceptionIfError();
        _stream->sample_aspect_ratio = _pCodecContext->sample_aspect_ratio;
        _stream->time_base           = _pCodecContext->time_base;
        // BUG FIX: header-write failures were previously ignored as well.
        ffmpeg.avformat_write_header(_outputContext, null).ThrowExceptionIfError();
    }
コード例 #25
0
ファイル: Codec.cs プロジェクト: sdcb/Sdcb.FFmpegAPIWrapper
        /// <summary>
        /// Wraps a native <see cref="AVCodec"/> pointer.
        /// </summary>
        /// <param name="ptr">The native codec pointer; must not be null.</param>
        /// <exception cref="ArgumentNullException">Thrown when <paramref name="ptr"/> is null.</exception>
        public Codec(AVCodec *ptr)
        {
            _p = ptr == null ? throw new ArgumentNullException(nameof(ptr)) : ptr;
        }
コード例 #26
0
ファイル: VideoEncoder.cs プロジェクト: zsybupt/SaarFFmpeg
        /// <summary>
        /// Validates that the supplied codec is a video codec and passes it through.
        /// </summary>
        /// <param name="codec">The codec to validate.</param>
        /// <returns>The same <paramref name="codec"/> pointer when its media type is video.</returns>
        /// <exception cref="ArgumentException">Thrown when the codec is not a video codec.</exception>
        private static AVCodec *CheckCodec(AVCodec *codec)
        {
            if (codec->Type == AVMediaType.Video)
            {
                return codec;
            }

            throw new ArgumentException($"{codec->Id}不是视频格式");
        }
コード例 #27
0
        /// <summary>
        /// Lazily opens the media source, finds the best audio stream, sets up its decoder,
        /// and configures a resampler that converts decoded audio to signed 16-bit mono PCM
        /// at <c>Helper.AUDIO_SAMPLING_RATE</c>. Safe to call more than once; only the first
        /// call does any work.
        /// </summary>
        public unsafe void InitialiseSource()
        {
            if (!_isInitialised)
            {
                _isInitialised = true;

                // Non-blocking mode so reads return immediately instead of stalling on slow sources.
                _fmtCtx        = ffmpeg.avformat_alloc_context();
                _fmtCtx->flags = ffmpeg.AVFMT_FLAG_NONBLOCK;

                // avformat_open_input may replace the context pointer, so pass a local copy.
                var pFormatContext = _fmtCtx;
                ffmpeg.avformat_open_input(&pFormatContext, _sourceUrl, _inputFormat, null).ThrowExceptionIfError();
                ffmpeg.avformat_find_stream_info(_fmtCtx, null).ThrowExceptionIfError();

                ffmpeg.av_dump_format(_fmtCtx, 0, _sourceUrl, 0);

                // Set up audio decoder.
                AVCodec *audCodec = null;
                _audioStreamIndex = ffmpeg.av_find_best_stream(_fmtCtx, AVMediaType.AVMEDIA_TYPE_AUDIO, -1, -1, &audCodec, 0).ThrowExceptionIfError();
                logger.LogDebug($"FFmpeg file source decoder {ffmpeg.avcodec_get_name(audCodec->id)} audio codec for stream {_audioStreamIndex}.");
                _audDecCtx = ffmpeg.avcodec_alloc_context3(audCodec);
                if (_audDecCtx == null)
                {
                    throw new ApplicationException("Failed to allocate audio decoder codec context.");
                }
                ffmpeg.avcodec_parameters_to_context(_audDecCtx, _fmtCtx->streams[_audioStreamIndex]->codecpar).ThrowExceptionIfError();
                ffmpeg.avcodec_open2(_audDecCtx, audCodec, null).ThrowExceptionIfError();

                // Set up an audio conversion context so that the decoded samples can always be delivered as signed 16 bit mono PCM.

                _swrContext = ffmpeg.swr_alloc();
                ffmpeg.av_opt_set_sample_fmt(_swrContext, "in_sample_fmt", _audDecCtx->sample_fmt, 0);
                ffmpeg.av_opt_set_sample_fmt(_swrContext, "out_sample_fmt", AVSampleFormat.AV_SAMPLE_FMT_S16, 0);

                ffmpeg.av_opt_set_int(_swrContext, "in_sample_rate", _audDecCtx->sample_rate, 0);
                ffmpeg.av_opt_set_int(_swrContext, "out_sample_rate", Helper.AUDIO_SAMPLING_RATE, 0);

                // FIX: Some codecs leave channel_layout unset in the decoder context, so
                // derive a default layout from the channel count in that case.
                if (_audDecCtx->channel_layout == 0)
                {
                    long in_channel_layout = ffmpeg.av_get_default_channel_layout(_audDecCtx->channels);
                    ffmpeg.av_opt_set_channel_layout(_swrContext, "in_channel_layout", in_channel_layout, 0);
                }
                else
                {
                    ffmpeg.av_opt_set_channel_layout(_swrContext, "in_channel_layout", (long)_audDecCtx->channel_layout, 0);
                }
                ffmpeg.av_opt_set_channel_layout(_swrContext, "out_channel_layout", ffmpeg.AV_CH_LAYOUT_MONO, 0);

                ffmpeg.swr_init(_swrContext).ThrowExceptionIfError();


                // Cache stream timing info for pacing playback.
                _audioTimebase      = ffmpeg.av_q2d(_fmtCtx->streams[_audioStreamIndex]->time_base);
                _audioAvgFrameRate  = ffmpeg.av_q2d(_fmtCtx->streams[_audioStreamIndex]->avg_frame_rate);
                // NOTE(review): the fallback `10000 * AUDIO_SAMPLING_RATE` when the average
                // frame rate is unknown looks suspiciously large for a millisecond spacing
                // value — confirm the intended unit against the caller.
                _maxAudioFrameSpace = (int)(_audioAvgFrameRate > 0 ? 1000 / _audioAvgFrameRate : 10000 * Helper.AUDIO_SAMPLING_RATE);
            }
        }
コード例 #28
0
ファイル: Codec.cs プロジェクト: fj1981/cyclone-designer
        /// <summary>
        /// Looks up the encoder registered for the given codec id.
        /// </summary>
        /// <param name="codecID">The codec id to find an encoder for.</param>
        /// <returns>A pointer to the matching encoder.</returns>
        /// <exception cref="ArgumentException">Thrown when no encoder exists for <paramref name="codecID"/>.</exception>
        internal static AVCodec *GetEncoder(AVCodecID codecID)
        {
            AVCodec *encoder = FF.avcodec_find_encoder(codecID);
            if (encoder != null)
            {
                return encoder;
            }

            throw new ArgumentException($"未能找到编码器:{codecID}", nameof(codecID));
        }
コード例 #29
0
ファイル: OutputContext.cs プロジェクト: 999eagle/baka-sharp
        /// <summary>
        /// Adds a new stream to the output format context.
        /// </summary>
        /// <param name="codec">Optional codec the stream will use; may be null.</param>
        /// <returns>A pointer to the newly created stream.</returns>
        /// <exception cref="FFmpegException">Thrown when stream allocation fails.</exception>
        public AVStream *CreateNewStream(AVCodec *codec = null)
        {
            AVStream *newStream = ffmpeg.avformat_new_stream(fmtContext, codec);
            if (newStream == null)
            {
                throw new FFmpegException(ffmpeg.AVERROR(ffmpeg.ENOMEM), "Failed to create new stream.");
            }

            return newStream;
        }
コード例 #30
0
        /// <summary>
        /// Allocates and opens the codec context for this instance.
        /// </summary>
        /// <param name="encoder">If set to <c>true</c> an encoder context is created; otherwise a decoder context.</param>
        /// <exception cref="InvalidOperationException">
        /// Thrown when the instance is not initialized, the context already exists, the codec cannot be
        /// found or allocated, the codec fails to open, or the resampler fails to initialize.
        /// </exception>
        void CreateContext(bool encoder = false)
        {
            if (!Initialized)
            {
                throw new InvalidOperationException("Instance is not initialized yet, call Initialize() first");
            }
            else if (ContextCreated)
            {
                throw new InvalidOperationException("Context already initialized!");
            }

            pCodec = encoder
                ? ffmpeg.avcodec_find_encoder(avCodecID)
                : ffmpeg.avcodec_find_decoder(avCodecID);
            if (pCodec == null)
            {
                throw new InvalidOperationException("VideoCodec not found");
            }

            pCodecContext = ffmpeg.avcodec_alloc_context3(pCodec);
            if (pCodecContext == null)
            {
                throw new InvalidOperationException("Could not allocate codec context");
            }

            // Let the subclass fill in codec-specific parameters.
            SetCodecContextParams(pCodecContext);

            // Codecs that advertise truncated-input support need the matching flag enabled.
            if ((pCodec->capabilities & ffmpeg.AV_CODEC_CAP_TRUNCATED) != 0)
            {
                pCodecContext->flags |= ffmpeg.AV_CODEC_FLAG_TRUNCATED;
            }

            if (ffmpeg.avcodec_open2(pCodecContext, pCodec, null) < 0)
            {
                throw new InvalidOperationException("Could not open codec");
            }

            if (doResample)
            {
                // Let the subclass build the resampler for the freshly opened context.
                pResampler = CreateResampler(pCodecContext);
                if (ffmpeg.swr_is_initialized(pResampler) <= 0)
                {
                    throw new InvalidOperationException("Failed to init resampler");
                }
            }

            ContextCreated = true;
        }
コード例 #31
0
ファイル: Codec.cs プロジェクト: ermau/libav.net
		/// <summary>
		/// Wraps a native codec pointer together with its owning context and caches the codec name.
		/// </summary>
		/// <param name="ptr">Native AVCodec pointer; must not be zero.</param>
		/// <param name="context">The codec context that owns this codec; must not be null.</param>
		internal Codec (IntPtr ptr, CodecContext context)
		{
			if (context == null)
				throw new ArgumentNullException ("context");
			if (ptr == IntPtr.Zero)
				throw new ArgumentException ("Null pointer");

			this.context = context;
			this.ptr = ptr;
			this.codec = (AVCodec*)ptr;

			// Cache the codec's ANSI name string from native memory.
			Name = Marshal.PtrToStringAnsi ((IntPtr)this.codec->name);
		}