Example #1
        private void AddVideoStream(AVCodecID codecId)
        {
            GetVideoCodec(codecId);

            _videoStream = ffmpeg.avformat_new_stream(_formatContext, null);
            if (_videoStream == null)
            {
                throw new Exception("Failed creating new video stream.");
            }

            _videoStream->time_base.num = _videoCodecContext->time_base.num;
            _videoStream->time_base.den = _videoCodecContext->time_base.den;

            if (_videoCodecContext->extradata_size > 0)
            {
                _videoStream->codecpar->extradata_size = _videoCodecContext->extradata_size;
                _videoStream->codecpar->extradata      = (byte *)ffmpeg.av_malloc((ulong)_videoCodecContext->extradata_size + ffmpeg.AV_INPUT_BUFFER_PADDING_SIZE);

                // copy bytes 0..extradata_size-1
                for (var j = 0; j < _videoCodecContext->extradata_size; j++)
                {
                    _videoStream->codecpar->extradata[j] = _videoCodecContext->extradata[j];
                }
            }
        }
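The byte loop above can also be done as a bulk copy. A minimal sketch, assuming the same FFmpeg.AutoGen bindings (the helper name is illustrative); av_mallocz zero-fills the allocation, so the AV_INPUT_BUFFER_PADDING_SIZE tail starts out zeroed as FFmpeg requires:

        private static unsafe void CopyExtradata(AVCodecContext *codecContext, AVStream *stream)
        {
            var size = codecContext->extradata_size;
            if (size <= 0)
            {
                return;
            }

            stream->codecpar->extradata_size = size;
            // zero-filled allocation keeps the padding bytes at zero
            stream->codecpar->extradata = (byte *)ffmpeg.av_mallocz((ulong)size + ffmpeg.AV_INPUT_BUFFER_PADDING_SIZE);
            Buffer.MemoryCopy(codecContext->extradata, stream->codecpar->extradata, size, size);
        }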
Example #2
        /// <summary>
        /// Adds a stream by copying its codec parameters via <see cref="ffmpeg.avcodec_parameters_copy(AVCodecParameters*, AVCodecParameters*)"/>.
        /// </summary>
        /// <param name="stream"></param>
        /// <param name="flags"></param>
        /// <returns></returns>
        public MediaStream AddStream(MediaStream stream, int flags = 0)
        {
            AVStream *pstream = ffmpeg.avformat_new_stream(pFormatContext, null);

            pstream->id = (int)(pFormatContext->nb_streams - 1);
            ffmpeg.avcodec_parameters_copy(pstream->codecpar, stream.Stream.codecpar);
            pstream->codecpar->codec_tag = 0;
            MediaCodec mediaCodec = null;

            if (stream.Codec != null)
            {
                mediaCodec = MediaEncoder.CreateEncode(stream.Codec.AVCodecContext.codec_id, flags, _ =>
                {
                    AVCodecContext *pContext       = _;
                    AVCodecParameters *pParameters = ffmpeg.avcodec_parameters_alloc();
                    ffmpeg.avcodec_parameters_from_context(pParameters, stream.Codec).ThrowIfError();
                    ffmpeg.avcodec_parameters_to_context(pContext, pParameters);
                    ffmpeg.avcodec_parameters_free(&pParameters);
                    pContext->time_base = stream.Stream.r_frame_rate.ToInvert();
                });
            }
            streams.Add(new MediaStream(pstream)
            {
                TimeBase = stream.Stream.r_frame_rate.ToInvert(), Codec = mediaCodec
            });
            return(streams.Last());
        }
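For context, a stream added this way is normally fed by a remux loop. A minimal sketch with raw FFmpeg.AutoGen calls (assuming already-opened input and output contexts; names are illustrative):

        private static unsafe void RemuxPackets(AVFormatContext *inCtx, AVFormatContext *outCtx)
        {
            AVPacket *pkt = ffmpeg.av_packet_alloc();

            while (ffmpeg.av_read_frame(inCtx, pkt) >= 0)
            {
                AVStream *inStream  = inCtx->streams[pkt->stream_index];
                AVStream *outStream = outCtx->streams[pkt->stream_index];

                // rescale timestamps from the input to the output stream time base
                ffmpeg.av_packet_rescale_ts(pkt, inStream->time_base, outStream->time_base);
                ffmpeg.av_interleaved_write_frame(outCtx, pkt);
                ffmpeg.av_packet_unref(pkt);
            }
            ffmpeg.av_packet_free(&pkt);
        }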
Example #3
    public StreamDecoder(
        AVFormatContext *formatContext,
        int streamIndex,
        AVCodec *codec,
        AVHWDeviceType hwDeviceType = AVHWDeviceType.AV_HWDEVICE_TYPE_NONE)
        : base(null)
    {
        _streamIndex   = streamIndex;
        _formatContext = formatContext;
        _stream        = formatContext->streams[streamIndex];

        _codecContext = ffmpeg.avcodec_alloc_context3(codec);
        if (hwDeviceType != AVHWDeviceType.AV_HWDEVICE_TYPE_NONE)
        {
            ffmpeg.av_hwdevice_ctx_create(&_codecContext->hw_device_ctx, hwDeviceType, null, null, 0)
            .ThrowExceptionIfError();
        }

        ffmpeg.avcodec_parameters_to_context(_codecContext, _stream->codecpar)
        .ThrowExceptionIfError();
        ffmpeg.avcodec_open2(_codecContext, codec, null).ThrowExceptionIfError();
        _codecContext->pkt_timebase = _stream->time_base;

        _codec = codec;

        _packet        = ffmpeg.av_packet_alloc();
        _frame         = ffmpeg.av_frame_alloc();
        _receivedFrame = ffmpeg.av_frame_alloc();
    }
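A typical decode loop for this constructor, sketched against the same bindings (the method is illustrative, mirroring the usual send/receive pattern); hardware frames are copied back to system memory via av_hwframe_transfer_data:

    public bool TryDecodeNextFrame(out AVFrame frame)
    {
        ffmpeg.av_frame_unref(_frame);
        ffmpeg.av_frame_unref(_receivedFrame);
        int error;

        do
        {
            // pump packets for our stream until the decoder produces a frame
            do
            {
                ffmpeg.av_packet_unref(_packet);
                error = ffmpeg.av_read_frame(_formatContext, _packet);
                if (error == ffmpeg.AVERROR_EOF)
                {
                    frame = *_frame;
                    return false;
                }
                error.ThrowExceptionIfError();
            } while (_packet->stream_index != _streamIndex);

            ffmpeg.avcodec_send_packet(_codecContext, _packet).ThrowExceptionIfError();
            error = ffmpeg.avcodec_receive_frame(_codecContext, _frame);
        } while (error == ffmpeg.AVERROR(ffmpeg.EAGAIN));
        error.ThrowExceptionIfError();

        if (_codecContext->hw_device_ctx != null)
        {
            // the decoded frame lives in GPU memory; copy it into _receivedFrame
            ffmpeg.av_hwframe_transfer_data(_receivedFrame, _frame, 0).ThrowExceptionIfError();
            frame = *_receivedFrame;
        }
        else
        {
            frame = *_frame;
        }

        return true;
    }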
Example #4
        public FFmpegAudioStream(IAudioData audioData, AVFormatContext *formatContext, AVStream *stream) : base(audioData)
        {
            lock (FFmpegDecoder.SyncObject)
            {
                _formatContext = formatContext;
                _stream        = stream;

                var codec = avcodec_find_decoder(_stream->codecpar->codec_id);

                _codecContext = avcodec_alloc_context3(codec);

                if (avcodec_parameters_to_context(_codecContext, _stream->codecpar) != 0)
                {
                    throw new Exception("Failed to copy codec parameters to the codec context."); // TODO
                }
                AVDictionary *dict = null;
                av_dict_set_int(&dict, "refcounted_frames", 1, 0);

                if (avcodec_open2(_codecContext, codec, &dict) != 0)
                {
                    throw new Exception("Failed to open the audio codec.");
                }

                _avFrame = av_frame_alloc();
            }
        }
Example #5
        private static unsafe AVStream *add_audio_stream(AVFormatContext *oc, AVCodecID codec_id, int sample_rate = 44100)
        {
            AVCodecContext *c;
            AVCodec *       encoder = ffmpeg.avcodec_find_encoder(codec_id);
            AVStream *      st      = ffmpeg.avformat_new_stream(oc, encoder);

            if (st == null)
            {
                die("av_new_stream");
            }

            c             = st->codec;
            c->codec_id   = codec_id;
            c->codec_type = AVMediaType.AVMEDIA_TYPE_AUDIO;

            /* put sample parameters */
            c->bit_rate       = 64000;
            c->sample_rate    = sample_rate;
            c->channels       = 2;
            c->sample_fmt     = encoder->sample_fmts[0];
            c->channel_layout = ffmpeg.AV_CH_LAYOUT_STEREO;

            // some formats want stream headers to be separate
            if ((oc->oformat->flags & ffmpeg.AVFMT_GLOBALHEADER) != 0)
            {
                c->flags |= ffmpeg.AV_CODEC_FLAG_GLOBAL_HEADER;
            }

            return(st);
        }
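The snippet above writes through st->codec, which newer FFmpeg releases removed. A hedged sketch of the codecpar-based equivalent (not the original author's code; same bindings assumed):

        private static unsafe AVStream *add_audio_stream_codecpar(AVFormatContext *oc, AVCodecID codec_id, int sample_rate = 44100)
        {
            AVCodec *       encoder = ffmpeg.avcodec_find_encoder(codec_id);
            AVStream *      st      = ffmpeg.avformat_new_stream(oc, encoder);
            AVCodecContext *c       = ffmpeg.avcodec_alloc_context3(encoder);

            c->bit_rate       = 64000;
            c->sample_rate    = sample_rate;
            c->channels       = 2;
            c->sample_fmt     = encoder->sample_fmts[0];
            c->channel_layout = ffmpeg.AV_CH_LAYOUT_STEREO;

            if ((oc->oformat->flags & ffmpeg.AVFMT_GLOBALHEADER) != 0)
            {
                c->flags |= ffmpeg.AV_CODEC_FLAG_GLOBAL_HEADER;
            }

            ffmpeg.avcodec_open2(c, encoder, null);
            // mirror the opened context into the stream's public parameters
            ffmpeg.avcodec_parameters_from_context(st->codecpar, c);
            return st;
        }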
Example #6
        internal VideoEncoder(AVStream *stream, VideoFormat format, VideoEncoderParameters encoderParams = null) : base(stream)
        {
            encoderParams = encoderParams ?? VideoEncoderParameters.Default;

            InFormat = format;
            Init(encoderParams);
        }
Example #7
        public MediaReader(Stream stream, InFormat iformat = null, MediaDictionary options = null)
        {
            baseStream = stream;
            avio_Alloc_Context_Read_Packet = ReadFunc;
            avio_Alloc_Context_Seek        = SeekFunc;
            pFormatContext     = ffmpeg.avformat_alloc_context();
            pFormatContext->pb = ffmpeg.avio_alloc_context((byte *)ffmpeg.av_malloc(bufferLength), bufferLength, 0, null,
                                                           avio_Alloc_Context_Read_Packet, null, avio_Alloc_Context_Seek);
            fixed(AVFormatContext **ppFormatContext = &pFormatContext)
            {
                ffmpeg.avformat_open_input(ppFormatContext, null, iformat, options).ThrowExceptionIfError();
            }

            ffmpeg.avformat_find_stream_info(pFormatContext, null).ThrowExceptionIfError();
            base.Format = iformat ?? new InFormat(pFormatContext->iformat);

            for (int i = 0; i < pFormatContext->nb_streams; i++)
            {
                AVStream *  pStream = pFormatContext->streams[i];
                MediaDecode codec   = MediaDecode.CreateDecode(pStream->codecpar->codec_id, _ =>
                {
                    ffmpeg.avcodec_parameters_to_context(_, pStream->codecpar);
                });
                streams.Add(new MediaStream(pStream)
                {
                    Codec = codec
                });
            }
        }
Example #8
        public void CheckSleep(AVPacket packet, long pts, AVStream *refStream)
        {
            if (packet.stream_index == refStream->index)
            {
                if (pts > 0)
                {
                    if (_firstTimestamp == 0)
                    {
                        _start          = DateTime.UtcNow;
                        _firstTimestamp = pts;
                    }

                    if (!_checkSleep)
                    {
                        _clock.WaitOne(10); //don't hammer it
                        return;
                    }

                    var ratio   = (Convert.ToDouble(refStream->time_base.num) / Convert.ToDouble(refStream->time_base.den)) * 1000;
                    var ptsbase = pts * ratio;
                    var ptsref  = _firstTimestamp * ratio;

                    var ms      = (DateTime.UtcNow - _start).TotalMilliseconds;
                    int msSleep = (int)Math.Max(0, (ptsbase - ptsref) - ms);

                    if (msSleep > 0)
                    {
                        _clock.WaitOne(msSleep);
                    }
                }
            }
        }
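The hand-rolled ratio is just the stream time base expressed in milliseconds; the bindings expose av_q2d for exactly this (a sketch, assuming FFmpeg.AutoGen's av_q2d wrapper):

                    // one pts tick in milliseconds, equivalent to (num / den) * 1000
                    var ratio = ffmpeg.av_q2d(refStream->time_base) * 1000;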
Example #9
        /// <summary>
        /// Load path
        /// </summary>
        /// <param name="url"></param>
        /// <param name="iformat"></param>
        /// <param name="options"></param>
        public MediaReader(string url, InFormat iformat = null, MediaDictionary options = null)
        {
            unsafe
            {
                fixed(AVFormatContext **ppFormatContext = &pFormatContext)
                {
                    ffmpeg.avformat_open_input(ppFormatContext, url, iformat, options).ThrowExceptionIfError();
                }

                ffmpeg.avformat_find_stream_info(pFormatContext, null).ThrowExceptionIfError();
                base.Format = iformat ?? new InFormat(pFormatContext->iformat);

                for (int i = 0; i < pFormatContext->nb_streams; i++)
                {
                    AVStream *  pStream = pFormatContext->streams[i];
                    MediaDecode codec   = MediaDecode.CreateDecode(pStream->codecpar->codec_id, _ =>
                    {
                        ffmpeg.avcodec_parameters_to_context(_, pStream->codecpar);
                    });
                    streams.Add(new MediaStream(pStream)
                    {
                        Codec = codec
                    });
                }
            }
        }
Example #10
        public static FFmpegStream Create([NotNull] AVStream *pStream, [NotNull] FFmpegMedia media)
        {
            if (pStream == null)
            {
                throw new ArgumentNullException(nameof(pStream));
            }
            if (media == null)
            {
                throw new ArgumentNullException(nameof(media));
            }

            FFmpegStream result;
            var          pCodec = pStream->codec;

            switch (pCodec->codec_type)
            {
            case AVMediaType.AVMEDIA_TYPE_AUDIO:
                result = new AudioStream(pStream, media);
                break;

            case AVMediaType.AVMEDIA_TYPE_VIDEO:
                result = new VideoStream(pStream, media);
                break;

            case AVMediaType.AVMEDIA_TYPE_SUBTITLE:
                result = new SubtitleStream(pStream, media);
                break;

            default:
                return(null);
            }
            return(result);
        }
Example #11
        public unsafe void GetAVFormatContext(string out_filename)
        {
            AVFormatContext *ofmt_ctx = default(AVFormatContext *);

            ffmpeg.avformat_alloc_output_context2(&ofmt_ctx, null, "flv", out_filename);
            ffmpeg.av_interleaved_write_frame(ofmt_ctx, this._pPacket);
            // find the first video stream (AVMEDIA_TYPE_VIDEO == 0)
            AVStream *pStream2 = null;
            int       i        = 0;

            while (i < this._pFormatContext->nb_streams &&
                   this._pFormatContext->streams[i]->codec->codec_type != AVMediaType.AVMEDIA_TYPE_VIDEO)
            {
                i++;
            }
            if (i >= this._pFormatContext->nb_streams)
            {
                return;
            }
            pStream2 = this._pFormatContext->streams[i];
            AVStream *out_stream = ffmpeg.avformat_new_stream(ofmt_ctx, pStream2->codec->codec);
        }
Example #12
        public DecoderStream(MediaFile file, AVStream *stream)
        {
            // Initialize instance variables
            m_disposed = false;
            m_position = m_bufferUsedSize = 0;
            m_file     = file;
            m_avStream = *stream;

            m_avCodecCtx = *m_avStream.codec;

            // Open the decoding codec
            AVCodec *avCodec = FFmpeg.avcodec_find_decoder(m_avCodecCtx.codec_id);

            if (avCodec == null)
            {
                throw new DecoderException("No decoder found");
            }

            if (FFmpeg.avcodec_open(ref m_avCodecCtx, avCodec) < 0)
            {
                throw new DecoderException("Error opening codec");
            }

            m_codecOpen = true;
        }
Example #13
 /// <summary>
 /// Called once for each audio stream
 /// </summary>
 internal AudioContext(AVStream *stream, FFMPEGDecoder source, AudioFormat resampleTarget) : base(stream, source)
 {
     _resampleTarget = resampleTarget;
     BufferCapacity  = resampleTarget.SampleRate * resampleTarget.Channels * 4;
     _circBuf        = new CircularBuffer(BufferCapacity);
     CreateAudio();
 }
Example #14
 public MediaStream(AVFormatContext *formatContext, int streamIndex, AVStream *rawStream, AVCodec *decodeCodec = null)
 {
     RawFormatContext      = formatContext;
     StreamIndex           = streamIndex;
     RawStream             = rawStream;
     _cachedRawDecodeCodec = decodeCodec;
 }
Example #15
        /// <summary>
        /// Initializes a new instance of the <see cref="StreamInfo"/> class.
        /// </summary>
        /// <param name="stream">A generic stream.</param>
        /// <param name="container">The input container.</param>
        internal unsafe StreamInfo(AVStream *stream, InputContainer container)
        {
            var codec = stream->codec;

            Metadata     = new ReadOnlyDictionary <string, string>(FFDictionary.ToDictionary(stream->metadata));
            CodecName    = ffmpeg.avcodec_get_name(codec->codec_id);
            CodecId      = codec->codec_id.FormatEnum(12);
            Index        = stream->index;
            IsInterlaced = codec->field_order != AVFieldOrder.AV_FIELD_PROGRESSIVE &&
                           codec->field_order != AVFieldOrder.AV_FIELD_UNKNOWN;
            TimeBase = stream->time_base;
            Duration = stream->duration >= 0
                ? stream->duration.ToTimeSpan(stream->time_base)
                : TimeSpan.FromTicks(container.Pointer->duration * 10);
            var start = stream->start_time.ToTimeSpan(stream->time_base);

            StartTime = start == TimeSpan.MinValue ? TimeSpan.Zero : start;

            if (stream->nb_frames > 0)
            {
                IsFrameCountProvidedByContainer = true;
                FrameCount = (int)stream->nb_frames;
            }
            else
            {
                FrameCount = Duration.ToFrameNumber(stream->avg_frame_rate);
            }
        }
Example #16
        internal StreamContext(AVStream *stream, FFMPEGDecoder source)
        {
            _stream = stream;
            _source = source;
            var origCtx = _stream->codec;

            //find the corresponding codec
            _codec = ffmpeg.avcodec_find_decoder(origCtx->codec_id);
            if (_codec == null)
            {
                throw new NotSupportedException("This " + ffmpeg.av_get_media_type_string(origCtx->codec_type) +
                                                " codec is not supported by the current ffmpeg binaries!");
            }

            //copy the context from ffmpeg (required because we don't own the other one)
            _codecCtx = ffmpeg.avcodec_alloc_context3(_codec);
            if (ffmpeg.avcodec_parameters_to_context(_codecCtx, _stream->codecpar) != 0)
            {
                throw new Exception("Couldn't copy stream parameters!");
            }

            if (ffmpeg.avcodec_open2(_codecCtx, _codec, null) != 0)
            {
                throw new Exception("Couldn't copy the codec!");
            }


            _decoded = ffmpeg.av_frame_alloc();
        }
Example #17
        public AudioStreamDecoder(string url, AVHWDeviceType HWDeviceType = AVHWDeviceType.AV_HWDEVICE_TYPE_NONE)
        {
            _pFormatContext = ffmpeg.avformat_alloc_context();
            _receivedFrame  = ffmpeg.av_frame_alloc();
            var pFormatContext = _pFormatContext;

            ffmpeg.avformat_open_input(&pFormatContext, url, null, null).ThrowExceptionIfError();
            ffmpeg.avformat_find_stream_info(_pFormatContext, null).ThrowExceptionIfError();
            AVCodec *videoCodec = null;

            _streamVideoIndex   = ffmpeg.av_find_best_stream(_pFormatContext, AVMediaType.AVMEDIA_TYPE_VIDEO, -1, -1, &videoCodec, 0).ThrowExceptionIfError();
            _pVideoCodecContext = ffmpeg.avcodec_alloc_context3(videoCodec);
            if (HWDeviceType != AVHWDeviceType.AV_HWDEVICE_TYPE_NONE)
            {
                ffmpeg.av_hwdevice_ctx_create(&_pVideoCodecContext->hw_device_ctx, HWDeviceType, null, null, 0).ThrowExceptionIfError();
            }
            ffmpeg.avcodec_parameters_to_context(_pVideoCodecContext, _pFormatContext->streams[_streamVideoIndex]->codecpar).ThrowExceptionIfError();
            if (_pFormatContext->streams[_streamVideoIndex]->avg_frame_rate.den != 0)
            {
                Fps = _pFormatContext->streams[_streamVideoIndex]->avg_frame_rate.num / _pFormatContext->streams[_streamVideoIndex]->avg_frame_rate.den;
                Console.WriteLine("计算得到FPS");
            }
            else
            {
                Console.WriteLine("默认FPS");
                Fps = 25;
            }
            ffmpeg.avcodec_open2(_pVideoCodecContext, videoCodec, null).ThrowExceptionIfError();

            CodecName   = ffmpeg.avcodec_get_name(videoCodec->id);
            FrameSize   = new Size(_pVideoCodecContext->width, _pVideoCodecContext->height);
            PixelFormat = _pVideoCodecContext->pix_fmt;

            _pPacket = ffmpeg.av_packet_alloc();
            _pFrame  = ffmpeg.av_frame_alloc();



            AVCodec *audioCodec = null;

            _streamAudioIndex   = ffmpeg.av_find_best_stream(_pFormatContext, AVMediaType.AVMEDIA_TYPE_AUDIO, -1, -1, &audioCodec, 0).ThrowExceptionIfError();
            _pAudioCodecContext = ffmpeg.avcodec_alloc_context3(audioCodec);
            ffmpeg.avcodec_parameters_to_context(_pAudioCodecContext, _pFormatContext->streams[_streamAudioIndex]->codecpar).ThrowExceptionIfError();
            ffmpeg.avcodec_open2(_pAudioCodecContext, audioCodec, null).ThrowExceptionIfError();
            if (_streamAudioIndex >= 0) // av_find_best_stream returns a zero-based index
            {
                AVStream *avs = _pFormatContext->streams[_streamAudioIndex];
                Console.WriteLine($"codec_id:{avs->codecpar->codec_id}");
                Console.WriteLine($"format:{avs->codecpar->format}");
                Console.WriteLine($"sample_rate:{avs->codecpar->sample_rate}");
                Console.WriteLine($"channels:{avs->codecpar->channels}");
                Console.WriteLine($"frame_size:{avs->codecpar->frame_size}");
                in_sample_fmt  = _pAudioCodecContext->sample_fmt;
                in_sample_rate = _pAudioCodecContext->sample_rate;    // input sample rate
                in_ch_layout   = _pAudioCodecContext->channel_layout; // input channel layout
                in_channels    = _pAudioCodecContext->channels;
                in_start_time  = avs->start_time;
            }
        }
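With the input sample format, rate, and channel layout captured above, constructing a resampler is the usual next step. A minimal sketch, assuming the same bindings; the 44.1 kHz stereo S16 target is illustrative:

        private unsafe SwrContext *CreateResampler()
        {
            // out: 44.1 kHz stereo signed 16-bit; in: values captured from the audio codec context
            SwrContext *swr = ffmpeg.swr_alloc_set_opts(null,
                                                        (long)ffmpeg.AV_CH_LAYOUT_STEREO, AVSampleFormat.AV_SAMPLE_FMT_S16, 44100,
                                                        (long)in_ch_layout, in_sample_fmt, in_sample_rate,
                                                        0, null);
            ffmpeg.swr_init(swr);
            return swr;
        }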
Example #18
 private MediaStream(AVStream *ptr)
 {
     if (ptr == null)
     {
         throw new ArgumentNullException(nameof(ptr));
     }
     _ptr = ptr;
 }
Example #19
        public AudioStream([NotNull] AVStream *pStream, [NotNull] FFmpegMedia media)
            : base(pStream, media)
        {
            var pCodec = pStream->codec;

            ChannelCount = pCodec->channels;
            SampleRate   = pCodec->sample_rate;
        }
Example #20
        /// <summary>
        /// Initializes a new instance of the <see cref="InputStream{TFrame}"/> class.
        /// </summary>
        /// <param name="stream">The multimedia stream.</param>
        /// <param name="owner">The container that owns the stream.</param>
        public InputStream(AVStream *stream, InputContainer owner)
            : base(stream)
        {
            OwnerFile = owner;

            Type = typeof(TFrame) == typeof(VideoFrame) ? MediaType.Video : MediaType.None;
            Info = new StreamInfo(stream, owner);
        }
Example #21
        public StreamDecoder(string url, DecoderConfiguration configuration)
        {
            FFmpegBinariesHelper.RegisterFFmpegBinaries(); //Should not be here

            this.decoderConfiguration = configuration;
            _pFormatContext           = ffmpeg.avformat_alloc_context();

            var pFormatContext = _pFormatContext;

            ffmpeg.avformat_open_input(&pFormatContext, url, null, null).ThrowExceptionIfError();

            ffmpeg.avformat_find_stream_info(_pFormatContext, null).ThrowExceptionIfError();

            // find the first video stream
            AVStream *pStream = null;

            for (var i = 0; i < _pFormatContext->nb_streams; i++)
            {
                if (_pFormatContext->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
                {
                    pStream = _pFormatContext->streams[i];
                    break;
                }
            }

            if (pStream == null)
            {
                throw new InvalidOperationException("Could not found video stream.");
            }

            _streamIndex   = pStream->index;
            _pCodecContext = pStream->codec;


            var codecId = _pCodecContext->codec_id;
            var pCodec  = ffmpeg.avcodec_find_decoder(codecId);

            if (pCodec == null)
            {
                throw new InvalidOperationException("Unsupported codec.");
            }

            ffmpeg.avcodec_open2(_pCodecContext, pCodec, null).ThrowExceptionIfError();

            decoderConfiguration.codec           = FormatHelper.FFmpegToOT2(codecId);
            decoderConfiguration.inputResolution = new Resolution(_pCodecContext->height, _pCodecContext->width);
            //CodecName = ffmpeg.avcodec_get_name(codecId);
            //PixelFormat = _pCodecContext->pix_fmt;
            decoderConfiguration.inputPixelFormat = FormatHelper.FFmpegToOT2(_pCodecContext->pix_fmt);

            _pPacket       = ffmpeg.av_packet_alloc();
            _pFrame        = ffmpeg.av_frame_alloc();
            decodingThread = new Thread(() =>
            {
                while (DecodeFrame() == 0)
                {
                }
            });
        }
Example #22
        public VideoStreamDecoder(string device)
        {
            _pFormatContext = ffmpeg.avformat_alloc_context();
            var pFormatContext = _pFormatContext;

            //ffmpeg.av_register_all();
            ffmpeg.avdevice_register_all();

            //webcam
            AVInputFormat *iformat = ffmpeg.av_find_input_format("dshow");

            ffmpeg.avformat_open_input(&pFormatContext, device, iformat, null).ThrowExceptionIfError();

            //open the media file; takes a URL or a file name
            //ffmpeg.avformat_open_input(&pFormatContext, url, null, null).ThrowExceptionIfError();

            //reads media info; this is a blocking call, so it may block when reading over a network protocol
            ffmpeg.avformat_find_stream_info(_pFormatContext, null).ThrowExceptionIfError();

            // find the first video stream
            AVStream *pStream = null;

            for (var i = 0; i < _pFormatContext->nb_streams; i++)
            {
                if (_pFormatContext->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
                {
                    pStream = _pFormatContext->streams[i];
                    break;
                }
            }

            if (pStream == null)
            {
                throw new InvalidOperationException("Could not find video stream.");
            }

            _streamIndex   = pStream->index;
            _pCodecContext = pStream->codec;

            var codecId = _pCodecContext->codec_id;
            var pCodec  = ffmpeg.avcodec_find_decoder(codecId); //H264

            if (pCodec == null)
            {
                throw new InvalidOperationException("Unsupported codec.");
            }

            //open codec
            ffmpeg.avcodec_open2(_pCodecContext, pCodec, null).ThrowExceptionIfError();

            CodecName   = ffmpeg.avcodec_get_name(codecId);
            FrameSize   = new System.Windows.Size(_pCodecContext->width, _pCodecContext->height); // 640 480
            PixelFormat = _pCodecContext->pix_fmt;

            _pPacket = ffmpeg.av_packet_alloc();
            _pFrame  = ffmpeg.av_frame_alloc();
        }
Example #23
        public void Start()
        {
            var OutFmt = av_guess_format(null, Filename, null);

            if (OutFmt == null)
            {
                throw new Exception("Couldn't find output format");
            }

            AVFormatContext *ctx = null;

            avformat_alloc_output_context2(&ctx, OutFmt, null, null).AssertNotNeg();
            OutCtx = ctx != null ? ctx : throw new Exception("Couldn't allocate output context");

            if (Vid != null)
            {
                VStream = avformat_new_stream(OutCtx, avcodec_find_encoder(AVCodecID.AV_CODEC_ID_H264));
                if (VStream == null)
                {
                    throw new Exception("Couldn't allocate video stream");
                }

                VStream->codecpar->codec_id   = AVCodecID.AV_CODEC_ID_H264;
                VStream->codecpar->codec_type = AVMediaType.AVMEDIA_TYPE_VIDEO;
                VStream->codecpar->width      = StreamInfo.VideoWidth;
                VStream->codecpar->height     = StreamInfo.VideoHeight;
                VStream->codecpar->format     = (int)AVPixelFormat.AV_PIX_FMT_YUV420P;
            }

            if (Aud != null)
            {
                AStream = avformat_new_stream(OutCtx, avcodec_find_encoder(AVCodecID.AV_CODEC_ID_MP3));
                if (AStream == null)
                {
                    throw new Exception("Couldn't allocate audio stream");
                }

                AStream->id = Vid == null ? 0 : 1;
                AStream->codecpar->codec_id       = AVCodecID.AV_CODEC_ID_MP3;
                AStream->codecpar->codec_type     = AVMediaType.AVMEDIA_TYPE_AUDIO;
                AStream->codecpar->sample_rate    = StreamInfo.AudioSampleRate;
                AStream->codecpar->channels       = StreamInfo.AudioChannels;
                AStream->codecpar->format         = (int)AVSampleFormat.AV_SAMPLE_FMT_FLTP;
                AStream->codecpar->channel_layout = AV_CH_LAYOUT_STEREO;
                AStream->codecpar->frame_size     = StreamInfo.AudioSamplesPerPayload;
                AStream->codecpar->bit_rate       = 128000;
            }

            avio_open(&OutCtx->pb, Filename, AVIO_FLAG_WRITE).Assert();
            avformat_write_header(OutCtx, null).Assert();

            Vid?.UseContext(OutCtx);
            Aud?.UseContext(OutCtx, AStream->id);

            Console.WriteLine("Starting to stream. Press enter to quit, if you close SysDVR-Client via the X button the output video may become corrupted");
            Running = true;
        }
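The matching shutdown path is not shown; a hedged sketch of what it could look like in the same using-static style (Stop is a hypothetical counterpart to Start):

        public void Stop()
        {
            // flush the muxer queue, finalize the container, and release the context
            av_write_trailer(OutCtx).Assert();
            avio_closep(&OutCtx->pb).Assert();
            avformat_free_context(OutCtx);
            OutCtx  = null;
            Running = false;
        }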
Example #24
 public AVStream* this[int idx]
 {
     get
     {
         switch (idx)
         {
             case 0: return stream_ptrs1;
             case 1: return stream_ptrs2;
             case 2: return stream_ptrs3;
             case 3: return stream_ptrs4;
             case 4: return stream_ptrs5;
             case 5: return stream_ptrs6;
             case 6: return stream_ptrs7;
             case 7: return stream_ptrs8;
             case 8: return stream_ptrs9;
             case 9: return stream_ptrs10;
             case 10: return stream_ptrs11;
             case 11: return stream_ptrs12;
             case 12: return stream_ptrs13;
             case 13: return stream_ptrs14;
             case 14: return stream_ptrs15;
             case 15: return stream_ptrs16;
             case 16: return stream_ptrs17;
             case 17: return stream_ptrs18;
             case 18: return stream_ptrs19;
             case 19: return stream_ptrs20;
             default: throw new ArgumentOutOfRangeException();
         }
     }
     set
     {
         switch (idx)
         {
             case 0: stream_ptrs1 = value; break;
             case 1: stream_ptrs2 = value; break;
             case 2: stream_ptrs3 = value; break;
             case 3: stream_ptrs4 = value; break;
             case 4: stream_ptrs5 = value; break;
             case 5: stream_ptrs6 = value; break;
             case 6: stream_ptrs7 = value; break;
             case 7: stream_ptrs8 = value; break;
             case 8: stream_ptrs9 = value; break;
             case 9: stream_ptrs10 = value; break;
             case 10: stream_ptrs11 = value; break;
             case 11: stream_ptrs12 = value; break;
             case 12: stream_ptrs13 = value; break;
             case 13: stream_ptrs14 = value; break;
             case 14: stream_ptrs15 = value; break;
             case 15: stream_ptrs16 = value; break;
             case 16: stream_ptrs17 = value; break;
             case 17: stream_ptrs18 = value; break;
             case 18: stream_ptrs19 = value; break;
             case 19: stream_ptrs20 = value; break;
             default: throw new ArgumentOutOfRangeException();
         }
     }
 }
Example #25
        /// <summary>
        /// Initializes a new instance of the <see cref="InputStream{TFrame}"/> class.
        /// </summary>
        /// <param name="stream">The multimedia stream.</param>
        /// <param name="owner">The container that owns the stream.</param>
        public InputStream(AVStream *stream, InputContainer owner)
            : base(stream)
        {
            OwnerFile   = owner;
            PacketQueue = new ObservableQueue <MediaPacket>();

            Type = typeof(TFrame) == typeof(VideoFrame) ? MediaType.Video : MediaType.None;
            Info = new StreamInfo(stream, owner);
        }
Example #26
    public H264Encoder(string outputPath, Size frameSize)
    {
        ffmpeg.RootPath = Path.Join(TestData.SolutionDir, "ffmpeg", "bin");
        Console.WriteLine("FFMPEG version: " + ffmpeg.av_version_info());

        _frameSize = frameSize;

        var codecId = AVCodecID.AV_CODEC_ID_H264;

        _pCodec = ffmpeg.avcodec_find_encoder(codecId);
        if (_pCodec == null)
        {
            throw new InvalidOperationException("Codec not found.");
        }

        _pCodecContext            = ffmpeg.avcodec_alloc_context3(_pCodec);
        _pCodecContext->width     = frameSize.Width;
        _pCodecContext->height    = frameSize.Height;
        _pCodecContext->time_base = new AVRational {
            num = 1, den = 1000
        };
        _pCodecContext->pix_fmt = AVPixelFormat.AV_PIX_FMT_YUV420P;
        ffmpeg.av_opt_set(_pCodecContext->priv_data, "preset", "superfast", 0);

        ffmpeg.avcodec_open2(_pCodecContext, _pCodec, null).ThrowExceptionIfError();

        _linesizeY = frameSize.Width;
        _linesizeU = frameSize.Width / 2;
        _linesizeV = frameSize.Width / 2;

        _ySize = _linesizeY * frameSize.Height;
        _uSize = _linesizeU * frameSize.Height / 2;

        _swsContext = null;

        // Allocate a frame
        _frame         = ffmpeg.av_frame_alloc();
        _frame->width  = _pCodecContext->width;
        _frame->height = _pCodecContext->height;
        _frame->format = (int)_pCodecContext->pix_fmt;
        ffmpeg.av_frame_get_buffer(_frame, 32);

        // Create output context for mp4
        AVFormatContext *outputContext;

        ffmpeg.avformat_alloc_output_context2(&outputContext, null, "mp4", null).ThrowExceptionIfError();
        _outputContext = outputContext;
        ffmpeg.avio_open2(&_outputContext->pb, outputPath, ffmpeg.AVIO_FLAG_WRITE, null, null).ThrowExceptionIfError();

        // Create video stream in mp4 container
        _stream = ffmpeg.avformat_new_stream(_outputContext, _pCodec);
        ffmpeg.avcodec_parameters_from_context(_stream->codecpar, _pCodecContext)
        .ThrowExceptionIfError();
        _stream->sample_aspect_ratio = _pCodecContext->sample_aspect_ratio;
        _stream->time_base           = _pCodecContext->time_base;
        ffmpeg.avformat_write_header(_outputContext, null).ThrowExceptionIfError();
    }
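A sketch of the per-frame encode step this constructor prepares for (assuming the same bindings; the method is illustrative, not the class's actual implementation): send the frame, drain packets, rescale to the stream time base, and write:

    private void WriteFrame(AVFrame *frame)
    {
        ffmpeg.avcodec_send_frame(_pCodecContext, frame).ThrowExceptionIfError();

        var packet = ffmpeg.av_packet_alloc();
        try
        {
            // one input frame may yield zero or more output packets
            while (ffmpeg.avcodec_receive_packet(_pCodecContext, packet) == 0)
            {
                packet->stream_index = _stream->index;
                // rescale from the encoder time base (1/1000) to the stream time base
                ffmpeg.av_packet_rescale_ts(packet, _pCodecContext->time_base, _stream->time_base);
                ffmpeg.av_interleaved_write_frame(_outputContext, packet).ThrowExceptionIfError();
            }
        }
        finally
        {
            ffmpeg.av_packet_free(&packet);
        }
    }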
Example #27
        protected Decoder(AVStream *stream) : base(stream)
        {
            codecContext->Codec = FF.avcodec_find_decoder(codecContext->CodecId);
            int resultCode = FF.avcodec_open2(codecContext, codecContext->Codec, null);

            if (resultCode != 0)
            {
                throw new FFmpegException(resultCode);
            }
        }
Example #28
        internal static Decoder Create(AVStream *stream)
        {
            switch (stream->Codec->CodecType)
            {
            case AVMediaType.Audio: return(new AudioDecoder(stream));

            case AVMediaType.Video: return(new VideoDecoder(stream));
            }
            return(null);
        }
Example #29
 internal AudioEncoder(AVStream *stream, AudioFormat inFormat, BitRate bitRate) : base(stream)
 {
     try {
         InFormat = inFormat;
         Init(codecContext->CodecId, bitRate);
     } catch {
         Dispose();
         throw;
     }
 }
Example #30
        /// <summary>
        /// Initializes a new instance of the <see cref="InputStream{TFrame}"/> class.
        /// </summary>
        /// <param name="stream">The multimedia stream.</param>
        /// <param name="owner">The container that owns the stream.</param>
        public InputStream(AVStream *stream, InputContainer owner)
            : base(stream)
        {
            OwnerFile   = owner;
            packetQueue = new ConcurrentQueue <MediaPacket>();

            Type         = typeof(TFrame) == typeof(VideoFrame) ? MediaType.Video : MediaType.None;
            Info         = new StreamInfo(stream, owner);
            decodedFrame = new TFrame();
        }
Example #31
        /// <summary>
        /// Initializes a new instance of the <see cref="VideoStreamInfo"/> class.
        /// </summary>
        /// <param name="stream">A generic stream.</param>
        /// <param name="container">The input container.</param>
        internal unsafe VideoStreamInfo(AVStream *stream, InputContainer container)
            : base(stream, MediaType.Video, container)
        {
            var codec = stream->codec;

            IsInterlaced = codec->field_order != AVFieldOrder.AV_FIELD_PROGRESSIVE &&
                           codec->field_order != AVFieldOrder.AV_FIELD_UNKNOWN;
            FrameSize     = new Size(codec->width, codec->height);
            PixelFormat   = codec->pix_fmt.FormatEnum(11);
            AVPixelFormat = codec->pix_fmt;
        }
Example #32
		internal MediaStream (IntPtr ptr)
		{
			if (ptr == IntPtr.Zero)
				throw new ArgumentException ("Null pointer");

			this.ptr = ptr;
			this.stream = (AVStream*)ptr;

			Language = Marshal.PtrToStringAnsi (new IntPtr (this.stream->language));
			CodecContext = new CodecContext ((IntPtr)this.stream->codec);
		}
Example #33
        /// <summary>
        /// Initializes the internal transcoder -- this creates the input, processing, and output blocks that make
        /// up the video and audio decoding stream.
        /// </summary>
        /// <param name="filePath">The file path.</param>
        /// <param name="inputFormatName">Name of the input format. Leave null or empty to detect automatically</param>
        /// <param name="referer">The referer. Leave null or empty to skip setting it</param>
        /// <param name="userAgent">The user agent. Leave null or empty to skip setting it.</param>
        /// <exception cref="FileFormatException"></exception>
        /// <exception cref="Exception">Could not find stream info
        /// or
        /// Media must contain at least a video or an audio stream</exception>
        /// <exception cref="System.Exception">Could not open file
        /// or
        /// Could not find stream info
        /// or
        /// Media must contain a video stream
        /// or
        /// Media must contain an audio stream
        /// or
        /// Unsupported codec
        /// or
        /// Could not initialize the output conversion context
        /// or
        /// Could not create output codec context from input
        /// or
        /// Could not open codec</exception>
        private void InitializeMedia(string filePath, string inputFormatName, string referer, string userAgent)
        {
            // Create the input format context by opening the file
            InputFormatContext = ffmpeg.avformat_alloc_context();

            AVDictionary* optionsDict = null;

            if (string.IsNullOrWhiteSpace(userAgent) == false)
                ffmpeg.av_dict_set(&optionsDict, "user-agent", userAgent, 0);

            if (string.IsNullOrWhiteSpace(referer) == false)
                ffmpeg.av_dict_set(&optionsDict, "headers", $"Referer:{referer}", 0);

            ffmpeg.av_dict_set_int(&optionsDict, "usetoc", 1, 0);

            { // for m3u8 (HLS) streaming
                // TODO: maybe detect here if it is streaming? I need to test if this negatively affects filesystem files or network files as opposed to RTSP streams and HLS streams
                ffmpeg.av_dict_set_int(&optionsDict, "multiple_requests", 1, 0);
                ffmpeg.av_dict_set_int(&optionsDict, "reconnect", 1, 0);
                ffmpeg.av_dict_set_int(&optionsDict, "reconnect_at_eof", 1, 0);
                ffmpeg.av_dict_set_int(&optionsDict, "reconnect_streamed", 1, 0);
                ffmpeg.av_dict_set_int(&optionsDict, "reconnect_delay_max", (int)Constants.WaitForPlaybackReadyStateTimeout.TotalMilliseconds, 0);
            }


            AVInputFormat* inputFormat = null;

            if (string.IsNullOrWhiteSpace(inputFormatName) == false)
                inputFormat = ffmpeg.av_find_input_format(inputFormatName);

            fixed (AVFormatContext** inputFormatContextRef = &InputFormatContext)
            {
                if (ffmpeg.avformat_open_input(inputFormatContextRef, filePath, inputFormat, &optionsDict) != 0)
                    throw new FileFormatException(string.Format("Could not open stream or file '{0}'", filePath));
            }

            InputFormatContext->iformat->flags |= ffmpeg.AVFMT_FLAG_NOBUFFER;
            InputFormatContext->iformat->flags |= ffmpeg.AVFMT_FLAG_NOFILLIN;

            ffmpeg.av_dict_free(&optionsDict);
            
            // Extract the stream info headers from the file
            if (ffmpeg.avformat_find_stream_info(InputFormatContext, null) != 0)
                throw new Exception("Could not find stream info");

            // search for the audio and video streams
            for (int i = 0; i < InputFormatContext->nb_streams; i++)
            {
                var codecType = InputFormatContext->streams[i]->codec->codec_type;

                if (codecType == AVMediaType.AVMEDIA_TYPE_VIDEO && InputVideoStream == null)
                {
                    InputVideoStream = InputFormatContext->streams[i];
                    continue;
                }

                if (codecType == AVMediaType.AVMEDIA_TYPE_AUDIO && InputAudioStream == null)
                {
                    InputAudioStream = InputFormatContext->streams[i];
                    continue;
                }
            }

            if (InputVideoStream != null)
            {
                this.InitializeVideo();
                this.HasVideo = VideoBitrate > 0 || VideoFrameRate > 0M || VideoFrameWidth > 0 || VideoFrameHeight > 0;
            }

            if (InputAudioStream != null)
            {
                this.InitializeAudio();
                this.HasAudio = AudioBytesPerSample > 0;
            }

            if (HasAudio == false && HasVideo == false)
            {
                throw new Exception("Media must contain at least a video or and audio stream");
            }
            else
            {
                // General Properties here

                NaturalDuration = Convert.ToDecimal(Convert.ToDouble(InputFormatContext->duration) / Convert.ToDouble(ffmpeg.AV_TIME_BASE));
                IsLiveStream = Helper.IsNoPtsValue(InputFormatContext->duration);
                StartTime = Convert.ToDecimal(Convert.ToDouble(InputFormatContext->start_time) / Convert.ToDouble(ffmpeg.AV_TIME_BASE));
                EndTime = StartTime + NaturalDuration;

                RealtimeClock.Seek(StartTime);
            }
        }
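A hedged sketch of the matching teardown for this initialization (illustrative; the class's actual Dispose is not shown):

        private void DestroyMedia()
        {
            // closing the input releases the streams owned by the format context
            fixed (AVFormatContext** inputFormatContextRef = &InputFormatContext)
            {
                ffmpeg.avformat_close_input(inputFormatContextRef);
            }

            InputVideoStream = null;
            InputAudioStream = null;
        }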
Example #34
        public void Close()
        {
            if (!_opened)
                return;
            FFmpegInvoke.av_free(_pConvertedFrame);
            FFmpegInvoke.av_free(_pConvertedFrameBuffer);
            FFmpegInvoke.sws_freeContext(_pConvertContext);

            FFmpegInvoke.av_free(_pDecodedFrame);
            FFmpegInvoke.avcodec_close(_pStream->codec);
            fixed (AVFormatContext** pFormatContext = &_pFormatContext)
            {
                FFmpegInvoke.avformat_close_input(pFormatContext);
            }

            _videoClock = 0;
            _pFormatContext = null;
            _pStream = null;
            _pDecodedFrame = null;
            _pConvertedFrame = null;
            _pConvertedFrameBuffer = null;
            _pConvertContext = null;
            _opened = false;
        }
Example #35
        public void Open(string FileName)
        {
            DecoderConfig.Init();

            AVFormatContext* pFormatContext = FFmpegInvoke.avformat_alloc_context();
            _pFormatContext = pFormatContext;

            if (FFmpegInvoke.avformat_open_input(&pFormatContext, FileName, null, null) != 0)
                throw new Exception("Could not open file");

            if (FFmpegInvoke.avformat_find_stream_info(pFormatContext, null) != 0)
                throw new Exception("Could not find stream info");

            for (int i = 0; i < pFormatContext->nb_streams; i++)
            {
                if (pFormatContext->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
                {
                    _pStream = pFormatContext->streams[i];
                    break;
                }
            }

            if (_pStream == null)
                throw new Exception("Could not found video stream");
            _pStream->codec->workaround_bugs = FFmpegInvoke.FF_BUG_AUTODETECT; // set on the real context; writing to a struct copy has no effect
            AVCodecContext codecContext = *(_pStream->codec);

            _frameduration = 1 / q2d(_pStream->r_frame_rate);
            FrameCount = _pStream->nb_frames;
            Duration = (float)pFormatContext->duration / FFmpegInvoke.AV_TIME_BASE;
            Width = codecContext.width;
            Height = codecContext.height;

            AVPixelFormat sourcePixFmt = codecContext.pix_fmt;
            AVCodecID codecId = codecContext.codec_id;
            var convertToPixFmt = AVPixelFormat.AV_PIX_FMT_RGB24;
            _pConvertContext = FFmpegInvoke.sws_getContext(Width, Height, sourcePixFmt,
                                                                       Width, Height, convertToPixFmt,
                                                                       FFmpegInvoke.SWS_FAST_BILINEAR, null, null, null);

            if (_pConvertContext == null)
                throw new Exception("Could not initialize the conversion context");

            _pConvertedFrame = (AVPicture*)FFmpegInvoke.avcodec_alloc_frame();
            int convertedFrameBufferSize = FFmpegInvoke.avpicture_get_size(convertToPixFmt, Width, Height);
            _pConvertedFrameBuffer = (byte*)FFmpegInvoke.av_malloc((uint)convertedFrameBufferSize);
            FFmpegInvoke.avpicture_fill(_pConvertedFrame, _pConvertedFrameBuffer, convertToPixFmt, Width, Height);

            AVCodec* pCodec = FFmpegInvoke.avcodec_find_decoder(codecId);
            if (pCodec == null)
                throw new Exception("Unsupported codec");

            if (FFmpegInvoke.avcodec_open2(_pStream->codec, pCodec, null) < 0)
                throw new Exception("Could not open codec");

            _pDecodedFrame = FFmpegInvoke.avcodec_alloc_frame();

            _packet = new AVPacket();

            fixed (AVPacket* pPacket = &_packet)
            {
                FFmpegInvoke.av_init_packet(pPacket);
            }

            _opened = true;
        }