Code Example #1
        public IOContext(Stream stream, bool writable = false)
        {
            this.stream = stream ?? throw new ArgumentNullException(nameof(stream));

            ulong paddingLength = writable ? 0ul : ffmpeg.AV_INPUT_BUFFER_PADDING_SIZE; // padding is only required for read (input) buffers

            // note: this can be replaced...
            var buffer = (byte *)ffmpeg.av_malloc(bufferSize + paddingLength);

            // reference to prevent garbage collection
            read  = Read;
            write = Write;
            seek  = Seek;

            Pointer = ffmpeg.avio_alloc_context(
                buffer: buffer,
                buffer_size: bufferSize,
                write_flag: writable ? 1 : 0,
                opaque: null,
                read_packet: read,
                write_packet: write,
                seek: seek
                );

            Pointer->seekable = ffmpeg.AVIO_SEEKABLE_NORMAL; //  | ffmpeg.AVIO_SEEKABLE_TIME;
        }
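The Read, Write and Seek methods referenced above are not part of this example. Below is a minimal sketch of what the read and seek callbacks usually look like against FFmpeg.AutoGen's avio_alloc_context_read_packet / avio_alloc_context_seek signatures; the stream field and the method bodies are assumptions, not this project's code.

        // read up to bufferSize bytes from the managed Stream into FFmpeg's unmanaged buffer
        private int Read(void *opaque, byte *targetBuffer, int bufferSize)
        {
            var managed = new byte[bufferSize];
            int read = stream.Read(managed, 0, bufferSize);
            if (read <= 0)
                return ffmpeg.AVERROR_EOF; // report end-of-stream to libavformat

            Marshal.Copy(managed, 0, (IntPtr)targetBuffer, read);
            return read;
        }

        // translate libavformat seek requests (including the AVSEEK_SIZE probe) to Stream.Seek
        private long Seek(void *opaque, long offset, int whence)
        {
            if (whence == ffmpeg.AVSEEK_SIZE)
                return stream.CanSeek ? stream.Length : -1;

            if (!stream.CanSeek)
                return -1;

            return stream.Seek(offset, (SeekOrigin)whence); // SEEK_SET/CUR/END map onto SeekOrigin values
        }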
Code Example #2
        public MediaStream(Stream baseStream, bool write = false, AVOutputFormat *outputFormat = null)
        {
            if (write && !baseStream.CanWrite)
            {
                throw new ArgumentException("The stream cannot be written to; ensure Stream.CanWrite is true.");
            }

            if (baseStream.CanRead)
            {
                procRead = Read;
            }
            if (write && baseStream.CanWrite)
            {
                procWrite = Write;
            }
            if (baseStream.CanSeek)
            {
                procSeek = Seek;
            }
            this.baseStream = baseStream;

            try {
                formatContext = FF.avformat_alloc_context();
                var buffer = (byte *)FF.av_malloc((IntPtr)bufferLength);
                ioContext = FF.avio_alloc_context(buffer, bufferLength, write, null, procRead, procWrite, procSeek);
                if (write)
                {
                    formatContext->Oformat = outputFormat;
                }
                formatContext->Pb = ioContext;
            } catch {
                Dispose();
                throw;
            }
        }
Code Example #3
 private InputContainer(AVFormatContext *formatContext, avio_alloc_context_read_packet read, avio_alloc_context_seek seek, int bufferSizeLimit)
     : base(formatContext)
 {
     Decoders      = new Decoder[Pointer->nb_streams];
     MaxBufferSize = bufferSizeLimit;
     readCallback  = read;
     seekCallBack  = seek;
 }
Code Example #4
        protected virtual void Dispose(bool disposing)
        {
            if (isDisposed)
            {
                return;
            }

            isDisposed = true;

            videoStream.Dispose();
            videoStream = null;

            while (decoderCommands.TryDequeue(out var _))
            {
            }

            StopDecoding(true);

            if (formatContext != null)
            {
                fixed(AVFormatContext **ptr = &formatContext)
                ffmpeg.avformat_close_input(ptr);
            }

            seekCallback         = null;
            readPacketCallback   = null;
            managedContextBuffer = null;

            // gets freed by libavformat when closing the input
            contextBuffer = null;

            if (frame != null)
            {
                fixed(AVFrame **ptr = &frame)
                ffmpeg.av_frame_free(ptr);
            }

            if (frameRgb != null)
            {
                fixed(AVFrame **ptr = &frameRgb)
                ffmpeg.av_frame_free(ptr);
            }

            if (frameRgbBufferPtr != IntPtr.Zero)
            {
                Marshal.FreeHGlobal(frameRgbBufferPtr);
                frameRgbBufferPtr = IntPtr.Zero;
            }

            while (decodedFrames.TryDequeue(out var f))
            {
                f.Texture.Dispose();
            }
        }
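The snippet above relies on avformat_close_input to release the custom IO buffer. When an AVIOContext created with avio_alloc_context is instead managed by hand (for example, if the input was never successfully opened), a hedged sketch of the cleanup looks like the following; the ioContext field name is an assumption.

            if (ioContext != null)
            {
                // free whatever buffer the context currently holds; libavformat may have
                // replaced the one originally passed to avio_alloc_context
                ffmpeg.av_freep(&ioContext->buffer);

                fixed(AVIOContext **ptr = &ioContext)
                ffmpeg.avio_context_free(ptr);
            }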
Code Example #5
 public unsafe ByteReader()
 {
     _currentPosition = 0;
     _readDelegate    = Read;
     _seekDelegate    = Seek;
     ReadFunc         = new avio_alloc_context_read_packet_func {
         Pointer = Marshal.GetFunctionPointerForDelegate(_readDelegate)
     };
     SeekFunc = new avio_alloc_context_seek_func {
         Pointer = Marshal.GetFunctionPointerForDelegate(_seekDelegate)
     };
 }
Code Example #6
        // sets up libavformat state: creates the AVFormatContext, the frames, etc. to start decoding, but does not actually start the decodingLoop
        private void prepareDecoding()
        {
            const int context_buffer_size = 4096;

            readPacketCallback = readPacket;
            seekCallback       = streamSeekCallbacks;
            // we shouldn't keep a reference to this buffer as it can be freed and replaced by the native libs themselves.
            // https://ffmpeg.org/doxygen/4.1/aviobuf_8c.html#a853f5149136a27ffba3207d8520172a5
            byte *contextBuffer = (byte *)ffmpeg.av_malloc(context_buffer_size);

            ioContext = ffmpeg.avio_alloc_context(contextBuffer, context_buffer_size, 0, (void *)handle.Handle, readPacketCallback, null, seekCallback);

            var fcPtr = ffmpeg.avformat_alloc_context();

            formatContext         = fcPtr;
            formatContext->pb     = ioContext;
            formatContext->flags |= AGffmpeg.AVFMT_FLAG_GENPTS; // required for most HW decoders as they only read `pts`

            int openInputResult = ffmpeg.avformat_open_input(&fcPtr, "dummy", null, null);

            inputOpened = openInputResult >= 0;
            if (!inputOpened)
            {
                throw new InvalidOperationException($"Error opening file or stream: {getErrorMessage(openInputResult)}");
            }

            int findStreamInfoResult = ffmpeg.avformat_find_stream_info(formatContext, null);

            if (findStreamInfoResult < 0)
            {
                throw new InvalidOperationException($"Error finding stream info: {getErrorMessage(findStreamInfoResult)}");
            }

            int streamIndex = ffmpeg.av_find_best_stream(formatContext, AVMediaType.AVMEDIA_TYPE_VIDEO, -1, -1, null, 0);

            if (streamIndex < 0)
            {
                throw new InvalidOperationException($"Couldn't find video stream: {getErrorMessage(streamIndex)}");
            }

            stream            = formatContext->streams[streamIndex];
            timeBaseInSeconds = stream->time_base.GetValue();

            if (stream->duration > 0)
            {
                Duration = stream->duration * timeBaseInSeconds * 1000;
            }
            else
            {
                Duration = formatContext->duration / (double)AGffmpeg.AV_TIME_BASE * 1000;
            }
        }
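The opaque pointer passed above ((void *)handle.Handle) is what lets a static callback find its way back to the managed decoder. A hedged illustration of that pattern follows; the handle is assumed to be an IntPtr produced from a GCHandle, and the readFromStream helper and VideoDecoder type name are hypothetical, not this project's actual code.

            // `handle.Handle` is assumed to come from GCHandle.ToIntPtr(GCHandle.Alloc(this)),
            // kept alive for the decoder's lifetime and released with Free() on disposal.
            private static int readPacket(void *opaque, byte *buffer, int bufferSize)
            {
                // recover the managed decoder instance from the opaque pointer
                var decoder = (VideoDecoder)GCHandle.FromIntPtr((IntPtr)opaque).Target;
                return decoder.readFromStream(buffer, bufferSize); // hypothetical instance helper
            }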
Code Example #7
File: AvStream.cs Project: sss-software/SharpAudio
        public AvStream(Stream stream)
        {
            _stream     = stream;
            _buffer     = new byte[_bufSize];
            this.read_l = new avio_alloc_context_read_packet(ReadPacket);
            this.seek_l = new avio_alloc_context_seek(SeekFunc);
            var inputBuffer = (byte *)ffmpeg.av_malloc((ulong)_bufSize);


            //create an unwritable context
            _context = ffmpeg.avio_alloc_context(inputBuffer,
                                                 _bufSize, 0, null,
                                                 read_l,
                                                 null,
                                                 seek_l
                                                 );
        }
Code Example #8
 public FFmpegMovieReaderFileStreamIoContextWrapper(Stream fileStream)
 {
     fileDataStream     = fileStream;
     managedReadBuffer  = new byte[ReadBufferLength];
     pReadBuffer        = ffmpeg.av_malloc(ReadBufferLength);
     ReadPacketDelegate = ReadPacket;
     SeekPacketDelegate = Seek;
     pIoContext         = ffmpeg.avio_alloc_context(
         (byte *)pReadBuffer,
         ReadBufferLength,
         0,
         pReadBuffer,
         ReadPacketDelegate,
         WritePacketDelegate,
         SeekPacketDelegate);
     pIoContext->direct = 1;
 }
Code Example #9
        /// <summary>
        /// Opens a media container and stream codecs from given stream.
        /// </summary>
        /// <param name="stream">A stream of the multimedia file.</param>
        /// <param name="options">The media settings.</param>
        /// <returns>A new instance of the <see cref="InputContainer"/> class.</returns>
        public static InputContainer LoadStream(Stream stream, MediaOptions options)
        {
            return(MakeContainer(null, options, context =>
            {
                var avioStream = new AvioStream(stream);

                // Prevents garbage collection
                readCallback = avioStream.Read;
                seekCallback = avioStream.Seek;

                int bufferLength = 4096;
                var avioBuffer = (byte *)ffmpeg.av_malloc((ulong)bufferLength);

                context->pb = ffmpeg.avio_alloc_context(avioBuffer, bufferLength, 0, null, readCallback, null, seekCallback);
                if (context->pb == null)
                {
                    throw new FFmpegException("Cannot allocate AVIOContext.");
                }
            }));
        }
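A hypothetical call site for the factory above; the MediaOptions constructor used here is an assumption.

        using var fileStream = File.OpenRead("video.mp4");
        var container = InputContainer.LoadStream(fileStream, new MediaOptions());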
Code Example #10
File: FormatContext.cs Project: yvanoff/nitrosharp

        public FormatContext(Stream stream)
        {
            _stream        = stream;
            _readFunc      = IoReadPacket;
            _seekFunc      = IoSeek;
            _getFormatFunc = GetFormat;
            // Both the buffer and the IO context are freed by avformat_close_input.
            byte *       ioBuffer  = (byte *)ffmpeg.av_malloc(IoBufferSize);
            AVIOContext *ioContext = ffmpeg.avio_alloc_context(
                ioBuffer, IoBufferSize,
                write_flag: 0, opaque: null,
                _readFunc, null, _seekFunc
                );

            AVFormatContext *ctx = ffmpeg.avformat_alloc_context();

            ctx->pb = ioContext;
            _ctx    = ctx;

            _recvPacket = ffmpeg.av_packet_alloc();
            CheckResult(ffmpeg.avformat_open_input(&ctx, string.Empty, null, null));
            CheckResult(ffmpeg.avformat_find_stream_info(ctx, null));
        }
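What usually follows a constructor like the one above is stream selection on the opened context. A minimal sketch using calls from the same binding (not necessarily how nitrosharp structures it):

            int videoIndex = ffmpeg.av_find_best_stream(_ctx, AVMediaType.AVMEDIA_TYPE_VIDEO, -1, -1, null, 0);
            int audioIndex = ffmpeg.av_find_best_stream(_ctx, AVMediaType.AVMEDIA_TYPE_AUDIO, -1, -1, null, 0);
            if (videoIndex < 0 && audioIndex < 0)
                throw new InvalidOperationException("No decodable audio or video stream found.");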
Code Example #11
        // sets up libavformat state: creates the AVFormatContext, the frames, etc. to start decoding, but does not actually start the decodingLoop
        private void prepareDecoding()
        {
            const int context_buffer_size = 4096;

            // the first call to FFmpeg will throw an exception if the libraries cannot be found
            // this will be safely handled in StartDecoding()
            var fcPtr = ffmpeg.avformat_alloc_context();

            formatContext        = fcPtr;
            contextBuffer        = (byte *)ffmpeg.av_malloc(context_buffer_size);
            managedContextBuffer = new byte[context_buffer_size];
            readPacketCallback   = readPacket;
            seekCallback         = streamSeekCallbacks;
            formatContext->pb    = ffmpeg.avio_alloc_context(contextBuffer, context_buffer_size, 0, (void *)handle.Handle, readPacketCallback, null, seekCallback);

            int openInputResult = ffmpeg.avformat_open_input(&fcPtr, "dummy", null, null);

            inputOpened = openInputResult >= 0;
            if (!inputOpened)
            {
                throw new InvalidOperationException($"Error opening file or stream: {getErrorMessage(openInputResult)}");
            }

            int findStreamInfoResult = ffmpeg.avformat_find_stream_info(formatContext, null);

            if (findStreamInfoResult < 0)
            {
                throw new InvalidOperationException($"Error finding stream info: {getErrorMessage(findStreamInfoResult)}");
            }

            var nStreams = formatContext->nb_streams;

            for (var i = 0; i < nStreams; ++i)
            {
                stream = formatContext->streams[i];

                codecParams = *stream->codecpar;

                if (codecParams.codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
                {
                    duration = stream->duration <= 0 ? formatContext->duration : stream->duration;

                    timeBaseInSeconds = stream->time_base.GetValue();
                    var codecPtr = ffmpeg.avcodec_find_decoder(codecParams.codec_id);
                    if (codecPtr == null)
                    {
                        throw new InvalidOperationException($"Couldn't find codec with id: {codecParams.codec_id}");
                    }

                    int openCodecResult = ffmpeg.avcodec_open2(stream->codec, codecPtr, null);
                    if (openCodecResult < 0)
                    {
                        throw new InvalidOperationException($"Error trying to open codec with id {codecParams.codec_id}: {getErrorMessage(openCodecResult)}");
                    }

                    break;
                }
            }

            prepareFilters();
        }
Code Example #12
        // sets up libavformat state: creates the AVFormatContext, the frames, etc. to start decoding, but does not actually start the decodingLoop
        private void prepareDecoding()
        {
            const int context_buffer_size = 4096;

            var fcPtr = ffmpeg.avformat_alloc_context();

            formatContext        = fcPtr;
            contextBuffer        = (byte *)ffmpeg.av_malloc(context_buffer_size);
            managedContextBuffer = new byte[context_buffer_size];
            readPacketCallback   = readPacket;
            seekCallback         = seek;
            formatContext->pb    = ffmpeg.avio_alloc_context(contextBuffer, context_buffer_size, 0, null, readPacketCallback, null, seekCallback);
            if (ffmpeg.avformat_open_input(&fcPtr, "dummy", null, null) < 0)
            {
                throw new Exception("Error opening file.");
            }

            if (ffmpeg.avformat_find_stream_info(formatContext, null) < 0)
            {
                throw new Exception("Could not find stream info.");
            }

            var nStreams = formatContext->nb_streams;

            for (var i = 0; i < nStreams; ++i)
            {
                stream = formatContext->streams[i];

                codecParams = *stream->codecpar;
                if (codecParams.codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
                {
                    timeBaseInSeconds = stream->time_base.GetValue();
                    var codecPtr = ffmpeg.avcodec_find_decoder(codecParams.codec_id);
                    if (codecPtr == null)
                    {
                        throw new Exception("Could not find codec.");
                    }

                    if (ffmpeg.avcodec_open2(stream->codec, codecPtr, null) < 0)
                    {
                        throw new Exception("Could not open codec.");
                    }

                    frame    = ffmpeg.av_frame_alloc();
                    frameRgb = ffmpeg.av_frame_alloc();

                    uncompressedFrameSize = ffmpeg.av_image_get_buffer_size(AVPixelFormat.AV_PIX_FMT_RGBA, codecParams.width, codecParams.height, 1);
                    frameRgbBufferPtr     = Marshal.AllocHGlobal(uncompressedFrameSize);

                    var dataArr4     = *(byte_ptrArray4 *)&frameRgb->data;
                    var linesizeArr4 = *(int_array4 *)&frameRgb->linesize;
                    var result       = ffmpeg.av_image_fill_arrays(ref dataArr4, ref linesizeArr4, (byte *)frameRgbBufferPtr, AVPixelFormat.AV_PIX_FMT_RGBA, codecParams.width, codecParams.height, 1);
                    if (result < 0)
                    {
                        throw new Exception("Could not fill image arrays");
                    }

                    for (uint j = 0; j < byte_ptrArray4.Size; ++j)
                    {
                        frameRgb->data[j]     = dataArr4[j];
                        frameRgb->linesize[j] = linesizeArr4[j];
                    }

                    break;
                }
            }
        }
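The RGBA buffer wired into frameRgb above is typically filled by a libswscale conversion of each decoded frame. A hedged sketch of that step follows; holding the sws context in a local and reusing codecParams for the dimensions are assumptions, not this project's exact code.

            // convert the decoded `frame` into the preallocated RGBA `frameRgb`
            var swsContext = ffmpeg.sws_getContext(
                codecParams.width, codecParams.height, (AVPixelFormat)codecParams.format,
                codecParams.width, codecParams.height, AVPixelFormat.AV_PIX_FMT_RGBA,
                ffmpeg.SWS_BILINEAR, null, null, null);

            ffmpeg.sws_scale(swsContext, frame->data, frame->linesize, 0, codecParams.height,
                             frameRgb->data, frameRgb->linesize);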
Code Example #13
        private unsafe void Ffmpeg_Initialize()
        {
            var inputBuffer = (byte *)ffmpeg.av_malloc((ulong)fsStreamSize);

            avioRead = Read;
            avioSeek = Seek;

            ff.ioContext = ffmpeg.avio_alloc_context(inputBuffer, fsStreamSize, 0, null, avioRead, null, avioSeek);

            if (ff.ioContext == null)
            {
                throw new FormatException("FFMPEG: Unable to allocate IO stream context.");
            }

            ff.format_context         = ffmpeg.avformat_alloc_context();
            ff.format_context->pb     = ff.ioContext;
            ff.format_context->flags |= ffmpeg.AVFMT_FLAG_CUSTOM_IO;

            fixed(AVFormatContext **fmt2 = &ff.format_context)
            if (ffmpeg.avformat_open_input(fmt2, "", null, null) != 0)
            {
                throw new FormatException("FFMPEG: Could not open media stream.");
            }

            if (ffmpeg.avformat_find_stream_info(ff.format_context, null) < 0)
            {
                throw new FormatException("FFMPEG: Could not retrieve stream info from IO stream");
            }

            // Find the index of the first audio stream
            this.stream_index = -1;
            for (int i = 0; i < ff.format_context->nb_streams; i++)
            {
                if (ff.format_context->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_AUDIO)
                {
                    stream_index = i;
                    break;
                }
            }
            if (stream_index == -1)
            {
                throw new FormatException("FFMPEG: Could not retrieve audio stream from IO stream.");
            }

            ff.av_stream = ff.format_context->streams[stream_index];
            ff.av_codec  = ff.av_stream->codec;

            if (ffmpeg.avcodec_open2(ff.av_codec, ffmpeg.avcodec_find_decoder(ff.av_codec->codec_id), null) < 0)
            {
                throw new FormatException("FFMPEG: Failed to open decoder for stream #{stream_index} in IO stream.");
            }

            // Fixes "[SWR @ 0x2192200] Input channel count and layout are unset" error.
            if (ff.av_codec->channel_layout == 0)
            {
                ff.av_codec->channel_layout = (ulong)ffmpeg.av_get_default_channel_layout(ff.av_codec->channels);
            }

            ff.av_codec->request_channel_layout = (ulong)ffmpeg.av_get_default_channel_layout(ff.av_codec->channels);
            ff.av_codec->request_sample_fmt     = _DESIRED_SAMPLE_FORMAT;

            SetAudioFormat();

            ff.swr_context = ffmpeg.swr_alloc_set_opts(null,
                                                       ffmpeg.av_get_default_channel_layout(_DESIRED_CHANNEL_COUNT),
                                                       _DESIRED_SAMPLE_FORMAT,
                                                       _DESIRED_SAMPLE_RATE,
                                                       (long)ff.av_codec->channel_layout,
                                                       ff.av_codec->sample_fmt,
                                                       ff.av_codec->sample_rate,
                                                       0,
                                                       null);

            ffmpeg.swr_init(ff.swr_context);

            if (ffmpeg.swr_is_initialized(ff.swr_context) == 0)
            {
                throw new FormatException($"FFMPEG: Resampler has not been properly initialized");
            }

            ff.av_src_packet = ffmpeg.av_packet_alloc();
            ff.av_src_frame  = ffmpeg.av_frame_alloc();

            this.tempSampleBuf = new byte[(int)(_audioFormat.SampleRate * _audioFormat.Channels * 2)];

            this._slidestream = new CircularBuffer(tempSampleBuf.Length);

            AVDictionaryEntry *tag = null;

            this._audioMetaData      = new AudioMetadata();
            _audioMetaData.ExtraData = new Dictionary <string, string>();


            do
            {
                tag = ffmpeg.av_dict_get(ff.format_context->metadata, "", tag, ffmpeg.AV_DICT_IGNORE_SUFFIX);

                if (tag == null)
                {
                    break;
                }

                var key = Marshal.PtrToStringAuto((IntPtr)tag->key);
                var val = Marshal.PtrToStringAuto((IntPtr)tag->value);

                switch (key.ToLowerInvariant().Trim())
                {
                case "title":
                    _audioMetaData.Title = val;
                    break;

                case "artist":
                case "artists":
                case "author":
                case "composer":
                    if (_audioMetaData.Artists is null)
                    {
                        _audioMetaData.Artists = new List <string>();
                    }

                    _audioMetaData.Artists.AddRange(val.Split(',', StringSplitOptions.RemoveEmptyEntries).Select(x => x.Trim()));
                    break;

                case "album":
                    _audioMetaData.Album = val;
                    break;

                case "genre":
                    if (_audioMetaData.Genre is null)
                    {
                        _audioMetaData.Genre = new List <string>();
                    }

                    _audioMetaData.Genre.AddRange(val.Split(',', StringSplitOptions.RemoveEmptyEntries).Select(x => x.Trim()));
                    break;

                case "year":
                    _audioMetaData.Year = val;
                    break;

                default:
                    _audioMetaData.ExtraData.Add(key, val);
                    break;
                }
            } while (true);

            if (_audioMetaData.Artists != null)
            {
                _audioMetaData.Artists = _audioMetaData.Artists.GroupBy(x => x).Select(y => y.First()).ToList();
            }
        }
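The packet, frame and resampler allocated above feed a typical read/decode/resample loop. A hedged sketch of that loop follows; the output handling is illustrative and assumes tempSampleBuf is large enough for one resampled frame.

            while (ffmpeg.av_read_frame(ff.format_context, ff.av_src_packet) >= 0)
            {
                if (ff.av_src_packet->stream_index != stream_index)
                {
                    ffmpeg.av_packet_unref(ff.av_src_packet);
                    continue;
                }

                ffmpeg.avcodec_send_packet(ff.av_codec, ff.av_src_packet);
                ffmpeg.av_packet_unref(ff.av_src_packet);

                while (ffmpeg.avcodec_receive_frame(ff.av_codec, ff.av_src_frame) == 0)
                {
                    fixed(byte *pOut = tempSampleBuf)
                    {
                        byte *outPtr = pOut;

                        // resample into the interleaved desired format; out_count is given in samples per channel
                        int converted = ffmpeg.swr_convert(ff.swr_context, &outPtr, ff.av_src_frame->nb_samples,
                                                           ff.av_src_frame->extended_data, ff.av_src_frame->nb_samples);

                        // `converted` samples per channel are now in tempSampleBuf,
                        // ready to be pushed into _slidestream
                    }

                    ffmpeg.av_frame_unref(ff.av_src_frame);
                }
            }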