Example #1
        // sets up libavformat state: creates the AVFormatContext, the frames, etc. to start decoding, but does not actually start the decodingLoop
        private void prepareDecoding()
        {
            const int context_buffer_size = 4096;

            readPacketCallback = readPacket;
            seekCallback       = streamSeekCallbacks;
            // we shouldn't keep a reference to this buffer as it can be freed and replaced by the native libs themselves.
            // https://ffmpeg.org/doxygen/4.1/aviobuf_8c.html#a853f5149136a27ffba3207d8520172a5
            byte *contextBuffer = (byte *)ffmpeg.av_malloc(context_buffer_size);

            ioContext = ffmpeg.avio_alloc_context(contextBuffer, context_buffer_size, 0, (void *)handle.Handle, readPacketCallback, null, seekCallback);

            var fcPtr = ffmpeg.avformat_alloc_context();

            formatContext         = fcPtr;
            formatContext->pb     = ioContext;
            formatContext->flags |= AGffmpeg.AVFMT_FLAG_GENPTS; // required for most HW decoders as they only read `pts`

            int openInputResult = ffmpeg.avformat_open_input(&fcPtr, "dummy", null, null);

            inputOpened = openInputResult >= 0;
            if (!inputOpened)
            {
                throw new InvalidOperationException($"Error opening file or stream: {getErrorMessage(openInputResult)}");
            }

            int findStreamInfoResult = ffmpeg.avformat_find_stream_info(formatContext, null);

            if (findStreamInfoResult < 0)
            {
                throw new InvalidOperationException($"Error finding stream info: {getErrorMessage(findStreamInfoResult)}");
            }

            int streamIndex = ffmpeg.av_find_best_stream(formatContext, AVMediaType.AVMEDIA_TYPE_VIDEO, -1, -1, null, 0);

            if (streamIndex < 0)
            {
                throw new InvalidOperationException($"Couldn't find video stream: {getErrorMessage(streamIndex)}");
            }

            stream            = formatContext->streams[streamIndex];
            timeBaseInSeconds = stream->time_base.GetValue();

            if (stream->duration > 0)
            {
                Duration = stream->duration * timeBaseInSeconds * 1000;
            }
            else
            {
                Duration = formatContext->duration / (double)AGffmpeg.AV_TIME_BASE * 1000;
            }
        }
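The avio_alloc_context call above only wires up the readPacket and streamSeekCallbacks delegates; their bodies are not shown. Below is a minimal sketch of what such callbacks might look like, assuming opaque carries a GCHandle to a managed Stream (the original uses its own handle type) and using the FFmpeg.AutoGen delegate signatures avio_alloc_context_read_packet / avio_alloc_context_seek; Stream.Read(Span<byte>) requires a modern .NET runtime.

        // Sketch only: read bytes for libavformat from the managed stream stored in `opaque`.
        private static unsafe int readPacket(void* opaque, byte* buf, int bufSize)
        {
            var stream = (Stream)GCHandle.FromIntPtr((IntPtr)opaque).Target;
            int read = stream.Read(new Span<byte>(buf, bufSize));
            return read > 0 ? read : ffmpeg.AVERROR_EOF; // zero bytes read signals end of stream
        }

        // Sketch only: seek within the managed stream; AVSEEK_SIZE asks for the total length.
        private static unsafe long streamSeekCallbacks(void* opaque, long offset, int whence)
        {
            var stream = (Stream)GCHandle.FromIntPtr((IntPtr)opaque).Target;
            if (!stream.CanSeek)
                return -1;
            if (whence == ffmpeg.AVSEEK_SIZE)
                return stream.Length;
            return stream.Seek(offset, (SeekOrigin)whence); // SEEK_SET/CUR/END map to Begin/Current/End
        }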
Example #2
        private void ReleaseTheUnmanagedResources()
        {
            if (this.avFormatContextPtr != null)
            {
                ffmpeg.avformat_free_context(this.avFormatContextPtr);
                this.avFormatContextPtr = null;
            }

            if (this.avioContextPtr != null)
            {
                ffmpeg.avio_close(avioContextPtr);
                this.avioContextPtr = null;
            }
        }
Example #3
        public unsafe void Dispose()
        {
            GC.SuppressFinalize(this);
            if (_context != null)
            {
                //free the allocated buffer
                //note: the internal buffer could have changed, and be != _buffer
                FfmpegCalls.AvFree((IntPtr)_context->buffer);

                //free the context itself
                FfmpegCalls.AvFree((IntPtr)_context);
                _context = null;
            }
        }
Example #4
        private static unsafe MediaIO FromStream(Stream stream, int writeFlag, int bufferSize = 32768)
        {
            byte *buffer = (byte *)av_malloc((ulong)bufferSize);

            if (buffer == null)
            {
                throw FFmpegException.NoMemory("Failed to alloc MediaIO buffer");
            }
            var callbackObject = new
            {
                ReadPacket  = new avio_alloc_context_read_packet(Read),
                WritePacket = new avio_alloc_context_write_packet(Write),
                Seek        = new avio_alloc_context_seek(Seek)
            };
            AVIOContext *ctx = avio_alloc_context(buffer, bufferSize, writeFlag,
                                                  opaque: null,
                                                  read_packet: callbackObject.ReadPacket,
                                                  write_packet: callbackObject.WritePacket,
                                                  seek: callbackObject.Seek);

            if (ctx == null)
            {
                throw FFmpegException.NoMemory("Failed to alloc AVIOContext");
            }

            return(new StreamMediaIO(ctx, isOwner: true, callbackObject));

            int Read(void *opaque, byte *buffer, int length)
            {
                int c = stream.Read(new Span <byte>(buffer, length));

                return(c == 0 ? AVERROR_EOF : c);
            }

            int Write(void *opaque, byte *buffer, int length)
            {
                stream.Write(new Span <byte>(buffer, length));
                return(length);
            }

            long Seek(void *opaque, long position, int origin) => (MediaIOSeek)origin switch
            {
                MediaIOSeek.Begin => stream.Seek(position, SeekOrigin.Begin),
                MediaIOSeek.Current => stream.Seek(position, SeekOrigin.Current),
                MediaIOSeek.End => stream.Seek(position, SeekOrigin.End),
                MediaIOSeek.Size => stream.Length,
                _ => throw new NotSupportedException(),
            };
        }
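A MediaIO created this way is typically attached to a format context before avformat_open_input. The following is only a sketch of such a call site; the Pointer property and the surrounding error handling are assumptions about the wrapper, not part of the code above:

        // Hypothetical call site (inside the same class, since FromStream is private).
        private static unsafe AVFormatContext* OpenWithCustomIO(Stream input)
        {
            MediaIO io = FromStream(input, writeFlag: 0);
            AVFormatContext *fmt = avformat_alloc_context();

            fmt->pb     = io.Pointer;              // `Pointer` assumed to expose the wrapped AVIOContext*
            fmt->flags |= AVFMT_FLAG_CUSTOM_IO;    // we own pb; libavformat must not close it

            int err = avformat_open_input(&fmt, string.Empty, null, null);
            if (err < 0)
            {
                throw new InvalidOperationException($"avformat_open_input failed: {err}");
            }
            return fmt;
        }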
Example #5
 public void Dispose()
 {
     if (_ioContext != null)
     {
         var ioContext = _ioContext;
         if (usedAvioOpen)
         {
             ffmpeg.avio_close(ioContext);
         }
         else
         {
             ffmpeg.avio_context_free(&ioContext);
         }
         _ioContext = null;
     }
 }
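Note that in the avio_alloc_context branch above, avio_context_free releases only the AVIOContext struct itself. Following FFmpeg's avio_reading example, the internal buffer (which libavformat may have reallocated) has to be freed separately, roughly like this sketch:

     // Sketch: free the (possibly replaced) internal buffer before releasing the context.
     ffmpeg.av_freep(&ioContext->buffer);
     ffmpeg.avio_context_free(&ioContext);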
Example #6
        public AvStream(Stream stream)
        {
            _stream     = stream;
            _buffer     = new byte[_bufSize];
            this.read_l = new avio_alloc_context_read_packet(ReadPacket);
            this.seek_l = new avio_alloc_context_seek(SeekFunc);
            var inputBuffer = (byte *)ffmpeg.av_malloc((ulong)_bufSize);


            // create a read-only (non-writable) AVIO context
            _context = ffmpeg.avio_alloc_context(inputBuffer,
                                                 _bufSize, 0, null,
                                                 read_l,
                                                 null,
                                                 seek_l
                                                 );
        }
Example #7
        public unsafe AvioContext(
            FfmpegCalls.AvioReadData readDataCallback,
            FfmpegCalls.AvioSeek seekCallback,
            FfmpegCalls.AvioWriteData writeDataCallback)
        {
            _readDataCallback  = readDataCallback;
            _seekCallback      = seekCallback;
            _writeDataCallback = writeDataCallback;

            //make sure that the buffer won't be disposed
            //the buffer may change. we always have to free _context->buffer
            _buffer = new AvioBuffer {
                SuppressAvFree = true
            };
            _context = FfmpegCalls.AvioAllocContext(_buffer, _writeDataCallback != null, IntPtr.Zero,
                                                    _readDataCallback, _writeDataCallback, _seekCallback);
        }
Example #8
 public FFmpegMovieReaderFileStreamIoContextWrapper(Stream fileStream)
 {
     fileDataStream     = fileStream;
     managedReadBuffer  = new byte[ReadBufferLength];
     pReadBuffer        = ffmpeg.av_malloc(ReadBufferLength);
     ReadPacketDelegate = ReadPacket;
     SeekPacketDelegate = Seek;
     pIoContext         = ffmpeg.avio_alloc_context(
         (byte *)pReadBuffer,
         ReadBufferLength,
         0,
         pReadBuffer,
         ReadPacketDelegate,
         WritePacketDelegate,
         SeekPacketDelegate);
     pIoContext->direct = 1;
 }
Example #9
        public AVIOStream(Stream stream, FileAccess access)
        {
            this.stream = stream;
            Access      = access;
            if (CanRead && !stream.CanRead)
            {
                throw new ArgumentException("Can't read from stream");
            }
            if (CanWrite && !stream.CanWrite)
            {
                throw new ArgumentException("Can't write to stream");
            }

            byte *ioBuffer = (byte *)ffmpeg.av_malloc(DefaultBufferSize);

            if (ioBuffer == null)
            {
                throw new FFmpegException(ffmpeg.AVERROR(ffmpeg.ENOMEM), "Failed to allocate I/O buffer");
            }
            int writeFlag = 0;
            avio_alloc_context_read_packet  readPacket  = null;
            avio_alloc_context_write_packet writePacket = null;

            if (CanWrite)
            {
                writeFlag   = 1;
                writePacket = WritePacket;
                writePacketCallbackHandle = GCHandle.Alloc(writePacket);
            }
            if (CanRead)
            {
                readPacket = ReadPacket;
                readPacketCallbackHandle = GCHandle.Alloc(readPacket);
            }
            ioContext = ffmpeg.avio_alloc_context(ioBuffer, (int)DefaultBufferSize, writeFlag, null, readPacket, writePacket, null);
            if (ioContext == null)
            {
                ffmpeg.av_free(ioBuffer);
                throw new FFmpegException(ffmpeg.AVERROR(ffmpeg.ENOMEM), "Failed to allocate I/O context");
            }
        }
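The constructor does not show the corresponding cleanup; a possible Dispose counterpart is sketched below (field names follow the constructor above, and the internal buffer is freed explicitly because libavformat may have replaced it):

        // Sketch of a matching Dispose: free the (possibly replaced) buffer, the context,
        // and the GCHandles that keep the callback delegates alive.
        public unsafe void Dispose()
        {
            if (ioContext != null)
            {
                var ctx = ioContext;
                ffmpeg.av_freep(&ctx->buffer);
                ffmpeg.avio_context_free(&ctx);
                ioContext = null;
            }
            if (readPacketCallbackHandle.IsAllocated)
            {
                readPacketCallbackHandle.Free();
            }
            if (writePacketCallbackHandle.IsAllocated)
            {
                writePacketCallbackHandle.Free();
            }
        }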
Example #10
        public void Dispose(bool disposing)
        {
            if (isDisposed)
            {
                return;
            }

            // NOTE: libavformat may have replaced the internal buffer since allocation,
            // so free whatever Pointer->buffer currently points to, not the buffer we allocated.

            ffmpeg.av_freep(&Pointer->buffer); // free the current buffer

            fixed(AVIOContext **p = &Pointer)
            {
                ffmpeg.avio_context_free(p);
            }

            Pointer = null;

            isDisposed = true;
        }
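Dispose(bool) is normally paired with a public Dispose() and a finalizer; a conventional sketch is shown below (the class name is hypothetical):

        // Conventional companions for the disposal pattern above (class name assumed).
        public void Dispose()
        {
            Dispose(true);
            GC.SuppressFinalize(this);
        }

        ~IOContextWrapper()
        {
            Dispose(false);
        }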
Example #11
        protected override void Dispose(bool disposing)
        {
            if (disposing)
            {
                if (baseStream != null)
                {
                    baseStream.Dispose();
                }
            }

            if (formatContext != null)
            {
                FF.avformat_close_input(ref formatContext);
            }

            if (ioContext != null)
            {
                FF.av_freep(&ioContext->Buffer);
                FF.av_free(ioContext);
                ioContext = null;
            }
        }
Example #12
        // ReSharper restore PrivateFieldCanBeConvertedToLocalVariable

        public FormatContext(Stream stream)
        {
            _stream        = stream;
            _readFunc      = IoReadPacket;
            _seekFunc      = IoSeek;
            _getFormatFunc = GetFormat;
            // Both the buffer and the IO context are freed by avformat_close_input.
            byte *       ioBuffer  = (byte *)ffmpeg.av_malloc(IoBufferSize);
            AVIOContext *ioContext = ffmpeg.avio_alloc_context(
                ioBuffer, IoBufferSize,
                write_flag: 0, opaque: null,
                _readFunc, null, _seekFunc
                );

            AVFormatContext *ctx = ffmpeg.avformat_alloc_context();

            ctx->pb = ioContext;
            _ctx    = ctx;

            _recvPacket = ffmpeg.av_packet_alloc();
            CheckResult(ffmpeg.avformat_open_input(&ctx, string.Empty, null, null));
            CheckResult(ffmpeg.avformat_find_stream_info(ctx, null));
        }
Example #13
        public MediaStream(Stream baseStream, bool write = false, AVOutputFormat *outputFormat = null)
        {
            if (write && !baseStream.CanWrite)
            {
                throw new ArgumentException("The stream cannot be written to; make sure Stream.CanWrite is true");
            }

            if (baseStream.CanRead)
            {
                procRead = Read;
            }
            if (write && baseStream.CanWrite)
            {
                procWrite = Write;
            }
            if (baseStream.CanSeek)
            {
                procSeek = Seek;
            }
            this.baseStream = baseStream;

            try {
                formatContext = FF.avformat_alloc_context();
                buffer        = (byte *)FF.av_malloc((IntPtr)bufferLength);
                ioContext     = FF.avio_alloc_context(buffer, bufferLength, write, null, procRead, procWrite, procSeek);
                if (write)
                {
                    formatContext->Oformat = outputFormat;
                    formatContext->Flags   = AVFmtFlag.CustomIO;
                }
                formatContext->Pb = ioContext;
            } catch {
                Dispose();
                throw;
            }
        }
Example #14
/**
 * Open an output file and the required encoder.
 * Also set some basic encoder parameters.
 * Some of these parameters are based on the input file's parameters.
 * @param      filename              File to be opened
 * @param      input_codec_context   Codec context of input file
 * @param[out] output_format_context Format context of output file
 * @param[out] output_codec_context  Codec context of output file
 * @return Error code (0 if successful)
 */
    int open_output_file(string filename, AVCodecContext *input_codec_context,
                         AVFormatContext **output_format_context, AVCodecContext **output_codec_context)
    {
        AVCodecContext *avctx             = null;
        AVIOContext *   output_io_context = null;
        AVStream *      stream            = null;
        AVCodec *       output_codec      = null;
        int             error;

        /* Open the output file to write to it. */
        if ((error = avio_open(&output_io_context, filename,
                               AVIO_FLAG_WRITE)) < 0)
        {
            Console.WriteLine($"error: Could not open output file '{filename}' (error '{LibAVErrorToString(error)}')");
            return(error);
        }

        /* Create a new format context for the output container format. */
        if ((*output_format_context = avformat_alloc_context()) == null)
        {
            Console.WriteLine("error: Could not allocate output format context");
            return(AVERROR(ENOMEM));
        }

        /* Associate the output file (pointer) with the container format context. */
        (*output_format_context)->pb = output_io_context;

        /* Guess the desired container format based on the file extension. */
        if (((*output_format_context)->oformat = av_guess_format(null, filename, null)) == null)
        {
            Console.WriteLine("error: Could not find output file format");
            goto cleanup;
        }

        if (((*output_format_context)->url = av_strdup(filename)) == null)
        {
            Console.WriteLine("error: Could not allocate url.");
            error = AVERROR(ENOMEM);
            goto cleanup;
        }
        /* Find the encoder to be used by its name. */
        if ((output_codec = avcodec_find_encoder(AVCodecID.AV_CODEC_ID_VORBIS)) == null)
        {
            Console.WriteLine("error: Could not find a vorbis encoder.");
            goto cleanup;
        }
        /* Create a new audio stream in the output file container. */
        if ((stream = avformat_new_stream(*output_format_context, null)) == null)
        {
            Console.WriteLine("error: Could not create new stream");
            error = AVERROR(ENOMEM);
            goto cleanup;
        }
        avctx = avcodec_alloc_context3(output_codec);
        if (avctx == null)
        {
            Console.WriteLine("error: Could not allocate an encoding context");
            error = AVERROR(ENOMEM);
            goto cleanup;
        }

        /* Set the basic encoder parameters.
         * The input file's sample rate is used to avoid a sample rate conversion. */

        /* NOTE: These parameters are tailored for vorbis.
         * See https://ffmpeg.org/ffmpeg-codecs.html#libvorbis
         * Other codecs may need different parameters */
        avctx->channels       = 2;
        avctx->channel_layout = (ulong)av_get_default_channel_layout((int)avctx->channels);
        avctx->sample_rate    = input_codec_context->sample_rate;
        avctx->sample_fmt     = output_codec->sample_fmts[0];
        avctx->global_quality = 7;

        /* Set the sample rate for the container. */
        avctx->time_base.den = input_codec_context->sample_rate;
        avctx->time_base.num = 1;

        /* Some container formats (like MP4) require global headers to be present.
         * Mark the encoder so that it behaves accordingly. */
        if (((*output_format_context)->oformat->flags & AVFMT_GLOBALHEADER) != 0)
        {
            avctx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
        }

        /* Open the encoder for the audio stream to use it later. */
        if ((error = avcodec_open2(avctx, output_codec, null)) < 0)
        {
            Console.WriteLine($"error: Could not open output codec (error '{LibAVErrorToString(error)}')");
            goto cleanup;
        }

        error = avcodec_parameters_from_context(stream->codecpar, avctx);
        if (error < 0)
        {
            Console.WriteLine("error: Could not initialize stream parameters");
            goto cleanup;
        }

        /* Save the encoder context for easier access later. */
        *output_codec_context = avctx;
        return(0);

cleanup:
        avcodec_free_context(&avctx);
        avio_closep(&(*output_format_context)->pb);
        avformat_free_context(*output_format_context);
        *output_format_context = null;
        return(error < 0 ? error : AVERROR_EXIT);
    }
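A hypothetical call site for the helper above (input_codec_context is assumed to come from opening and decoding the input file elsewhere; LibAVErrorToString is the helper already used inside open_output_file):

    // Hypothetical call site for open_output_file.
    AVFormatContext *output_format_context = null;
    AVCodecContext  *output_codec_context  = null;
    int err = open_output_file("output.ogg", input_codec_context,
                               &output_format_context, &output_codec_context);
    if (err < 0)
    {
        Console.WriteLine($"error: open_output_file failed ('{LibAVErrorToString(err)}')");
    }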
Example #15
 public static extern int avio_close_dyn_buf(AVIOContext *s, byte **pbuffer);
Example #16
 public static extern int av_append_packet(AVIOContext *s, AVPacket *pkt, int size);
Example #17
 public static extern int av_probe_input_buffer(AVIOContext *pb, AVInputFormat **fmt, byte *url, void *logctx, uint offset, uint max_probe_size);
Example #18
        public AudioDecoder(DecoderSettings settings, string path, Stream IO)
        {
            m_settings = settings;

            _path = path;

            m_stream = (IO != null) ? IO : new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read);

            switch (Environment.OSVersion.Platform)
            {
            case PlatformID.Win32NT:
            case PlatformID.Win32S:
            case PlatformID.Win32Windows:
                var myPath  = new Uri(typeof(AudioDecoder).Assembly.CodeBase).LocalPath;
                var current = System.IO.Path.GetDirectoryName(myPath);
                var probe   = Environment.Is64BitProcess ? "x64" : "win32";
                while (current != null)
                {
                    var ffmpegDirectory = System.IO.Path.Combine(current, probe);
                    if (Directory.Exists(ffmpegDirectory))
                    {
                        System.Diagnostics.Trace.WriteLine($"FFmpeg binaries found in: {ffmpegDirectory}");
                        RegisterLibrariesSearchPath(ffmpegDirectory);
                        break;
                    }
                    current = Directory.GetParent(current)?.FullName;
                }
                break;
                //case PlatformID.Unix:
                //case PlatformID.MacOSX:
                //    var libraryPath = Environment.GetEnvironmentVariable(LD_LIBRARY_PATH);
                //    RegisterLibrariesSearchPath(libraryPath);
                //    break;
            }

            pkt = ffmpeg.av_packet_alloc();
            if (pkt == null)
            {
                throw new Exception("Unable to initialize the decoder");
            }

            decoded_frame = ffmpeg.av_frame_alloc();
            if (decoded_frame == null)
            {
                throw new Exception("Could not allocate audio frame");
            }

            //ffmpeg.avcodec_register_all();
            ffmpeg.av_register_all();

#if DEBUG
            ffmpeg.av_log_set_level(ffmpeg.AV_LOG_DEBUG);

            av_log_set_callback_callback logCallback = (p0, level, format, vl) =>
            {
                if (level > ffmpeg.av_log_get_level())
                {
                    return;
                }

                var lineSize    = 1024;
                var lineBuffer  = stackalloc byte[lineSize];
                var printPrefix = 1;
                ffmpeg.av_log_format_line(p0, level, format, vl, lineBuffer, lineSize, &printPrefix);
                var line = Marshal.PtrToStringAnsi((IntPtr)lineBuffer);
                System.Diagnostics.Trace.Write(line);
            };

            ffmpeg.av_log_set_callback(logCallback);
#endif

            m_read_packet_callback = readPacketCallback;
            m_seek_callback        = seekCallback;

            int ret;
            AVFormatContext *new_fmt_ctx = ffmpeg.avformat_alloc_context();
            if (new_fmt_ctx == null)
            {
                throw new Exception("ffmpeg.avformat_alloc_context() failed");
            }

            ulong avio_ctx_buffer_size = 65536;
            void *avio_ctx_buffer      = ffmpeg.av_malloc(avio_ctx_buffer_size);

            AVIOContext *avio_ctx = ffmpeg.avio_alloc_context((byte *)avio_ctx_buffer, (int)avio_ctx_buffer_size,
                                                              0, null, m_read_packet_callback, null, m_seek_callback);
            if (avio_ctx == null)
            {
                ffmpeg.avformat_free_context(new_fmt_ctx);
                throw new Exception("Could not allocate AVIOContext");
            }

            new_fmt_ctx->pb = avio_ctx;

            AVInputFormat *fmt = ffmpeg.av_find_input_format(m_settings.Format);
            if (fmt == null)
            {
                ffmpeg.avformat_free_context(new_fmt_ctx);
                throw new Exception($"Cannot find input format {m_settings.Format}");
            }

            if ((ret = ffmpeg.avformat_open_input(&new_fmt_ctx, null, fmt, null)) < 0)
            {
                ffmpeg.avformat_free_context(new_fmt_ctx);
                ret.ThrowExceptionIfError();
            }

            if ((ret = ffmpeg.avformat_find_stream_info(new_fmt_ctx, null)) < 0)
            {
                ffmpeg.avformat_close_input(&new_fmt_ctx);
                ret.ThrowExceptionIfError();
            }

#if FINDBESTSTREAM
            /* select the audio stream */
            ret = ffmpeg.av_find_best_stream(new_fmt_ctx, AVMediaType.AVMEDIA_TYPE_AUDIO, -1, -1, &dec, 0);
            if (ret < 0)
            {
                ffmpeg.avformat_close_input(&new_fmt_ctx);
                ret.ThrowExceptionIfError();
            }
#endif
            int matching_stream  = -1;
            int matching_streams = 0;
            for (int i = 0; i < (int)new_fmt_ctx->nb_streams; i++)
            {
                AVStream *stream_i = new_fmt_ctx->streams[i];
                if (stream_i->codecpar->codec_type == AVMediaType.AVMEDIA_TYPE_AUDIO &&
                    (settings.StreamId == 0 || settings.StreamId == stream_i->id))
                {
                    matching_stream = i;
                    matching_streams++;
                }
            }

            if (matching_streams == 0)
            {
                ffmpeg.avformat_close_input(&new_fmt_ctx);
                throw new Exception("No matching streams");
            }
            if (matching_streams != 1)
            {
                ffmpeg.avformat_close_input(&new_fmt_ctx);
                throw new Exception("More than one stream matches");
            }

            stream = new_fmt_ctx->streams[matching_stream];
            // Duration is unreliable for most codecs.
            //if (stream->duration > 0)
            //    _sampleCount = stream->duration;
            //else
            _sampleCount = -1;

            int bps = stream->codecpar->bits_per_raw_sample != 0 ?
                      stream->codecpar->bits_per_raw_sample :
                      stream->codecpar->bits_per_coded_sample;
            int   channels       = stream->codecpar->channels;
            int   sample_rate    = stream->codecpar->sample_rate;
            ulong channel_layout = stream->codecpar->channel_layout;
            pcm = new AudioPCMConfig(bps, channels, sample_rate, (AudioPCMConfig.SpeakerConfig)channel_layout);

            fmt_ctx = new_fmt_ctx;

            codec = ffmpeg.avcodec_find_decoder(stream->codecpar->codec_id);
            if (codec == null)
            {
                throw new Exception("Codec not found");
            }

            c = ffmpeg.avcodec_alloc_context3(codec);
            if (c == null)
            {
                throw new Exception("Could not allocate audio codec context");
            }
            // ffmpeg.av_opt_set_int(c, "refcounted_frames", 1, 0);
            ffmpeg.avcodec_parameters_to_context(c, stream->codecpar);

            c->request_sample_fmt = AVSampleFormat.AV_SAMPLE_FMT_S32;

            /* open it */
            if (ffmpeg.avcodec_open2(c, null, null) < 0)
            {
                throw new Exception("Could not open codec");
            }

            m_decoded_frame_offset = 0;
            m_decoded_frame_size   = 0;
            _sampleOffset          = 0;
        }
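The constructor above only prepares the demuxer and the codec context. One possible decode step, using the fields it initializes (fmt_ctx, stream, c, pkt, decoded_frame), might look roughly like the sketch below; end-of-stream flushing (sending a null packet) is omitted for brevity:

        // Sketch: read packets from the selected stream, feed them to the decoder and return
        // once a frame is available in decoded_frame (receive-first pattern).
        private unsafe int readNextFrame()
        {
            while (true)
            {
                // Drain any frame the decoder already has buffered.
                int ret = ffmpeg.avcodec_receive_frame(c, decoded_frame);
                if (ret != ffmpeg.AVERROR(ffmpeg.EAGAIN))
                {
                    return ret; // 0 on success (frame in decoded_frame), or an error / AVERROR_EOF
                }

                // Decoder needs more input: read the next packet and feed it.
                ret = ffmpeg.av_read_frame(fmt_ctx, pkt);
                if (ret < 0)
                {
                    return ret; // AVERROR_EOF at end of input, or a read error
                }
                if (pkt->stream_index != stream->index)
                {
                    ffmpeg.av_packet_unref(pkt);
                    continue; // packet belongs to another stream
                }
                ret = ffmpeg.avcodec_send_packet(c, pkt);
                ffmpeg.av_packet_unref(pkt);
                if (ret < 0)
                {
                    return ret;
                }
            }
        }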
Example #19
 public static extern int avio_pause(AVIOContext *h, int pause);
Example #20
 public static extern long avio_seek_time(AVIOContext *h, int stream_index, long timestamp, int flags);
Example #21
 public static unsafe MediaIO FromNative(AVIOContext *p, bool isOwner) => new MediaIO(p, isOwner);
Example #22
 public StreamMediaIO(AVIOContext *ptr, bool isOwner, object callbackObject) : base(ptr, isOwner)
 {
     _callbackObject = callbackObject;
 }
Example #23
 public static extern int avio_handshake(AVIOContext *c);
Example #24
 /// <summary>ftell() equivalent for AVIOContext.</summary>
 /// <returns>position or AVERROR.</returns>
 public static long avio_tell(AVIOContext * @s)
 {
     return(avio_seek(s, 0, 1));
 }
Example #25
 public static extern int avio_close(AVIOContext *s);
Example #26
 public static extern int avio_read_to_bprint(AVIOContext *h, IntPtr pb, IntPtr max_size);
Example #27
        public int Open(string url, bool doAudio = true, bool doSubs = true, Stream stream = null, bool closeExternals = true)
        {
            if (url == null && stream == null)
            {
                return(-1);
            }

            if (type == Type.Video && closeExternals)
            {
                decCtx.aDemuxer.Close();
                decCtx.sDemuxer.Close();
            }

            Close(closeExternals);

            int ret;

            this.url = url;
            Log($"Opening {url}");

            // TODO: Expose AV Format Options to Settings
            AVDictionary *opt = null;

            // Reduce those on network streams for faster opening
            av_dict_set_int(&opt, "probesize", 116 * (long)1024 * 1024, 0);        // (Bytes) Default 5MB | Higher for weird formats (such as .ts?)
            av_dict_set_int(&opt, "analyzeduration", 333 * (long)1000 * 1000, 0);  // (Microseconds) Default 5 seconds | Higher for network streams
            //av_dict_set_int(&opt, "max_probe_packets ", 15500, 0);         // (Packets) Default 2500

            // Required for Youtube-dl to avoid 403 Forbidden (Saves them in case of re-open)
            headers   = decCtx.Headers;
            referer   = decCtx.Referer;
            userAgent = decCtx.UserAgent;

            if (headers != null && headers != "")
            {
                av_dict_set(&opt, "headers", headers, 0);
            }
            if (referer != null && referer != "")
            {
                av_dict_set(&opt, "referer", referer, 0);
            }
            if (userAgent != null && userAgent != "")
            {
                av_dict_set(&opt, "user_agent", userAgent, 0);
            }

            /* Issue with HTTP/TLS - (sample video -> https://www.youtube.com/watch?v=sExEvN1bPRo)
             *
             * Required probably only for AUDIO and some specific formats?
             *
             * [tls @ 0e691280] Error in the pull function.
             * [tls @ 0e691280] The specified session has been invalidated for some reason.
             * [DECTX AVMEDIA_TYPE_AUDIO] AVMEDIA_TYPE_UNKNOWN - Error[-0005], Msg: I/O error
             */

            av_dict_set_int(&opt, "reconnect", 1, 0);                   // auto reconnect after disconnect before EOF
            av_dict_set_int(&opt, "reconnect_streamed", 1, 0);          // auto reconnect streamed / non seekable streams
            av_dict_set_int(&opt, "reconnect_delay_max", 5, 0);         // max reconnect delay in seconds after which to give up
            //av_dict_set_int(&opt, "reconnect_on_network_error", 1, 0);
            //av_dict_set_int(&opt, "reconnect_at_eof", 1, 0);          // auto reconnect at EOF | Maybe will use this for another similar issues? | will not stop the decoders (no EOF)
            //av_dict_set_int(&opt, "multiple_requests", 1, 0);

            // RTSP
            av_dict_set(&opt, "rtsp_transport", "tcp", 0);              // Seems UDP causing issues (use this by default?)
            av_dict_set_int(&opt, "stimeout", 20 * 1000 * 1000, 0);     // RTSP microseconds timeout

            // hls more? | https://ffmpeg.org/ffmpeg-formats.html#toc-hls-1
            //av_dict_set_int(&opt, "max_reload", 1123123, 0);
            //av_dict_set_int(&opt, "m3u8_hold_counters", 1123123, 0);

            // misc
            //av_dict_set_int(&opt, "multiple_requests", 1, 0);
            //av_dict_set_int(&opt, "rw_timeout", 10 * 1000 * 1000, 0);
            //av_dict_set_int(&opt, "timeout", 10 * 1000 * 1000, 0);

            fmtCtx = avformat_alloc_context();
            fmtCtx->interrupt_callback.callback = interruptClbk;
            fmtCtx->interrupt_callback.opaque   = (void *)decCtx.decCtxPtr;
            fmtCtx->flags |= AVFMT_FLAG_DISCARD_CORRUPT;

            if (stream != null)
            {
                if (ioBuffer == null)
                {
                    ioBuffer = new byte[ioBufferSize];                    // NOTE: if we use a small buffer, ffmpeg might request more than we suggest
                }
                ioCtx          = avio_alloc_context((byte *)av_malloc(ioBufferSize), ioBufferSize, 0, (void *)decCtx.decCtxPtr, ioread, null, ioseek);
                fmtCtx->pb     = ioCtx;
                fmtCtx->flags |= AVFMT_FLAG_CUSTOM_IO;
                ioStream       = stream;
                ioStream.Seek(0, SeekOrigin.Begin);
            }

            AVFormatContext *fmtCtxPtr = fmtCtx;

            ret = avformat_open_input(&fmtCtxPtr, stream != null ? null : url, null, &opt);
            if (ret < 0)
            {
                Log($"[Format] [ERROR-1] {Utils.ErrorCodeToMsg(ret)} ({ret})"); return(ret);
            }

            // validate that we need this
            av_format_inject_global_side_data(fmtCtx);

            ret = avformat_find_stream_info(fmtCtx, null);
            if (ret < 0)
            {
                Log($"[Format] [ERROR-2] {Utils.ErrorCodeToMsg(ret)} ({ret})"); avformat_close_input(&fmtCtxPtr); return(ret);
            }

            StreamInfo.Fill(this);
            fmtName = Utils.BytePtrToStringUTF8(fmtCtx->iformat->long_name);

            // In case of multiple video streams (Youtube-dl manifest?)
            //if (decCtx.opt.video.PreferredHeight != -1 && type == Type.Video)
            //{
            //    ret = -1;
            //    var iresults =
            //        from    vstream in streams
            //        where   vstream.Type == AVMEDIA_TYPE_VIDEO && vstream.Height <= decCtx.opt.video.PreferredHeight
            //        orderby vstream.Height descending
            //        select  vstream;

            //    var results = iresults.ToList();
            //    if (results.Count != 0) ret = iresults.ToList()[0].StreamIndex;
            //}
            //if (ret == -1)

            ret = av_find_best_stream(fmtCtx, mType, -1, -1, null, 0);
            if (ret < 0)
            {
                Log($"[Format] [ERROR-3] {Utils.ErrorCodeToMsg(ret)} ({ret})"); avformat_close_input(&fmtCtxPtr); return(ret);
            }

            ret = decoder.Open(this, fmtCtx->streams[ret]);
            if (ret < 0)
            {
                avformat_close_input(&fmtCtxPtr); return(ret);
            }

            switch (mType)
            {
            case AVMEDIA_TYPE_VIDEO:

                ret = av_find_best_stream(fmtCtx, AVMEDIA_TYPE_AUDIO, -1, decoder.st->index, null, 0);
                if (ret >= 0)
                {
                    defaultAudioStream = ret;
                }

                if (doAudio)
                {
                    if (ret >= 0)
                    {
                        decCtx.aDecoder.Open(this, fmtCtx->streams[ret]);
                    }
                    else if (ret != AVERROR_STREAM_NOT_FOUND)
                    {
                        Log($"[Format] [ERROR-7] [Audio] {Utils.ErrorCodeToMsg(ret)} ({ret})");
                    }
                }

                if (doSubs)
                {
                    if ((ret = av_find_best_stream(fmtCtx, AVMEDIA_TYPE_SUBTITLE, -1, decoder.st->index, null, 0)) >= 0)
                    {
                        decCtx.sDecoder.Open(this, fmtCtx->streams[ret]);
                    }
                    else if (ret != AVERROR_STREAM_NOT_FOUND)
                    {
                        Log($"[Format] [ERROR-7] [Subs ] {Utils.ErrorCodeToMsg(ret)} ({ret})");
                    }
                }

                break;

            case AVMEDIA_TYPE_AUDIO:
                break;

            case AVMEDIA_TYPE_SUBTITLE:
                break;
            }

            RefreshStreams();

            if (demuxThread == null || !demuxThread.IsAlive)
            {
                demuxThread = new Thread(() => Demux());
                demuxThread.IsBackground = true;
                demuxThread.Start();
                while (status != Status.READY)
                {
                    Thread.Sleep(5);                            // Wait for thread to come up
                }
            }
            else
            {
                status = Status.READY;
            }

            pkt = av_packet_alloc();

            return(0);
        }
Example #28
        public int Open(string url, bool doAudio = true, bool doSubs = true, Stream stream = null, bool closeExternals = true)
        {
            if (url == null && stream == null)
            {
                return(-1);
            }

            if (type == MediaType.Video && closeExternals)
            {
                decCtx.aDemuxer.Close();
                decCtx.sDemuxer.Close();
            }
            Close(closeExternals);

            int ret;

            this.url = url;
            Log($"Opening {url}");

            AVDictionary *opt = null;
            Dictionary <string, string> optPtr = decCtx.cfg.demuxer.GetFormatOptPtr(type);

            foreach (var t1 in optPtr)
            {
                av_dict_set(&opt, t1.Key, t1.Value, 0);
            }

            fmtCtx = avformat_alloc_context();
            fmtCtx->interrupt_callback.callback = interruptClbk;
            fmtCtx->interrupt_callback.opaque   = (void *)decCtx.decCtxPtr;
            fmtCtx->flags |= AVFMT_FLAG_DISCARD_CORRUPT;

            if (stream != null)
            {
                if (ioBuffer == null)
                {
                    ioBuffer = new byte[ioBufferSize];                    // NOTE: if we use a small buffer, ffmpeg might request more than we suggest
                }
                ioCtx          = avio_alloc_context((byte *)av_malloc(ioBufferSize), ioBufferSize, 0, (void *)decCtx.decCtxPtr, ioread, null, ioseek);
                fmtCtx->pb     = ioCtx;
                fmtCtx->flags |= AVFMT_FLAG_CUSTOM_IO;
                ioStream       = stream;
                ioStream.Seek(0, SeekOrigin.Begin);
            }

            AVFormatContext *fmtCtxPtr = fmtCtx;

            ret = avformat_open_input(&fmtCtxPtr, stream != null ? null : url, null, &opt);
            if (ret < 0)
            {
                Log($"[Format] [ERROR-1] {Utils.FFmpeg.ErrorCodeToMsg(ret)} ({ret})"); return(ret);
            }

            // validate that we need this
            av_format_inject_global_side_data(fmtCtx);

            ret = avformat_find_stream_info(fmtCtx, null);
            if (ret < 0)
            {
                Log($"[Format] [ERROR-2] {Utils.FFmpeg.ErrorCodeToMsg(ret)} ({ret})"); avformat_close_input(&fmtCtxPtr); return(ret);
            }

            DemuxerInfo.Fill(this);
            Log("\r\n[# Format] " + DemuxerInfo.GetDumpAll(this));

            // In case of multiple video streams select best Height based on current Display (fallback to default) | Maybe FPS too?
            ret = -1;
            if (type == MediaType.Video)
            {
                var iresults =
                    from vstream in streams
                    where   vstream.Type == AVMEDIA_TYPE_VIDEO && vstream.Height <= decCtx.renderer.Info.ScreenBounds.Height
                    orderby vstream.Height descending
                    select vstream;

                var results = iresults.ToList();
                if (results.Count != 0)
                {
                    ret = results[0].StreamIndex;
                }
            }

            if (ret == -1)
            {
                ret = av_find_best_stream(fmtCtx, mType, -1, -1, null, 0);
            }
            if (ret < 0)
            {
                Log($"[Format] [ERROR-3] {Utils.FFmpeg.ErrorCodeToMsg(ret)} ({ret})"); avformat_close_input(&fmtCtxPtr); return(ret);
            }

            ret = decoder.Open(this, fmtCtx->streams[ret]);
            if (ret < 0)
            {
                avformat_close_input(&fmtCtxPtr); return(ret);
            }

            switch (mType)
            {
            case AVMEDIA_TYPE_VIDEO:

                ret = av_find_best_stream(fmtCtx, AVMEDIA_TYPE_AUDIO, -1, decoder.st->index, null, 0);
                if (ret >= 0)
                {
                    defaultAudioStream = ret;
                }

                if (doAudio)
                {
                    if (ret >= 0)
                    {
                        decCtx.aDecoder.Open(this, fmtCtx->streams[ret]);
                    }
                    else if (ret != AVERROR_STREAM_NOT_FOUND)
                    {
                        Log($"[Format] [ERROR-7] [Audio] {Utils.FFmpeg.ErrorCodeToMsg(ret)} ({ret})");
                    }
                }

                if (doSubs)
                {
                    if ((ret = av_find_best_stream(fmtCtx, AVMEDIA_TYPE_SUBTITLE, -1, decoder.st->index, null, 0)) >= 0)
                    {
                        decCtx.sDecoder.Open(this, fmtCtx->streams[ret]);
                    }
                    else if (ret != AVERROR_STREAM_NOT_FOUND)
                    {
                        Log($"[Format] [ERROR-7] [Subs ] {Utils.FFmpeg.ErrorCodeToMsg(ret)} ({ret})");
                    }
                }

                break;

            case AVMEDIA_TYPE_AUDIO:
                break;

            case AVMEDIA_TYPE_SUBTITLE:
                break;
            }

            RefreshStreams();

            if (demuxThread == null || !demuxThread.IsAlive)
            {
                demuxThread      = new Thread(() => Demux());
                demuxThread.Name = $"{type} Demuxer"; demuxThread.IsBackground = true; demuxThread.Start();
                while (status != Status.Paused)
                {
                    Thread.Sleep(5);                             // Wait for thread to come up
                }
            }
            else
            {
                status = Status.Paused;
            }

            pkt = av_packet_alloc();
            //Console.WriteLine($"CP: {decoder.codecCtx->colorspace} | PR: {decoder.codecCtx->color_primaries} | TRC: {decoder.codecCtx->color_trc} | CR: {decoder.codecCtx->color_range}");
            return(0);
        }
Example #29
 internal protected unsafe DynamicMediaIO(AVIOContext *ptr, bool isOwner) : base(ptr, isOwner)
 {
 }
Example #30
 public static extern int avio_accept(AVIOContext *s, AVIOContext **c);