Example #1
        public DecoderStream(MediaFile file, ref AVStream stream)
        {
            // Initialize instance variables
            m_disposed = false;
            m_position = m_bufferUsedSize = 0;
            m_file     = file;
            m_avStream = stream;

            m_avCodecCtx = *m_avStream.codec;

            // Open the decoding codec
            AVCodec *avCodec = FFmpeg.avcodec_find_decoder(m_avCodecCtx.codec_id);

            if (avCodec == null)
            {
                throw new DecoderException("No decoder found");
            }

            if (FFmpeg.avcodec_open(ref m_avCodecCtx, avCodec) < 0)
            {
                throw new DecoderException("Error opening codec");
            }

            m_codecOpen = true;
        }
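Because this constructor opens the codec and records that in m_codecOpen, the stream needs a matching teardown. The following is a minimal sketch of that cleanup, assuming the same old-style bindings (an avcodec_close overload taking ref AVCodecContext, mirroring avcodec_open above) and a standard Dispose pattern; neither appears in the example itself.

        protected virtual void Dispose(bool disposing)
        {
            if (!m_disposed)
            {
                // Close the codec that the constructor opened; the ref-based
                // binding is an assumption mirroring the avcodec_open call above
                if (m_codecOpen)
                {
                    FFmpeg.avcodec_close(ref m_avCodecCtx);
                    m_codecOpen = false;
                }

                m_disposed = true;
            }
        }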
Example #2
        public unsafe void Index_ReturnsNativeIndex()
        {
            var nativeStream = new NativeAVStream
            {
                index = 6,
            };

            var stream = new AVStream(&nativeStream);

            Assert.Equal(6, stream.Index);
        }
Example #3
        public unsafe void Metadata_ReturnsNull()
        {
            var nativeStream = new NativeAVStream
            {
                metadata = null,
            };

            var stream = new AVStream(&nativeStream);

            Assert.Null(stream.Metadata);
        }
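Examples #2 and #3 exercise a managed AVStream wrapper built over a pointer to the native struct. A minimal sketch of the shape those tests imply, assuming an unsafe wrapper that dereferences the pointer on each property read (the exact member set is an assumption; only Index is shown):

        public unsafe class AVStream
        {
            private readonly NativeAVStream *native;

            public AVStream(NativeAVStream *native)
            {
                this.native = native;
            }

            // Index surfaces the native struct's index field directly
            public int Index => this.native->index;
        }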
Example #4
        public unsafe MediaFileReader(string Filename)
        {
            if (String.IsNullOrEmpty(Filename))
            {
                throw new ArgumentNullException("Filename");
            }

            m_filename = Filename;

            // Open the file with FFmpeg
            if (FFmpeg.avformat_open_input_file(out FormatContext, Filename) != AVError.OK)
            {
                throw new DecoderException("Couldn't open file");
            }

            if (FFmpeg.avformat_find_stream_info(ref FormatContext, null) < AVError.OK)
            {
                throw new DecoderException("Couldn't find stream info");
            }

            if (FormatContext.nb_streams < 1)
            {
                throw new DecoderException("No streams found");
            }

            FFmpeg.avcodec_register_all();

            m_streams = new SortedList <int, DecoderStream>();
            for (int i = 0; i < FormatContext.nb_streams; i++)
            {
                AVStream stream = *FormatContext.streams[i];

                switch (stream.codec->codec_type)
                {
                case AVMediaType.AVMEDIA_TYPE_VIDEO:
                    m_streams.Add(i, new VideoDecoderStream(this, ref stream));
                    break;

                case AVMediaType.AVMEDIA_TYPE_AUDIO:
                    m_streams.Add(i, new AudioDecoderStream(this, ref stream));
                    break;

                case AVMediaType.AVMEDIA_TYPE_UNKNOWN:
                case AVMediaType.AVMEDIA_TYPE_DATA:
                case AVMediaType.AVMEDIA_TYPE_SUBTITLE:
                default:
                    m_streams.Add(i, null);
                    break;
                }
            }
        }
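A short usage sketch for this constructor; the Streams property and the IDisposable implementation are assumptions, since only the constructor is shown above:

        // Hypothetical usage: enumerate the decodable streams of a file
        using (var reader = new MediaFileReader("movie.avi"))
        {
            foreach (var entry in reader.Streams)
            {
                // Unknown, data, and subtitle streams were stored as null above
                if (entry.Value != null)
                {
                    Console.WriteLine($"Stream #{entry.Key}: {entry.Value.GetType().Name}");
                }
            }
        }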
Example #5
        public unsafe void Metadata_ReturnsNativeMetadata()
        {
            var nativeDictionary = new NativeAVDictionary
            {
            };

            var nativeStream = new NativeAVStream
            {
                metadata = &nativeDictionary,
            };

            var stream = new AVStream(&nativeStream);

            Assert.Empty(stream.Metadata);
        }
Example #6
        public unsafe void CodecContext_ReturnsNativeCodecContext()
        {
            var codecParameters = new NativeAVCodecParameters
            {
                codec_type = NativeAVMediaType.AVMEDIA_TYPE_VIDEO,
            };

            var nativeStream = new NativeAVStream
            {
                codecpar = &codecParameters,
            };

            var stream = new AVStream(&nativeStream);

            Assert.Equal(NativeAVMediaType.AVMEDIA_TYPE_VIDEO, stream.CodecParameters.Type);
        }
Example #7
        public unsafe void TimeBase_ReturnsNativeTimeBase()
        {
            var nativeTimeBase = new NativeAVRational
            {
                den = 100,
                num = 4,
            };

            var nativeStream = new NativeAVStream
            {
                time_base = nativeTimeBase,
            };

            var stream = new AVStream(&nativeStream);

            Assert.Equal(4, stream.TimeBase.num);
            Assert.Equal(100, stream.TimeBase.den);
        }
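The time base asserted here is the unit, in seconds, of the stream's timestamps: a timestamp pts corresponds to pts * num / den seconds. A small sketch of that conversion using the 4/100 rational from the test (the helper itself is hypothetical):

        // Hypothetical helper: convert a stream timestamp to seconds
        public static double PtsToSeconds(long pts, int num, int den)
        {
            // With the test's 4/100 time base, a pts of 50 maps to 50 * 4 / 100 = 2 seconds
            return pts * (double)num / den;
        }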
Example #8
        /// <summary>
        /// Constructs a new VideoDecoderStream over a specific media file and stream.
        /// </summary>
        /// <param name="file">The media file reader that owns this stream.</param>
        /// <param name="stream">The underlying AVStream to decode.</param>
        internal VideoDecoderStream(MediaFileReader file, ref AVStream stream)
            : base(file, ref stream)
        {
            // allocate video frame
            m_avFrame = FFmpeg.avcodec_alloc_frame();
            if (FFmpeg.avpicture_alloc(out m_avPicture, m_avCodecCtx.pix_fmt, m_avCodecCtx.width, m_avCodecCtx.height) != 0)
            {
                throw new DecoderException("Error allocating AVPicture");
            }
            m_avPicture_allocated = true;

            int buffersize = FFmpeg.avpicture_get_size(m_avCodecCtx.pix_fmt, m_avCodecCtx.width, m_avCodecCtx.height);

            if (buffersize <= 0)
            {
                throw new DecoderException("Invalid size returned by avpicture_get_size");
            }

            m_buffer = new byte[buffersize];
        }
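For reference, avpicture_get_size returns the number of bytes a single frame occupies in the given pixel format, so m_buffer holds exactly one decoded picture; a 640x480 frame in RGB24, for instance, needs 640 * 480 * 3 = 921,600 bytes.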
Example #9
        public unsafe void AVCodec_InitializesInstance()
        {
            var nativeCodec = new NativeAVCodec()
            {
                capabilities = (int)AVCodecCapabilities.Truncated,
            };

            var ffmpegMock = new Mock <FFmpegClient>();

            ffmpegMock
            .Setup(c => c.FindDecoder(AVCodecID.AV_CODEC_ID_H264))
            .Returns((IntPtr)(&nativeCodec))
            .Verifiable();

            var codecParameters = new NativeAVCodecParameters
            {
                codec_type = NativeAVMediaType.AVMEDIA_TYPE_VIDEO,
                codec_id   = AVCodecID.AV_CODEC_ID_H264,
            };

            var nativeCodecContext = new NativeAVCodecContext()
            {
                codec_id = AVCodecID.AV_CODEC_ID_H264,
            };

#pragma warning disable CS0618 // Type or member is obsolete
            var nativeStream = new NativeAVStream
            {
                codecpar = &codecParameters,
                codec    = &nativeCodecContext,
            };

            var stream = new AVStream(&nativeStream);

            var ffmpeg = ffmpegMock.Object;
            var codec  = new AVCodec(ffmpeg, stream);

            Assert.Equal((int)AVCodecCapabilities.Truncated, stream.CodecContext->flags);
#pragma warning restore CS0618 // Type or member is obsolete
        }
Example #10
        public AudioEncoderStream(string Filename, EncoderInformation EncoderInfo)
        {
            // Initialize instance variables
            m_filename = Filename;
            m_disposed = m_fileOpen = false;
            m_buffer   = new FifoMemoryStream();

            // Open FFmpeg
            FFmpeg.av_register_all();

            // Initialize the output format context
            m_avFormatCtx = FFmpeg.av_alloc_format_context();

            // Get output format
            m_avFormatCtx.oformat = FFmpeg.guess_format(EncoderInfo.Codec.ShortName, null, null);
            if (m_avFormatCtx.oformat == null)
            {
                throw new EncoderException("Could not find output format.");
            }

            FFmpeg.av_set_parameters(ref m_avFormatCtx, null);

            // Initialize the new output stream
            AVStream *stream = FFmpeg.av_new_stream(ref m_avFormatCtx, 1);

            if (stream == null)
            {
                throw new EncoderException("Could not alloc output audio stream");
            }

            m_avStream = *stream;

            // Initialize output codec context
            m_avCodecCtx = *m_avStream.codec;

            m_avCodecCtx.codec_id        = EncoderInfo.Codec.CodecID;
            m_avCodecCtx.codec_type      = CodecType.CODEC_TYPE_AUDIO;
            m_avCodecCtx.sample_rate     = EncoderInfo.SampleRate;
            m_avCodecCtx.channels        = EncoderInfo.Channels;
            m_avCodecCtx.bits_per_sample = EncoderInfo.SampleSize;
            m_avCodecCtx.bit_rate        = EncoderInfo.Bitrate;

            if (EncoderInfo.VBR)
            {
                m_avCodecCtx.flags         |= FFmpeg.CODEC_FLAG_QSCALE;
                m_avCodecCtx.global_quality = EncoderInfo.FFmpegQualityScale;
            }

            // Open codec
            AVCodec *outCodec = FFmpeg.avcodec_find_encoder(m_avCodecCtx.codec_id);

            if (outCodec == null)
            {
                throw new EncoderException("Could not find encoder");
            }

            if (FFmpeg.avcodec_open(ref m_avCodecCtx, outCodec) < 0)
            {
                throw new EncoderException("Could not open codec.");
            }

            // Open and prep file
            if (FFmpeg.url_fopen(ref m_avFormatCtx.pb, m_filename, FFmpeg.URL_WRONLY) < 0)
            {
                throw new EncoderException("Could not open output file.");
            }

            m_fileOpen = true;

            FFmpeg.av_write_header(ref m_avFormatCtx);
        }
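The constructor above opens the output file and writes the container header, so a matching shutdown has to flush and finalize the container. A minimal sketch, assuming the same old-style bindings (av_write_trailer and url_fclose taking ref parameters, mirroring the calls above); the exact url_fclose binding is an assumption:

        public void Close()
        {
            if (m_fileOpen)
            {
                // Flush any buffered packets and write the container trailer
                FFmpeg.av_write_trailer(ref m_avFormatCtx);

                // Close the output file opened with url_fopen (binding assumed)
                FFmpeg.url_fclose(ref m_avFormatCtx.pb);
                m_fileOpen = false;
            }
        }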
Example #11
 public static extern void av_update_cur_dts(ref AVFormatContext pAVFormatContext, ref AVStream pAVStream, long timestamp);
Example #12
 public static extern AVError av_add_index_entry(ref AVStream pAVStream, long pos, long timestamp, int size, int distance, AVSEEK_FLAG flags);
Example #13
 public static extern AVError av_index_search_timestamp(ref AVStream pAVStream, long timestamp, AVSEEK_FLAG flags);
Example #14
 public static extern void av_set_pts_info(ref AVStream pAVStream, int pts_wrap_bits, uint pts_num, uint pts_den);
Example #15
 internal AudioDecoderStream(MediaFile file, ref AVStream stream)
     : base(file, ref stream)
 {
     m_buffer = new byte[FFmpeg.AVCODEC_MAX_AUDIO_FRAME_SIZE];
 }
Example #16
 public static extern void av_pkt_dump2(IntPtr pFile, ref AVPacket pAVPacket, [MarshalAs(UnmanagedType.Bool)] bool dump_payload, ref AVStream stream);
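Extern declarations like those in Examples #11 through #16 only resolve at run time if they carry a DllImport attribute naming the native library. A hedged sketch of the full declaration, assuming an avformat-58 binary and the Cdecl calling convention (both depend on the FFmpeg build actually deployed):

 // The library name and calling convention below are assumptions; they must
 // match the avformat binary shipped alongside the application
 [DllImport("avformat-58", CallingConvention = CallingConvention.Cdecl)]
 public static extern void av_pkt_dump2(IntPtr pFile, ref AVPacket pAVPacket, [MarshalAs(UnmanagedType.Bool)] bool dump_payload, ref AVStream stream);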
Example #17
        /// <summary>
        /// Is invoked, when the application is started.
        /// </summary>
        /// <param name="args">The command line arguments, that were passed to the application.</param>
        public static void Main(string[] args)
        {
            // Initializes the Codecs and formats
            LibAVFormat.av_register_all();

            // Asks the user for a file name to load
            Console.Write("File name: ");
            string fileName = Console.ReadLine();

            // Loads a video
            IntPtr formatContextPointer;

            if (LibAVFormat.avformat_open_input(out formatContextPointer, fileName, IntPtr.Zero, IntPtr.Zero) < 0)
            {
                Console.WriteLine($"An error occurred while opening the video: {fileName}.");
                return;
            }
            AVFormatContext formatContext = Marshal.PtrToStructure <AVFormatContext>(formatContextPointer);

            Console.WriteLine($"Opened video file {formatContext.filename}.");

            // Retrieve stream information of the video
            if (LibAVFormat.avformat_find_stream_info(formatContextPointer, IntPtr.Zero) < 0)
            {
                Console.WriteLine("An error occurred while retrieving the stream information of the video.");
                return;
            }

            // Finds the first video stream in the video
            Console.WriteLine($"Found {formatContext.nb_streams} stream(s) in the video file.");
            int videoStreamId = -1;

            for (int i = 0; i < formatContext.nb_streams; i++)
            {
                AVStream       stream       = Marshal.PtrToStructure <AVStream>(Marshal.PtrToStructure <IntPtr>(IntPtr.Add(formatContext.streams, i * IntPtr.Size)));
                AVCodecContext codecContext = Marshal.PtrToStructure <AVCodecContext>(stream.codec);
                if (codecContext.codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
                {
                    videoStreamId = i;
                    break;
                }
            }
            if (videoStreamId == -1)
            {
                Console.WriteLine("No video stream found.");
                return;
            }
            AVStream       videoStream       = Marshal.PtrToStructure <AVStream>(Marshal.PtrToStructure <IntPtr>(IntPtr.Add(formatContext.streams, videoStreamId * IntPtr.Size)));
            AVCodecContext videoCodecContext = Marshal.PtrToStructure <AVCodecContext>(videoStream.codec);

            // Finds the decoder for the video stream
            IntPtr codecPointer = LibAVCodec.avcodec_find_decoder(videoCodecContext.codec_id);

            if (codecPointer == IntPtr.Zero)
            {
                Console.WriteLine("The video codec is not supported.");
                return;
            }
            AVCodec videoCodec = Marshal.PtrToStructure <AVCodec>(codecPointer);

            Console.WriteLine($"Using the {videoCodec.long_name} codec to decode the video stream.");

            // Opens the codec for the video stream
            if (LibAVCodec.avcodec_open2(videoStream.codec, codecPointer, IntPtr.Zero) < 0)
            {
                Console.WriteLine("The codec {videoCodec.long_name} could not be opened.");
                return;
            }
            Console.WriteLine("Successfully loaded codec.");

            // Allocates video frames for the original decoded frame and the frame in RGB (which is then later stored in a file)
            IntPtr framePointer    = LibAVUtil.av_frame_alloc();
            IntPtr frameRgbPointer = LibAVUtil.av_frame_alloc();

            // Determines the required buffer size and allocates the buffer for the RGB frame
            int    numBytes = LibAVCodec.avpicture_get_size(AVPixelFormat.AV_PIX_FMT_RGB24, videoCodecContext.width, videoCodecContext.height);
            IntPtr buffer   = LibAVUtil.av_malloc(new UIntPtr((uint)(numBytes * sizeof(byte))));

            // Assigns appropriate parts of buffer to image planes in frameRgb, note that frameRgb is an AVFrame, but AVFrame is a superset of AVPicture
            LibAVCodec.avpicture_fill(frameRgbPointer, buffer, AVPixelFormat.AV_PIX_FMT_RGB24, videoCodecContext.width, videoCodecContext.height);
            AVFrame frameRgb = Marshal.PtrToStructure <AVFrame>(frameRgbPointer);

            // Cycles over all frames of the video and dumps the frames to file
            Console.WriteLine("Decoding vidoe frames...");
            int    frameIndex    = 0;
            IntPtr packetPointer = Marshal.AllocHGlobal(Marshal.SizeOf <AVPacket>());

            while (LibAVFormat.av_read_frame(formatContextPointer, packetPointer) >= 0)
            {
                AVPacket packet = Marshal.PtrToStructure <AVPacket>(packetPointer);
                if (packet.stream_index == videoStreamId)
                {
                    // Decodes video frame
                    int frameFinished = 0;
                    LibAVCodec.avcodec_decode_video2(videoStream.codec, framePointer, ref frameFinished, packetPointer);
                    AVFrame frame = Marshal.PtrToStructure <AVFrame>(framePointer);

                    // Checks if the video frame was properly decoded
                    if (frameFinished != 0)
                    {
                        // Converts the image from its native format to RGB
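                        // The scale context is recreated on every frame and never freed here;
                        // hoisting sws_getContext out of the loop and releasing it once with
                        // sws_freeContext after decoding would avoid the repeated allocation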
                        IntPtr scaleContextPointer = LibSwScale.sws_getContext(videoCodecContext.width, videoCodecContext.height, videoCodecContext.pix_fmt,
                                                                               videoCodecContext.width, videoCodecContext.height, AVPixelFormat.AV_PIX_FMT_RGB24, ScalingFlags.SWS_BILINEAR, IntPtr.Zero,
                                                                               IntPtr.Zero, IntPtr.Zero);
                        LibSwScale.sws_scale(scaleContextPointer, frame.data, frame.linesize, 0, videoCodecContext.height, frameRgb.data, frameRgb.linesize);
                        frameRgb = Marshal.PtrToStructure <AVFrame>(frameRgbPointer);

                        // Frames 25 through 30 are stored to disk
                        frameIndex++;
                        if (frameIndex > 24 && frameIndex <= 30)
                        {
                            Console.WriteLine($"Writing frame {frameIndex} to file...");
                            string frameFileName = Path.Combine(Path.GetDirectoryName(fileName), $"frame-{frameIndex}.ppm");
                            Program.SaveFrame(frameRgb, videoCodecContext.width, videoCodecContext.height, frameFileName);
                        }
                    }
                }

                // Frees the packet that was allocated by av_read_frame
                LibAVCodec.av_free_packet(packetPointer);
            }
            Console.WriteLine("Finished decoding of the video.");

            // Frees and closes all acquired resources
            LibAVUtil.av_free(buffer);
            LibAVUtil.av_free(frameRgbPointer);
            LibAVUtil.av_free(framePointer);
            LibAVCodec.avcodec_close(videoStream.codec);
            IntPtr formatContextPointerPointer = Marshal.AllocHGlobal(Marshal.SizeOf <IntPtr>());

            Marshal.WriteIntPtr(formatContextPointerPointer, formatContextPointer);
            LibAVFormat.avformat_close_input(formatContextPointerPointer);
            Marshal.FreeHGlobal(formatContextPointerPointer);
            Console.WriteLine("Freed all acquired resources.");
        }
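The loop above delegates to Program.SaveFrame, which is not shown. A minimal sketch of what it might look like, writing a binary PPM (P6) file; it assumes AVFrame exposes data as an array of plane pointers and linesize as per-plane strides, consistent with how the example passes them to sws_scale, plus the usual System.IO, System.Text, and System.Runtime.InteropServices imports:

        private static void SaveFrame(AVFrame frame, int width, int height, string fileName)
        {
            using (FileStream output = File.Create(fileName))
            {
                // PPM header: magic number, dimensions, and maximum channel value
                byte[] header = Encoding.ASCII.GetBytes($"P6\n{width} {height}\n255\n");
                output.Write(header, 0, header.Length);

                // Copy the RGB24 pixels row by row; linesize may exceed width * 3
                // because of alignment padding, so each row is copied separately
                byte[] row = new byte[width * 3];
                for (int y = 0; y < height; y++)
                {
                    Marshal.Copy(IntPtr.Add(frame.data[0], y * frame.linesize[0]), row, 0, row.Length);
                    output.Write(row, 0, row.Length);
                }
            }
        }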