Beispiel #1
0
        public void SendPacket_ThrowsOnNoDecoder()
        {
            // Arrange: the codec context reports open, but the codec is not a
            // decoder, so SendPacket must reject the packet.
            var clientMock = new Mock <FFmpegClient>();

            clientMock.Setup(c => c.IsCodecOpen(It.IsAny <IntPtr>())).Returns(true).Verifiable();
            clientMock.Setup(c => c.IsDecoder(It.IsAny <IntPtr>())).Returns(false).Verifiable();
            clientMock.Setup(c => c.SendPacket(It.IsAny <IntPtr>(), It.IsAny <IntPtr>())).Returns(0).Verifiable();

            var client = clientMock.Object;

            var decoderHandle = client.FindDecoder(AVCodecID.AV_CODEC_ID_H264);
            var contextStruct = new NativeAVCodecContext
            {
            };

            var codec = new AVCodec(client, &contextStruct, (NativeAVCodec *)decoderHandle);

            // Act & Assert: sending a packet to a non-decoder codec throws.
            Assert.Throws <InvalidOperationException>(() => codec.SendPacket(new AVPacket(client)));
        }
Beispiel #2
0
        public void SendPacket_ThrowsOnSendPacketFail()
        {
            // Arrange: an open decoder whose native send-packet call fails with -100;
            // the wrapper is expected to forward that code to ThrowOnAVError.
            var ffmpegMock = new Mock <FFmpegClient>();

            ffmpegMock
            .Setup(c => c.IsCodecOpen(It.IsAny <IntPtr>()))
            .Returns(true)
            .Verifiable();

            ffmpegMock
            .Setup(c => c.IsDecoder(It.IsAny <IntPtr>()))
            .Returns(true)
            .Verifiable();

            ffmpegMock
            .Setup(c => c.SendPacket(It.IsAny <IntPtr>(), It.IsAny <IntPtr>()))
            .Returns(-100)
            .Verifiable();
            ffmpegMock
            .Setup(c => c.ThrowOnAVError(-100, true))
            .Verifiable();

            var ffmpeg = ffmpegMock.Object;

            var nativeCodec        = ffmpeg.FindDecoder(AVCodecID.AV_CODEC_ID_H264);
            var nativeCodecContext = new NativeAVCodecContext
            {
            };

            var codec = new AVCodec(ffmpeg, &nativeCodecContext, (NativeAVCodec *)nativeCodec);

            // Act
            codec.SendPacket(new AVPacket(ffmpeg));

            // Assert: check the .Verifiable() setups above. Without this call the
            // test asserts nothing and would pass even if SendPacket never
            // propagated the -100 error to ThrowOnAVError.
            ffmpegMock.Verify();
        }
Beispiel #3
0
        public void TestDecode()
        {
            // Open the decoder matching the stream's codec id, then count how many
            // complete pictures the first stream yields.
            _codecContext.Open2(AVCodec.FindDecoder(_codecContext.CodecId));

            var decodedFrames = 0;
            var frame         = new AVFrame();

            for (AVPacket packet = _formatContext.ReadFrame(); packet != null; packet = _formatContext.ReadFrame())
            {
                // Skip packets of other streams and packets that did not complete a picture.
                if (packet.StreamIndex != 0 || !_codecContext.DecodeVideo2(packet, frame))
                {
                    continue;
                }

                decodedFrames++;

                // To check the frames visually (not part of the normal test):
                // PgmSave(decodedFrames + ".pgm", frame);
            }

            Assert.AreEqual(245, decodedFrames);
        }
Beispiel #4
0
 public void TestOpen()
 {
     // The Codec property is only populated once the context has been opened.
     Assert.IsNull(_codecContext.Codec);

     var decoder = AVCodec.FindDecoder(_codecContext.CodecId);
     _codecContext.Open2(decoder);

     Assert.IsNotNull(_codecContext.Codec);
     Assert.AreEqual("h264", _codecContext.Codec.Name);
 }
Beispiel #5
0
        public void TestEncode()
        {
            // Route libav log output to the console at trace level so encoder
            // failures during the test are visible.
            AVLog.Callback += (level, msg) => Console.WriteLine(level + ": " + msg);
            AVLog.Level     = (int)AVLogLevel.Trace;

            // Open the decoder for the input stream.
            _codecContext.Open2(AVCodec.FindDecoder(_codecContext.CodecId));
            AVPacket packet;
            var      frame = new AVFrame();

            // Set up a PNG encoder with the same dimensions as the decoded video,
            // but in RGB24 (PNG cannot take the decoder's YUV output directly).
            var codec         = AVCodec.FindEncoderByName("png");
            var encodeContext = new AVCodecContext(codec)
            {
                PixelFormat = AVPixelFormat.RGB24,
                Width       = _codecContext.Width,
                Height      = _codecContext.Height
            };

            encodeContext.Open2(codec);

            // Conversion context and target buffer for the YUV420P -> RGB24 step.
            var convContext = SWSContext.GetContext(_codecContext.Width, _codecContext.Height, AVPixelFormat.YUV420P,
                                                    AVPixelFormat.RGB24);
            var convBuffer = new AVPicture(AVPixelFormat.RGB24, _codecContext.Width, _codecContext.Height).AsFrame();

            int frameCounter = 0; // frames the PNG encoder produced
            int readCounter  = 0; // complete pictures decoded from stream 0

            while ((packet = _formatContext.ReadFrame()) != null)
            {
                // Only stream 0 carries the video under test.
                if (packet.StreamIndex != 0)
                {
                    continue;
                }
                bool pic = _codecContext.DecodeVideo2(packet, frame);
                if (!pic)
                {
                    // Packet did not complete a picture (e.g. decoder delay).
                    continue;
                }

                readCounter++;
                // Convert the decoded frame to RGB before handing it to the encoder.
                convContext.Scale(frame, convBuffer, 0, _codecContext.Height);

                var outPacket = encodeContext.EncodeVideo2(convBuffer);
                if (outPacket != null)
                {
                    frameCounter++;

                    // for checking the frames, not part of normal test
                    //var data = new byte[outPacket.Size];
                    //Marshal.Copy(outPacket.Data, data, 0, data.Length);
                    //File.WriteAllBytes((frameCounter++) + ".png", data);
                }
            }

            // Every decoded frame must have been encoded, and the test clip is
            // expected to contain exactly 245 decodable frames.
            Assert.AreEqual(readCounter, frameCounter);
            Assert.AreEqual(245, readCounter);

            encodeContext.Close();
            _codecContext.Close();
        }
Beispiel #6
0
 public static AVCodecContext AllocContext3(AVCodec codec)
 {
     // Wraps avcodec_alloc_context3; returns null when the native allocation fails.
     AutoGen.AVCodecContext* native = FFmpegInvoke.avcodec_alloc_context3(codec.NativeObj);

     return native == null ? null : new AVCodecContext(native);
 }
Beispiel #7
0
        public void Properties_ReturnNativeValues()
        {
            // Resolve the real H.264 decoder and wrap it in a managed AVCodec.
            var client = new FFmpegClient();

            var decoderHandle = client.FindDecoder(AVCodecID.AV_CODEC_ID_H264);
            var contextStruct = new NativeAVCodecContext
            {
            };

            var wrapper = new AVCodec(client, &contextStruct, (NativeAVCodec *)decoderHandle);

            // The managed properties must mirror the native codec's fields.
            Assert.Equal("h264", wrapper.Name);
            Assert.Equal("H.264 / AVC / MPEG-4 AVC / MPEG-4 part 10", wrapper.LongName);
            Assert.Equal("H.264 / AVC / MPEG-4 AVC / MPEG-4 part 10 (h264)", wrapper.ToString());
            Assert.Equal(AVCodecCapabilities.DR1 | AVCodecCapabilities.Delay | AVCodecCapabilities.Threads | AVCodecCapabilities.SliceThreads, wrapper.Capabilities);
            Assert.True(wrapper.IsDecoder);
            Assert.False(wrapper.IsEncoder);
        }
Beispiel #8
0
        public void Id_ReturnsNativeId()
        {
            // The wrapper's Id property must surface the id stored in the native struct.
            var native = new NativeAVCodec
            {
                id = AVCodecID.AV_CODEC_ID_4XM,
            };
            var contextStruct = new NativeAVCodecContext
            {
            };

            var client  = new Mock <FFmpegClient>().Object;
            var wrapper = new AVCodec(client, &contextStruct, &native);

            Assert.Equal(AVCodecID.AV_CODEC_ID_4XM, wrapper.Id);
        }
Beispiel #9
0
        public void Capabilities_ReturnsNativeCapabilities()
        {
            // The native struct stores capabilities as a raw int; the wrapper must
            // expose them as the typed flags enum, unchanged.
            var expected = AVCodecCapabilities.DR1 | AVCodecCapabilities.Delay | AVCodecCapabilities.Threads | AVCodecCapabilities.SliceThreads;

            var native = new NativeAVCodec
            {
                capabilities = (int)expected,
            };
            var contextStruct = new NativeAVCodecContext
            {
            };

            var client  = new Mock <FFmpegClient>().Object;
            var wrapper = new AVCodec(client, &contextStruct, &native);

            Assert.Equal(expected, wrapper.Capabilities);
        }
Beispiel #10
0
        public void AVCodec_InitializesInstance()
        {
            // Native codec whose capabilities advertise truncated-packet support;
            // the constructor is expected to copy this into the context's flags.
            var nativeCodec = new NativeAVCodec()
            {
                capabilities = (int)AVCodecCapabilities.Truncated,
            };

            var ffmpegMock = new Mock <FFmpegClient>();

            // Return a pointer to the stack-allocated struct above when the codec
            // is looked up by id.
            ffmpegMock
            .Setup(c => c.FindDecoder(AVCodecID.AV_CODEC_ID_H264))
            .Returns((IntPtr)(&nativeCodec))
            .Verifiable();

            var codecParameters = new NativeAVCodecParameters
            {
                codec_type = NativeAVMediaType.AVMEDIA_TYPE_VIDEO,
                codec_id   = AVCodecID.AV_CODEC_ID_H264,
            };

            var nativeCodecContext = new NativeAVCodecContext()
            {
                codec_id = AVCodecID.AV_CODEC_ID_H264,
            };

            // AVStream.codec is deprecated in newer FFmpeg APIs, hence the pragma.
#pragma warning disable CS0618 // Type or member is obsolete
            var nativeStream = new NativeAVStream
            {
                codecpar = &codecParameters,
                codec    = &nativeCodecContext,
            };

            var stream = new AVStream(&nativeStream);

            var ffmpeg = ffmpegMock.Object;
            var codec  = new AVCodec(ffmpeg, stream);

            // The constructor should have propagated the Truncated capability into
            // the stream's codec-context flags.
            Assert.Equal((int)AVCodecCapabilities.Truncated, stream.CodecContext->flags);
#pragma warning restore CS0618 // Type or member is obsolete
        }
Beispiel #11
0
        /// <summary>
        /// Decodes a single packet built from <paramref name="Data"/> with the given
        /// codec and hands the decoded object (cast to <typeparamref name="TType"/>)
        /// to <paramref name="Action"/>.
        /// </summary>
        /// <param name="Data">The raw packet bytes to decode.</param>
        /// <param name="AVCodec">The codec used to decode the packet.</param>
        /// <param name="Action">Callback invoked with the context, packet and decoded value.</param>
        /// <returns>The raw decoded object.</returns>
        /// <exception cref="InvalidOperationException">The codec reported a decode error.</exception>
        private object Decode <TType>(byte[] Data, AVCodec AVCodec, Action <AVCodecContext, AVPacket, TType> Action)
        {
            var context = new AVCodecContext();
            var packet  = new AVPacket();

            // Wrap the managed byte[] in native memory the decoder can read.
            packet.data = Pointer <byte> .Create(new AllocatedMemory(Data));

            packet.size = Data.Length;

            // Allocate one plane of width * 4 bytes per row for decoded frames.
            context.get_buffer = (codecContext, avFrame) =>
            {
                var width  = codecContext.width;
                var height = codecContext.height;
                avFrame.linesize[0] = width * 4;
                avFrame.data[0]     = CLib.malloc(avFrame.linesize[0] * height);
                return 0;
            };

            context.release_buffer = (codecContext, avFrame) =>
            {
                CLib.free(avFrame.data[0]);
            };

            AVCodec.init(context);
            try
            {
                object obj    = null;
                var    result = AVCodec.decode(context, ref obj, packet);
                if (result < 0)
                {
                    // Surface the native error code instead of a bare, messageless Exception.
                    throw new InvalidOperationException("AVCodec.decode failed with error code " + result + ".");
                }
                Action(context, packet, (TType)obj);
                return obj;
            }
            finally
            {
                // Always release the codec, even when decoding or the callback throws.
                AVCodec.close(context);
            }
        }
Beispiel #12
0
        public void IsEncoder_ReturnsNativeIsDecoder()
        {
            // A default-initialized native codec carries no decode capability, so
            // the managed wrapper must report IsDecoder == false.
            // (The unused `name` byte array and `fixed` pin that this test had
            // copied from Name_ReturnsNativeName have been removed — nothing here
            // needs pinned memory.)
            var nativeCodec = new NativeAVCodec
            {
            };

            var ffmpegMock   = new Mock <FFmpegClient>();
            var ffmpegClient = ffmpegMock.Object;

            var codecContext = new NativeAVCodecContext
            {
            };

            var codec = new AVCodec(ffmpegClient, &codecContext, &nativeCodec);

            // NOTE(review): the test name says "IsEncoder" but the assertion checks
            // IsDecoder — confirm which property this test is meant to cover.
            Assert.False(codec.IsDecoder);
        }
Beispiel #13
0
        public void Name_ReturnsNativeName()
        {
            // The native name field is a C string pointer; pin a managed buffer
            // holding "test" to stand in for it.
            var nameBytes = new byte[] { (byte)'t', (byte)'e', (byte)'s', (byte)'t' };

            fixed(byte *namePtr = nameBytes)
            {
                var native = new NativeAVCodec
                {
                    name = namePtr,
                };
                var contextStruct = new NativeAVCodecContext
                {
                };

                var client  = new Mock <FFmpegClient>().Object;
                var wrapper = new AVCodec(client, &contextStruct, &native);

                Assert.Equal("test", wrapper.Name);
            }
        }
Beispiel #14
0
        /// <summary>
        /// Enumerates the video decoder profiles exposed by <paramref name="videoDevice"/>
        /// and yields those that support output in <c>DecoderOuputFormat</c>.
        /// </summary>
        /// <param name="videoDevice">The device whose decoder profiles are inspected.</param>
        /// <param name="codec">The codec to match (currently unused; see TODO).</param>
        /// <returns>The GUIDs of the compatible decoder profiles.</returns>
        private static IEnumerable <Guid> FindVideoFormatCompatibleProfiles(VideoDevice videoDevice, AVCodec codec)
        {
            for (var i = 0; i < videoDevice.VideoDecoderProfileCount; ++i)
            {
                videoDevice.GetVideoDecoderProfile(i, out var profile);

                // TODO Check profile id

                videoDevice.CheckVideoDecoderFormat(profile, DecoderOuputFormat, out var supported);
                if (supported)
                {
                    yield return profile;
                }
            }
            // (removed the redundant trailing `yield break;` — the iterator ends
            // naturally when the loop completes; also fixed the `suppported` typo)
        }
Beispiel #15
0
 public static bool Open2(AVCodecContext context, AVCodec codec)
 {
     // avcodec_open2 signals success with a non-negative return value.
     int result = FFmpegInvoke.avcodec_open2(context.NativeObj, codec.NativeObj, null);

     return result >= 0;
 }
Beispiel #16
0
        /// <summary>
        /// Is invoked, when the application is started. Opens a user-specified video
        /// file, decodes its first video stream and dumps frames 25-30 as PPM files.
        /// </summary>
        /// <param name="args">The command line arguments, that were passed to the application.</param>
        public static void Main(string[] args)
        {
            // Initializes the codecs and formats
            LibAVFormat.av_register_all();

            // Asks the user for a file name to load
            Console.Write("File name: ");
            string fileName = Console.ReadLine();

            // Loads a video
            IntPtr formatContextPointer;

            if (LibAVFormat.avformat_open_input(out formatContextPointer, fileName, IntPtr.Zero, IntPtr.Zero) < 0)
            {
                Console.WriteLine($"An error occurred while opening the video: {fileName}.");
                return;
            }
            AVFormatContext formatContext = Marshal.PtrToStructure <AVFormatContext>(formatContextPointer);

            Console.WriteLine($"Opened video file {formatContext.filename}.");

            // Retrieve stream information of the video
            if (LibAVFormat.avformat_find_stream_info(formatContextPointer, IntPtr.Zero) < 0)
            {
                Console.WriteLine("An error occurred while retrieving the stream information of the video.");
                return;
            }

            // Finds the first video stream in the video
            Console.WriteLine($"Found {formatContext.nb_streams} stream(s) in the video file.");
            int videoStreamId = -1;

            for (int i = 0; i < formatContext.nb_streams; i++)
            {
                // formatContext.streams is a native AVStream** — read the i-th
                // pointer, then marshal the stream and its codec context.
                AVStream       stream       = Marshal.PtrToStructure <AVStream>(Marshal.PtrToStructure <IntPtr>(IntPtr.Add(formatContext.streams, i * IntPtr.Size)));
                AVCodecContext codecContext = Marshal.PtrToStructure <AVCodecContext>(stream.codec);
                if (codecContext.codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
                {
                    videoStreamId = i;
                    break;
                }
            }
            if (videoStreamId == -1)
            {
                Console.WriteLine("No video stream found.");
                return;
            }
            AVStream       videoStream       = Marshal.PtrToStructure <AVStream>(Marshal.PtrToStructure <IntPtr>(IntPtr.Add(formatContext.streams, videoStreamId * IntPtr.Size)));
            AVCodecContext videoCodecContext = Marshal.PtrToStructure <AVCodecContext>(videoStream.codec);

            // Finds the decoder for the video stream
            IntPtr codecPointer = LibAVCodec.avcodec_find_decoder(videoCodecContext.codec_id);

            if (codecPointer == IntPtr.Zero)
            {
                Console.WriteLine("The video codec is not supported.");
                return;
            }
            AVCodec videoCodec = Marshal.PtrToStructure <AVCodec>(codecPointer);

            Console.WriteLine($"Using the {videoCodec.long_name} codec to decode the video stream.");

            // Opens the codec for the video stream
            if (LibAVCodec.avcodec_open2(videoStream.codec, codecPointer, IntPtr.Zero) < 0)
            {
                // Fixed: the original string was missing the '$' interpolation
                // prefix and printed the literal text "{videoCodec.long_name}".
                Console.WriteLine($"The codec {videoCodec.long_name} could not be opened.");
                return;
            }
            Console.WriteLine("Successfully loaded codec.");

            // Allocates video frames for the original decoded frame and the frame in RGB (which is then later stored in a file)
            IntPtr framePointer    = LibAVUtil.av_frame_alloc();
            IntPtr frameRgbPointer = LibAVUtil.av_frame_alloc();

            // Determines the required buffer size and allocates the buffer for the RGB frame
            int    numBytes = LibAVCodec.avpicture_get_size(AVPixelFormat.AV_PIX_FMT_RGB24, videoCodecContext.width, videoCodecContext.height);
            IntPtr buffer   = LibAVUtil.av_malloc(new UIntPtr((uint)(numBytes * sizeof(byte))));

            // Assigns appropriate parts of buffer to image planes in frameRgb, note that frameRgb is an AVFrame, but AVFrame is a superset of AVPicture
            LibAVCodec.avpicture_fill(frameRgbPointer, buffer, AVPixelFormat.AV_PIX_FMT_RGB24, videoCodecContext.width, videoCodecContext.height);
            AVFrame frameRgb = Marshal.PtrToStructure <AVFrame>(frameRgbPointer);

            // Cycles over all frames of the video and dumps the frames to file
            Console.WriteLine("Decoding video frames...");
            int    frameIndex    = 0;
            IntPtr packetPointer = Marshal.AllocHGlobal(Marshal.SizeOf <AVPacket>());

            while (LibAVFormat.av_read_frame(formatContextPointer, packetPointer) >= 0)
            {
                AVPacket packet = Marshal.PtrToStructure <AVPacket>(packetPointer);
                if (packet.stream_index == videoStreamId)
                {
                    // Decodes video frame
                    int frameFinished = 0;
                    LibAVCodec.avcodec_decode_video2(videoStream.codec, framePointer, ref frameFinished, packetPointer);
                    AVFrame frame = Marshal.PtrToStructure <AVFrame>(framePointer);

                    // Checks if the video frame was properly decoded
                    if (frameFinished != 0)
                    {
                        // Converts the image from its native format to RGB
                        IntPtr scaleContextPointer = LibSwScale.sws_getContext(videoCodecContext.width, videoCodecContext.height, videoCodecContext.pix_fmt,
                                                                               videoCodecContext.width, videoCodecContext.height, AVPixelFormat.AV_PIX_FMT_RGB24, ScalingFlags.SWS_BILINEAR, IntPtr.Zero,
                                                                               IntPtr.Zero, IntPtr.Zero);
                        LibSwScale.sws_scale(scaleContextPointer, frame.data, frame.linesize, 0, videoCodecContext.height, frameRgb.data, frameRgb.linesize);
                        frameRgb = Marshal.PtrToStructure <AVFrame>(frameRgbPointer);

                        // Checks if this is one of frames 25 through 30; if so, it is stored to disk
                        frameIndex++;
                        if (frameIndex > 24 && frameIndex <= 30)
                        {
                            Console.WriteLine($"Writing frame {frameIndex} to file...");
                            string frameFileName = Path.Combine(Path.GetDirectoryName(fileName), $"frame-{frameIndex}.ppm");
                            Program.SaveFrame(frameRgb, videoCodecContext.width, videoCodecContext.height, frameFileName);
                        }
                    }
                }

                // Frees the packet that was allocated by av_read_frame
                LibAVCodec.av_free_packet(packetPointer);
            }
            Console.WriteLine("Finished decoding of the video.");

            // Frees and closes all acquired resources
            LibAVUtil.av_free(buffer);
            LibAVUtil.av_free(frameRgbPointer);
            LibAVUtil.av_free(framePointer);
            LibAVCodec.avcodec_close(videoStream.codec);

            // avformat_close_input takes an AVFormatContext**, so write the context
            // pointer into a temporary native pointer-to-pointer first.
            IntPtr formatContextPointerPointer = Marshal.AllocHGlobal(Marshal.SizeOf <IntPtr>());

            Marshal.StructureToPtr(formatContextPointer, formatContextPointerPointer, false);
            LibAVFormat.avformat_close_input(formatContextPointerPointer);
            Marshal.FreeHGlobal(formatContextPointerPointer);
            Console.WriteLine("Freed all acquired resources.");
        }