Example 1
        /// <summary>
        /// Create an output codec using the settings found in the VideoInputData.
        /// </summary>
        /// <param name="videoInputData">The input video settings, probably found in the source packet.</param>
        /// <returns>The output codec settings (RAW video codec).</returns>
        private VideoOuputData CreateOutputCodec(VideoInputData videoInputData)
        {
            // open output codec
            IntPtr pOutputCodec = _ffmpeg.AvcodecFindEncoder(CodecId.CodecIdRawvideo);

            if (pOutputCodec == IntPtr.Zero)
            {
#if DEBUG
                Log.Info("Could not load output codec.");
#endif
                return(null);
            }

            // Setup target encoding context (output settings)
            VideoOuputData videoOutputData    = new VideoOuputData(_ffmpeg, _ffmpeg.AvcodecAllocContext());
            AvCodecContext outputCodecContext = PtrToStructure <AvCodecContext>(videoOutputData.POutputCodecContext);
            outputCodecContext.Width  = videoInputData.Width;
            outputCodecContext.Height = videoInputData.Height;
            outputCodecContext.PixFmt = OutputPixelFormat;

            Marshal.StructureToPtr(outputCodecContext, videoOutputData.POutputCodecContext, false);
            if (_ffmpeg.AvcodecOpen(videoOutputData.POutputCodecContext, pOutputCodec) < 0)
            {
#if DEBUG
                Log.Info("Could not open output codec.");
#endif
                return(null);
            }
            return(videoOutputData);
        }
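Example 1 relies on a copy-edit-write-back pattern around the unmanaged codec context: PtrToStructure copies the native AVCodecContext into a managed struct, the managed copy is edited, and Marshal.StructureToPtr writes it back before the native AvcodecOpen call. A minimal stand-alone sketch of that pattern, assuming AvCodecContext is an ordinary marshalable struct; UpdateContextDimensions is an illustrative helper name, not part of the original source:

        // Hypothetical helper showing the copy-edit-write-back marshaling pattern used in Example 1.
        private static void UpdateContextDimensions(IntPtr pCodecContext, int width, int height)
        {
            // Copy the unmanaged AVCodecContext into a managed struct (System.Runtime.InteropServices.Marshal).
            AvCodecContext context = Marshal.PtrToStructure<AvCodecContext>(pCodecContext);

            // Edit the managed copy; the unmanaged memory is still unchanged at this point.
            context.Width  = width;
            context.Height = height;

            // Write the modified copy back so the native side sees the new values.
            Marshal.StructureToPtr(context, pCodecContext, false);
        }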
Example 2
 public VideoInputData(IFFmpeg ffmpeg, IntPtr pInputFormatContext, IntPtr pInputCodecContext, AvCodecContext videoCodecContext, int videoStartIndex)
 {
     _ffmpeg = ffmpeg;
     _pInputFormatContext = pInputFormatContext;
     _pInputCodecContext  = pInputCodecContext;
     _videoCodecContext   = videoCodecContext;
     _videoStartIndex     = videoStartIndex;
 }
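Example 1 reads videoInputData.Width and videoInputData.Height, so this holder presumably exposes the stored codec context through read-only accessors. The property bodies are not part of the snippet; a minimal sketch under that assumption:

 // Assumed accessors: Example 1 reads these, but their bodies are not shown in the original snippet.
 public int Width  { get { return _videoCodecContext.Width; } }
 public int Height { get { return _videoCodecContext.Height; } }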
Example 3
        /// <summary>
        /// Creates an input codec, based on the settings found in the dataPacket/headerPacket.
        /// </summary>
        /// <param name="dataPacket">Contains the frame Data.</param>
        /// <param name="headerPacket">Contains the frame header Data.</param>
        /// <returns>The input codec settings, or null when the packet cannot be decoded.</returns>
        private VideoInputData CreateCodecFromPacket(IResultNode dataPacket, IResultNode headerPacket)
        {
            // Load the frame data from the packet into the byte buffer
            if (!PacketDataToBuffer(dataPacket, headerPacket))
            {
#if DEBUG
                Log.Info("No frame header information is available.");
#endif
                return(null);
            }

            IntPtr pInputFormatContext;
            if (_ffmpeg.AvOpenInputFile(out pInputFormatContext, DefraserProtocolPrefix + _convertId, IntPtr.Zero, 0, IntPtr.Zero) < 0)
            {
#if DEBUG
                Log.Info("Could not open input file.");
#endif
                return(null);
            }

            if (_ffmpeg.AvFindStreamInfo(pInputFormatContext) < 0)
            {
#if DEBUG
                Log.Info("Could not find (input)stream information.");
#endif
                return(null);
            }

            VideoInputData  videoInputData = null;
            AvFormatContext formatContext  = PtrToStructure <AvFormatContext>(pInputFormatContext);
            for (int i = 0; i < formatContext.NbStreams; ++i)
            {
                AvStream       stream = PtrToStructure <AvStream>(formatContext.Streams[i]);
                AvCodecContext codec  = PtrToStructure <AvCodecContext>(stream.Codec);

                if (codec.CodecType == CodecType.CodecTypeVideo)
                {
                    AvCodecContext videoCodecContext = PtrToStructure <AvCodecContext>(stream.Codec);
                    IntPtr         pVideoCodec       = _ffmpeg.AvcodecFindDecoder(videoCodecContext.CodecId);
                    if (pVideoCodec == IntPtr.Zero)
                    {
#if DEBUG
                        Log.Info("could not find input codec");
#endif
                        return(null);
                    }

                    if (_ffmpeg.AvcodecOpen(stream.Codec, pVideoCodec) < 0)
                    {
#if DEBUG
                        Log.Info("Could not open input codec.");
#endif
                        return(null);
                    }

                    // setup object used to decode (input) frames
                    videoInputData = new VideoInputData(_ffmpeg, pInputFormatContext, stream.Codec, PtrToStructure <AvCodecContext>(stream.Codec), i);

                    break;                     // stop searching for the video stream
                }
            }

            if (videoInputData == null)
            {
#if DEBUG
                Log.Info("Could not find video stream.");
#endif
                return(null);
            }
            return(videoInputData);
        }
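Examples 1 and 3 are naturally paired: the decoder settings recovered from the packet drive the raw-video encoder settings. A minimal sketch of a hypothetical caller; SetupConversion is an illustrative name, not part of the original source:

        // Hypothetical caller wiring Example 3 (input/decoder) to Example 1 (output/encoder).
        private VideoOuputData SetupConversion(IResultNode dataPacket, IResultNode headerPacket)
        {
            // Build the decoder side from the packet data; null means the packet could not be decoded.
            VideoInputData videoInputData = CreateCodecFromPacket(dataPacket, headerPacket);
            if (videoInputData == null)
            {
                return null;
            }

            // Derive the raw-video encoder settings from the input dimensions.
            return CreateOutputCodec(videoInputData);
        }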
Example 4
        public static void Main(string[] args)
        {
            WrapperUtils.RegisterLibrariesPathSimple("ffmpeg-x64", "ffmpeg-x86");
            AvFormat.RegisterAll();
            AvCodec.RegisterAll();
            AvFormat.NetworkInit();

            AvFormatContext format = AvFormatContext.Allocate();

            if (!format.OpenInput(@"http://www.quirksmode.org/html5/videos/big_buck_bunny.mp4"))
            {
                throw new Exception("Failed to open file :(");
            }
            if (!format.FindStreamInfo())
            {
                throw new Exception("Failed to find stream info :(");
            }

            AvStream pStream = null;

            foreach (AvStream avStream in format.Streams)
            {
                if (avStream.Codec.Type == AvMediaType.Video)
                {
                    pStream = avStream;
                    break;
                }
            }

            if (pStream == null)
            {
                throw new Exception("Could not find video stream :(");
            }

            AvCodecContext codecContext = pStream.Codec;

            int           width        = codecContext.Width;
            int           height       = codecContext.Height;
            AvPixelFormat sourceFormat = codecContext.PixelFormat;
            AvPixelFormat targetFormat = AvPixelFormat.Bgr24;

            SwsContext convertContext = SwsContext.Get(width, height, sourceFormat, width, height, targetFormat,
                                                       SwsFlags.FastBilinear);

            if (convertContext == null)
            {
                throw new Exception("Could not initialize the conversion context");
            }

            AvFrame     convertedFrame           = AvFrame.Allocate();
            int         convertedFrameBufferSize = AvPicture.GetSize(targetFormat, width, height);
            SByteBuffer convertedFrameBuffer     = AvUtil.Malloc((ulong)convertedFrameBufferSize);

            ((AvPicture)convertedFrame).Fill(convertedFrameBuffer, targetFormat, width, height);

            AvCodec codec = AvCodec.FindDecoder(codecContext.Id);

            if (codec == null)
            {
                throw new Exception("Unsupported codec");
            }

            if (codec.HasCapability(CodecCapabilities.Truncated))
            {
                codecContext.Flags |= CodecFlags.Truncated;
            }

            if (!codecContext.Open2(codec))
            {
                throw new Exception("Could not open codec");
            }

            AvFrame frame = AvFrame.Allocate();

            AvPacket packet = AvPacket.Create();

            packet.Init();

            int frameNumber = 0;

            while (frameNumber < 500)
            {
                if (!format.ReadFrame(packet))
                {
                    throw new Exception("Could not read frame!");
                }

                if (packet.StreamIndex != pStream.Index)
                {
                    continue;
                }

                int gotPicture;
                int size = codecContext.DecodeVideo2(frame, out gotPicture, packet);
                if (size < 0)
                {
                    throw new Exception("Error while decoding frame " + frameNumber);
                }

                if (gotPicture == 1)
                {
                    Console.WriteLine($"Frame: {frameNumber}");

                    SByteBufferArray src       = frame.Data;
                    SByteBufferArray dst       = convertedFrame.Data;
                    IntArray         srcStride = frame.LineSize;
                    IntArray         dstStride = convertedFrame.LineSize;
                    convertContext.Scale(src, srcStride, 0, height, dst, dstStride);

                    int linesize = dstStride[0];
                    using (
                        Bitmap bitmap = new Bitmap(width, height, linesize,
                                                   PixelFormat.Format24bppRgb, convertedFrame.Data0))
                    {
                        bitmap.Save(@"frame.buffer." + frameNumber + ".jpg", ImageFormat.Jpeg);
                    }
                    frameNumber++;
                }
            }

            convertedFrame.Free();
            convertedFrameBuffer.Free();
            convertContext.Free();
            frame.Free();
            codecContext.Close();
            format.CloseInput();
        }
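The video-stream lookup in Example 4 can also be written with LINQ. This is only a stylistic alternative, not part of the original sample; it assumes format.Streams implements IEnumerable<AvStream> (the snippet only shows it being enumerated with foreach) and requires using System.Linq:

            // Alternative stream selection using LINQ instead of the foreach loop above.
            AvStream pStream = format.Streams.FirstOrDefault(s => s.Codec.Type == AvMediaType.Video);

            if (pStream == null)
            {
                throw new Exception("Could not find video stream :(");
            }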