Code example #1
        public virtual int init(int[] extraData)
        {
            context = MpegEncContext.avcodec_alloc_context();

            picture = AVFrame.avcodec_alloc_frame();

            packet = new AVPacket();
            packet.av_init_packet();

            if (extraData != null)
            {
                context.extradata_size = extraData.Length;
                // Pad with 4 extra entries so the bitstream parser can read past the
                // end of the extradata without going out of bounds
                int[] extraDataPlus4 = new int[context.extradata_size + 4];
                Array.Copy(extraData, 0, extraDataPlus4, 0, context.extradata_size);
                context.extradata = extraDataPlus4;
            }

            int result = context.avcodec_open(new com.twilight.h264.decoder.H264Decoder());

            if (result < 0)
            {
                return(result);
            }

            gotPicture[0] = 0;

            return(0);
        }
Code example #2
    public bool AvFrameToImageByteArray(AVFrame frame, out byte[] pngData)
    {
        AVCodec *       outCodec    = ffmpeg.avcodec_find_encoder(AVCodecID.AV_CODEC_ID_PNG);
        AVCodecContext *outCodecCtx = ffmpeg.avcodec_alloc_context3(outCodec);

        outCodecCtx->width         = _pCodecContext->width;
        outCodecCtx->height        = _pCodecContext->height;
        outCodecCtx->pix_fmt       = AVPixelFormat.AV_PIX_FMT_RGB24;
        outCodecCtx->codec_type    = AVMediaType.AVMEDIA_TYPE_VIDEO;
        outCodecCtx->time_base.num = _pCodecContext->time_base.num;
        outCodecCtx->time_base.den = _pCodecContext->time_base.den;

        if (ffmpeg.avcodec_open2(outCodecCtx, outCodec, null) < 0)
        {
            ffmpeg.avcodec_free_context(&outCodecCtx);
            pngData = new byte[] { };
            return(false);
        }

        AVPacket outPacket = new AVPacket();

        ffmpeg.av_init_packet(&outPacket);
        outPacket.size = 0;
        outPacket.data = null;

        // Check both calls; on failure the packet holds no data and must not be read.
        if (ffmpeg.avcodec_send_frame(outCodecCtx, &frame) < 0 ||
            ffmpeg.avcodec_receive_packet(outCodecCtx, &outPacket) < 0)
        {
            ffmpeg.avcodec_free_context(&outCodecCtx);
            pngData = new byte[] { };
            return(false);
        }

        pngData = new byte[outPacket.size];

        Marshal.Copy((IntPtr)outPacket.data, pngData, 0, outPacket.size);

        // Release the encoded payload and the encoder context so repeated calls
        // do not leak native memory.
        ffmpeg.av_packet_unref(&outPacket);
        ffmpeg.avcodec_free_context(&outCodecCtx);
        return(true);
    }
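A single send/receive pair covers PNG's one-frame-in, one-packet-out case, but the general avcodec contract lets either call return AVERROR(EAGAIN). A minimal drain-loop sketch under the same FFmpeg.AutoGen bindings; EncodeAndDrain is a hypothetical helper, not part of the original class:

    private static unsafe void EncodeAndDrain(AVCodecContext *ctx, AVFrame *frame, List<byte[]> output)
    {
        // Passing a null frame here would instead put the encoder into flush mode.
        int ret = ffmpeg.avcodec_send_frame(ctx, frame);
        if (ret < 0)
        {
            throw new ApplicationException($"avcodec_send_frame failed: {ret}");
        }

        AVPacket *pkt = ffmpeg.av_packet_alloc();
        try
        {
            while (true)
            {
                ret = ffmpeg.avcodec_receive_packet(ctx, pkt);
                if (ret == ffmpeg.AVERROR(ffmpeg.EAGAIN) || ret == ffmpeg.AVERROR_EOF)
                {
                    break; // needs more input, or fully flushed
                }
                if (ret < 0)
                {
                    throw new ApplicationException($"avcodec_receive_packet failed: {ret}");
                }

                var chunk = new byte[pkt->size];
                Marshal.Copy((IntPtr)pkt->data, chunk, 0, pkt->size);
                output.Add(chunk);

                ffmpeg.av_packet_unref(pkt); // release the payload for the next iteration
            }
        }
        finally
        {
            ffmpeg.av_packet_free(&pkt);
        }
    }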
Code example #3
        public bool WriteVideoFrame(Bitmap bitmap)
        {
            int             ret;
            AVCodecContext *c = videoStream.enc;
            AVFrame *       frame;
            int             got_packet = 0;
            AVPacket        pkt        = new AVPacket();

            frame = MakeVideoFrame(bitmap);

            av_init_packet(&pkt);

            ret = avcodec_encode_video2(c, &pkt, frame, &got_packet);
            if (ret < 0)
            {
                throw new FFmpegException("Error encoding video frame", ret);
            }

            if (got_packet != 0)
            {
                ret = WriteFrame(&c->time_base, videoStream.st, &pkt);
            }
            else
            {
                ret = 0;
            }

            if (ret < 0)
            {
                throw new FFmpegException("Error writing video frame", ret);
            }

            // true only once the encoder is fully drained: no input frame and no buffered packet
            return(frame == null && got_packet == 0);
        }
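The return value above only becomes true once both the input and the encoder's internal buffer are exhausted, which is exactly the end-of-stream flush case. A hedged sketch of that drain step with the same legacy avcodec_encode_video2 API; FlushEncoder is hypothetical and assumes the surrounding class's videoStream and WriteFrame members:

        private void FlushEncoder()
        {
            AVCodecContext *c = videoStream.enc;
            int got_packet = 1;

            while (got_packet != 0)
            {
                AVPacket pkt = new AVPacket();
                av_init_packet(&pkt);

                // A null frame asks the encoder to emit any buffered packets.
                int ret = avcodec_encode_video2(c, &pkt, null, &got_packet);
                if (ret < 0)
                {
                    throw new FFmpegException("Error flushing encoder", ret);
                }

                if (got_packet != 0)
                {
                    WriteFrame(&c->time_base, videoStream.st, &pkt);
                }
            }
        }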
Code example #4
        protected override AVFrame DecodeFrameFromPacket(AVPacket avpkt, out int len)
        {
            len = Context.avcodec_decode_video2(picture, got_picture, avpkt);
            //Console.WriteLine(FrameCrc.GetFrameLine(avpkt));
            if (len < 0)
            {
                //ConsoleUtils.SaveRestoreConsoleColor(ConsoleColor.Red, () => { Console.WriteLine("Error while decoding frame: {0}, error: {1}", frame, this.len); });
                // Discard current packet and proceed to next packet
                return(null);
            }

            if (got_picture[0] != 0)
            {
                picture = Context.priv_data.displayPicture;

                //ConsoleUtils.SaveRestoreConsoleColor(ConsoleColor.Magenta, () => { Console.WriteLine("Generated picture"); });
                //int bufferSize = picture.imageWidth * picture.imageHeight;
                //if (buffer == null || bufferSize != buffer.Length)
                //{
                //	buffer = new int[bufferSize];
                //}
                return(picture);
            }
            else
            {
                //ConsoleUtils.SaveRestoreConsoleColor(ConsoleColor.DarkMagenta, () => { Console.WriteLine("NOT Generated picture"); });
                return(null);
            }
        }
Code example #5
        protected override bool DecodePacket(ref AVPacket packet)
        {
            // decode video frame
            bool frameFinished = false;
            int  byteCount     = FFmpeg.avcodec_decode_video2(ref m_avCodecCtx, m_avFrame, out frameFinished, ref packet);

            if (byteCount < 0)
            {
                throw new DecoderException("Couldn't decode frame");
            }

            // copy data into our managed buffer
            if (m_avFrame->data[0] == IntPtr.Zero)
            {
                m_bufferUsedSize = 0;
            }
            else
            {
                m_bufferUsedSize = FFmpeg.avpicture_layout((AVPicture *)m_avFrame, PixelFormat, Width, Height, m_buffer, m_buffer.Length);
            }

            if (m_bufferUsedSize < 0)
            {
                throw new DecoderException("Error copying decoded frame into managed memory");
            }

            return(frameFinished);
        }
Code example #6
 /// <summary>
 /// Free a packet
 /// </summary>
 /// <param name="pAVPacket">packet to free</param>
 /// <remarks>
 /// Inline function defined in avformat.h; its body must be duplicated here because inline
 /// functions cannot be DllImported
 /// </remarks>
 public static void av_free_packet(ref AVPacket packet)
 {
     if (packet.destruct != null)
     {
         packet.destruct(ref packet);
     }
 }
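A typical packet lifetime when using this wrapper, sketched on the assumption that the same managed FFmpeg class also binds av_init_packet and av_read_frame:

 // Init, fill via av_read_frame, then free in a finally block so the payload
 // is released even when consumption throws.
 AVPacket packet = new AVPacket();
 FFmpeg.av_init_packet(ref packet);
 try
 {
     if (FFmpeg.av_read_frame(ref formatContext, ref packet) < 0)
     {
         return;
     }
     // ... consume packet.data / packet.size ...
 }
 finally
 {
     av_free_packet(ref packet); // safe even when packet.destruct is null
 }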
Code example #7
            public AVPacket getPacket()
            {
                // Ring-buffer read: take the next pooled packet, wrapping at the
                // pool size (hard-coded to 60 entries here).
                AVPacket packet = packetPool[get++];

                get = get % 60;
                return(packet);
            }
Code example #8
File: MediaStream.cs Project: ispysoftware/iSpy
        public void CheckSleep(AVPacket packet, long pts, AVStream *refStream)
        {
            if (packet.stream_index == refStream->index)
            {
                if (pts > 0)
                {
                    if (_firstTimestamp == 0)
                    {
                        _start          = DateTime.UtcNow;
                        _firstTimestamp = pts;
                    }

                    if (!_checkSleep)
                    {
                        _clock.WaitOne(10); //don't hammer it
                        return;
                    }

                    var ratio   = (Convert.ToDouble(refStream->time_base.num) / Convert.ToDouble(refStream->time_base.den)) * 1000;
                    var ptsbase = pts * ratio;
                    var ptsref  = _firstTimestamp * ratio;

                    var ms      = (DateTime.UtcNow - _start).TotalMilliseconds;
                    int msSleep = (int)Math.Max(0, (ptsbase - ptsref) - ms);

                    if (msSleep > 0)
                    {
                        _clock.WaitOne(msSleep);
                    }
                }
            }
        }
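The ratio above is simply the stream time base expressed in milliseconds. Assuming FFmpeg.AutoGen-style bindings that expose av_q2d, the conversion can be written more directly; a sketch:

        // Milliseconds represented by one pts tick of the reference stream.
        var msPerTick      = ffmpeg.av_q2d(refStream->time_base) * 1000.0;
        var elapsedMediaMs = (pts - _firstTimestamp) * msPerTick;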
Code example #9
        public unsafe int ReadNextFrame(out double seconds, ref byte[] buffer)
        {
            var      stream = _formatContext.SelectedStream.Stream;
            AVPacket packet = new AVPacket();

            FfmpegCalls.InitPacket(&packet);
            int offset = 0;

            do
            {
                try
                {
                    packet.data = null;
                    packet.size = 0;

                    seconds = 0;

                    if (!FfmpegCalls.AvReadFrame(_formatContext, &packet))
                    {
                        break;
                    }
                    //check whether we've picked some other stream (video,...)
                    if (packet.stream_index != _formatContext.SelectedStream.Stream.index)
                    {
                        continue;
                    }

                    seconds = packet.pts * stream.time_base.num / (double)stream.time_base.den;
                    do
                    {
                        int bytesConsumed;
                        try
                        {
                            int bufferLength = DecodePacket(ref buffer, offset, &packet, out bytesConsumed);
                            if (bufferLength == 0)
                            {
                                break;
                            }

                            offset += bufferLength;
                        }
                        catch (FfmpegException)
                        {
                            break;
                        }

                        packet.data += bytesConsumed;
                        packet.size -= bytesConsumed;
                    } while (packet.size > 0);
                }
                finally
                {
                    FfmpegCalls.FreePacket(&packet);
                }
            } while (offset <= 0);

            return(offset);
        }
Code example #10
 public static bool DecodeVideo2(AVCodecContext codecContext, AVFrame frame, out bool frameFinished, AVPacket packet)
 {
     int ffNum = 0;
     bool ok =
         FFmpegInvoke.avcodec_decode_video2(codecContext.NativeObj, frame.NativeObj, &ffNum, packet.NativeObj) >=
         0;
     frameFinished = ffNum > 0;
     return ok;
 }
Code example #11
File: AV.cs Project: nbomeroglu37/FFmpeg.Wrapper
        public static void FreePacket(AVPacket packet)
        {
            FFmpegInvoke.av_free_packet(packet.NativeObj);

            if (packet.Handle != null)
            {
                packet.Handle.Free();
            }
        }
Code example #12
        public static AvPacket Create()
        {
            // Pinning boxes the struct; AddrOfPinnedObject then points at the boxed
            // copy, which native code may write to safely until the handle is freed.
            AVPacket native = new AVPacket();
            GCHandle handle = GCHandle.Alloc(native, GCHandleType.Pinned);

            return(new AvPacket((AVPacket *)handle.AddrOfPinnedObject().ToPointer())
            {
                _handle = handle
            });
        }
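The pinned handle must be released eventually, or the boxed packet stays pinned for the process lifetime. A counterpart sketch, assuming AvPacket owns _handle and implements IDisposable:

        public void Dispose()
        {
            if (_handle.IsAllocated)
            {
                _handle.Free(); // unpin the boxed AVPacket
            }
        }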
Code example #13
            public void clear()
            {
                for (int i = get; i != put; i = (i + 1) % pSize)
                {
                    // Unref the pooled packet in place: unreffing a local copy would
                    // free the shared buffers but leave stale pointers in the pool entry.
                    fixed(AVPacket *p = &packetPool[i])
                    {
                        ffmpeg.av_packet_unref(p);
                    }
                }

                get = 0;
                put = 0;
            }
Code example #14
        /// <summary>
        /// Pulls the next-available frame. This does not queue the frame in either the video or audio queue.
        /// Please keep in mind that you will need to manually call the Release() method on the returned
        /// object once you are done with it. If working with Media Caches, the cache will automatically release the frame
        /// </summary>
        /// <returns></returns>
        /// <exception cref="System.Exception">Error while decoding frame</exception>
        private FFmpegMediaFrame PullMediaFrame()
        {
            // Setup the holding packet
            var readingPacket = new AVPacket();

            ffmpeg.av_init_packet(&readingPacket);
            var readFrameResult = Constants.SuccessCode;
            FFmpegMediaFrame mediaFrameToReturn = null;
            var emptyPacket     = false;
            var receivedFrame   = false;
            var attemptDecoding = false;
            var isVideoPacket   = false;
            var isAudioPacket   = false;

            while (readFrameResult == Constants.SuccessCode || readFrameResult == Constants.EndOfFileErrorCode)
            {
                readFrameResult = ffmpeg.av_read_frame(InputFormatContext, &readingPacket);
                emptyPacket     = readFrameResult == Constants.EndOfFileErrorCode;
                attemptDecoding = (readFrameResult >= Constants.SuccessCode || readFrameResult == Constants.EndOfFileErrorCode);
                isVideoPacket   = HasVideo && readingPacket.stream_index == InputVideoStream->index;
                isAudioPacket   = HasAudio && readingPacket.stream_index == InputAudioStream->index;

                if (attemptDecoding)
                {
                    if (isVideoPacket)
                    {
                        receivedFrame = this.FillDecodedPictureHolderFrame(&readingPacket, emptyPacket);
                        if (receivedFrame)
                        {
                            mediaFrameToReturn = CreateMediaFrameFromDecodedPictureHolder();
                            break;
                        }
                    }
                    else if (isAudioPacket)
                    {
                        receivedFrame = this.FillDecodedWaveHolderFrame(&readingPacket, emptyPacket);
                        if (receivedFrame)
                        {
                            mediaFrameToReturn = CreateMediaFrameFromDecodedWaveHolder();
                            break;
                        }
                    }
                }

                if (receivedFrame == false && readFrameResult == Constants.EndOfFileErrorCode)
                {
                    mediaFrameToReturn = null;
                    break;
                }
            }

            IsAtEndOfStream = readFrameResult == Constants.EndOfFileErrorCode && mediaFrameToReturn == null;
            return(mediaFrameToReturn);
        }
Code example #15
        public void TestReadFrame()
        {
            int      frames = 0;
            AVPacket packet;

            // Count every packet; reading once before the loop would drop the
            // first packet from the count.
            while ((packet = _context.ReadFrame()) != null)
            {
                frames++;
            }
            Assert.AreEqual(680, frames);
        }
Code example #16
        public void Parameters_ReturnNativeValues()
        {
            var data = new byte[] { (byte)'t', (byte)'e', (byte)'s', (byte)'t' };

            var nativePacket = new NativeAVPacket();

            var ffmpegMock = new Mock <FFmpegClient>();

            ffmpegMock
            .Setup(c => c.AllocPacket())
            .Returns((IntPtr)(&nativePacket));

            ffmpegMock
            .Setup(c => c.InitPacket(It.IsAny <IntPtr>()))
            .Callback <IntPtr>(p =>
            {
                fixed(byte *dataPtr = data)
                {
                    var handle           = (NativeAVPacket *)p;
                    handle->stream_index = 10;
                    handle->size         = 11;
                    handle->dts          = 12;
                    handle->duration     = 13;
                    handle->flags        = (int)AVPacketFlags.Discard;
                    handle->pos          = 14;
                    handle->pts          = 15;
                    handle->data         = dataPtr;
                }
            })
            .Verifiable();

            ffmpegMock
            .Setup(c => c.UnrefPacket(It.IsAny <AVPacket>()))
            .Verifiable();

            var ffmpegClient = ffmpegMock.Object;

            using (var packet = new AVPacket(ffmpegClient))
            {
                Assert.Equal(10, packet.StreamIndex);
                Assert.Equal(11, packet.Size);
                Assert.Equal(12, packet.DecompressionTimestamp);
                Assert.Equal(13, packet.Duration);
                Assert.Equal(AVPacketFlags.Discard, packet.Flags);
                Assert.Equal(14, packet.Position);
                Assert.Equal(15, packet.PresentationTimestamp);
                Assert.Equal((byte)'t', *packet.Data);
                Assert.Equal((byte)'e', *(packet.Data + 1));
                Assert.Equal((byte)'s', *(packet.Data + 2));
                Assert.Equal((byte)'t', *(packet.Data + 3));
            }
        }
Code example #17
        public bool TryDecode(ref AVPacket packet, ref AVFrame frame)
        {
            int gotPicture;

            fixed(AVPacket *pPacket = &packet)
            fixed(AVFrame * pFrame = &frame)
            {
                int decodedSize = FFmpegInvoke.avcodec_decode_video2(_pDecodingContext, pFrame, &gotPicture, pPacket);

                if (decodedSize < 0)
                {
                    Trace.TraceWarning("Error while decoding frame.");
                }
            }
            return(gotPicture == 1);
        }
Code example #18
        public bool TryDecode(ref AVPacket packet, out AVFrame *pFrame)
        {
            int gotPicture;

            fixed(AVPacket *pPacket = &packet)
            {
                int decodedSize = FFmpegInvoke.avcodec_decode_video2(codec_context, avFrame, &gotPicture, pPacket);

                if (decodedSize < 0)
                {
                    Console.WriteLine("Error while decoding frame.");
                }
            }

            pFrame = avFrame;
            return(gotPicture == 1);
        }
Code example #19
File: VideoDecoder.cs Project: khan990/AR.Drone
        public bool TryDecode(ref AVPacket packet, out AVFrame *pFrame)
        {
            int gotPicture;

            fixed(AVPacket *pPacket = &packet)
            {
                int decodedSize = ffmpeg.avcodec_decode_video2(_pDecodingContext, _pFrame, &gotPicture, pPacket);

                if (decodedSize < 0)
                {
                    Trace.TraceWarning("Error while decoding frame.");
                }
            }

            pFrame = _pFrame;
            return(gotPicture == 1);
        }
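avcodec_decode_video2 was deprecated in FFmpeg 3.1 in favor of the send/receive pair. A hedged sketch of the same TryDecode shape on the newer API, assuming the FFmpeg.AutoGen bindings and the fields used above:

        public bool TryDecodeModern(ref AVPacket packet, out AVFrame *pFrame)
        {
            pFrame = _pFrame;

            fixed(AVPacket *pPacket = &packet)
            {
                if (ffmpeg.avcodec_send_packet(_pDecodingContext, pPacket) < 0)
                {
                    Trace.TraceWarning("Error while sending packet.");
                    return(false);
                }
            }

            // 0 means a frame is ready; AVERROR(EAGAIN) means feed more packets first.
            return(ffmpeg.avcodec_receive_frame(_pDecodingContext, _pFrame) == 0);
        }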
Code example #20
        public void ReadFrame_AVERROR_EOF_ReturnsFalse()
        {
            var ffmpegMock = new Mock <FFmpegClient>();

            ffmpegMock
            .Setup(c => c.ReadFrame(It.IsAny <AVFormatContext>(), It.IsAny <AVPacket>()))
            .Returns(NativeFFmpeg.AVERROR_EOF)
            .Verifiable();

            var ffmpegClient = ffmpegMock.Object;

            using (var packet = new AVPacket(ffmpegClient))
            {
                Assert.False(packet.ReadFrame(new AVFormatContext()));
            }

            ffmpegMock.Verify();
        }
Code example #21
        public void ReadFrame_ReadsFrame()
        {
            var ffmpegMock = new Mock <FFmpegClient>();

            ffmpegMock
            .Setup(c => c.ReadFrame(It.IsAny <AVFormatContext>(), It.IsAny <AVPacket>()))
            .Returns(0)
            .Verifiable();

            var ffmpegClient = ffmpegMock.Object;

            using (var packet = new AVPacket(ffmpegClient))
            {
                Assert.True(packet.ReadFrame(new AVFormatContext()));
            }

            ffmpegMock.Verify();
        }
Code example #22
        public static string GetFrameLine(AVPacket pkt)
        {
            //pkt.data
            uint crc = BitConverter.ToUInt32(
                new CRC32().ComputeHash(pkt.data_base.Select(Item => (byte)Item).ToArray(), pkt.data_offset, pkt.size),
                0);

            //snprintf(buf, sizeof(buf), "%d, %10"PRId64", %10"PRId64", %8d, %8d, 0x%08x",
            //		 pkt->stream_index, pkt->dts, pkt->pts, pkt->duration, pkt->size, crc);
            //if (pkt->flags != AV_PKT_FLAG_KEY)
            //	av_strlcatf(buf, sizeof(buf), ", F=0x%0X", pkt->flags);
            //if (pkt->side_data_elems)
            //	av_strlcatf(buf, sizeof(buf), ", S=%d", pkt->side_data_elems);
            //av_strlcatf(buf, sizeof(buf), "\n");
            //avio_write(s->pb, buf, strlen(buf));
            //avio_flush(s->pb);
            return($"{pkt.stream_index}, {pkt.dts}, {pkt.pts}, {pkt.duration}, {pkt.size}, 0x{crc:X8}");
        }
Code example #23
File: FFmpegContext.cs Project: xxami/Pulsus
        public void WriteFrame(byte[] data)
        {
            Marshal.Copy(data, 0, (IntPtr)frame->data0, data.Length);

            AVPacket packet = new AVPacket();

            packet.data = null;
            packet.size = 0;

            AVFrame *inputFrame = frame;

            if (convertedFrame != null)
            {
                if (!ConvertFrame())
                {
                    throw new ApplicationException("Failed to convert frame");
                }
                inputFrame = convertedFrame;
            }

            int gotOutput = 0;

            if (type == AVMediaType.AVMEDIA_TYPE_VIDEO)
            {
                if (ffmpeg.avcodec_encode_video2(codecContext, &packet, inputFrame, &gotOutput) < 0)
                {
                    throw new ApplicationException("Failed to encode video");
                }
            }
            else if (type == AVMediaType.AVMEDIA_TYPE_AUDIO)
            {
                if (ffmpeg.avcodec_encode_audio2(formatContext->streams[streamIndex]->codec, &packet, inputFrame, &gotOutput) < 0)
                {
                    throw new ApplicationException("Failed to encode audio");
                }
            }

            if (gotOutput > 0)
            {
                packet.dts = packet.pts = 0;
                if (ffmpeg.av_interleaved_write_frame(formatContext, &packet) < 0)
                {
                    throw new ApplicationException("Failed av_interleaved_write_frame");
                }
            }
        }
Code example #24
        public Decoder()
        {
            InitializeComponent();

            Init.Initialize();

            _decoder   = new VideoDecoder();
            _avPacket  = new AVPacket();
            _converter = new VideoConverter(AVPixelFormat.AV_PIX_FMT_BGR24);

            _socket   = new Socket(AddressFamily.InterNetwork, SocketType.Dgram, ProtocolType.Udp);
            _endPoint = new IPEndPoint(IPAddress.Any, 1234);
            _socket.Bind(_endPoint);
            _socketThread = new Thread(SocketThread)
            {
                IsBackground = true
            };
            _socketThread.Start();
        }
Code example #25
        private void EncodeAndWritePacket()
        {
            byte[] frameBuffer = new byte[FrameSize];
            m_buffer.Read(frameBuffer, 0, frameBuffer.Length);

            fixed(byte *pcmSamples = frameBuffer)
            {
                if (m_disposed)
                {
                    throw new ObjectDisposedException(this.ToString());
                }

                AVPacket outPacket = new AVPacket();

                FFmpeg.av_init_packet(ref outPacket);

                byte[] buffer = new byte[FFmpeg.FF_MIN_BUFFER_SIZE];
                fixed(byte *encodedData = buffer)
                {
                    try
                    {
                        outPacket.size         = FFmpeg.avcodec_encode_audio(ref m_avCodecCtx, encodedData, FFmpeg.FF_MIN_BUFFER_SIZE, (short *)pcmSamples);
                        outPacket.pts          = m_avCodecCtx.coded_frame->pts;
                        outPacket.flags       |= FFmpeg.PKT_FLAG_KEY;
                        outPacket.stream_index = m_avStream.index;
                        outPacket.data         = (IntPtr)encodedData;

                        if (outPacket.size > 0)
                        {
                            if (FFmpeg.av_write_frame(ref m_avFormatCtx, ref outPacket) != 0)
                            {
                                throw new IOException("Error while writing encoded audio frame to file");
                            }
                        }
                    }
                    finally
                    {
                        FFmpeg.av_free_packet(ref outPacket);
                    }
                }
            }
        }
Code example #26
File: VideoDecoder.cs Project: robertohj/AR.Drone
        public bool TryDecode(ref byte[] data, out AVFrame frame)
        {
            int gotPicture;

            frame = new AVFrame();
            fixed(byte *pData = &data[0])
            fixed(AVFrame * pFrame = &frame)
            {
                // Note: FFmpeg expects input buffers to carry zero padding beyond
                // 'size' (FF_INPUT_BUFFER_PADDING_SIZE); see the sketch after this example.
                var packet = new AVPacket {
                    data = pData, size = data.Length
                };
                int decodedSize = FFmpegInvoke.avcodec_decode_video2(_pDecodingContext, pFrame, &gotPicture, &packet);

                if (decodedSize < 0)
                {
                    Trace.TraceWarning("Error while decoding frame.");
                }
            }
            return(gotPicture == 1);
        }
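avcodec_decode_video2 expects the input buffer to carry zero padding beyond size, which a raw managed byte[] does not guarantee. A hedged sketch of preparing a padded copy before taking its address, assuming the wrapper binds the FF_INPUT_BUFFER_PADDING_SIZE constant:

            // Copy the payload into a buffer with the trailing zero padding
            // FFmpeg's bitstream readers expect before pinning it.
            byte[] padded = new byte[data.Length + FFmpegInvoke.FF_INPUT_BUFFER_PADDING_SIZE];
            Buffer.BlockCopy(data, 0, padded, 0, data.Length);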
Code example #27
File: FFmpegMedia.cs Project: Beefr/xenko-wd
        public int ExtractFrames([NotNull] FFmpegStream stream, int count)
        {
            FFmpegUtils.EnsurePlatformSupport();
            if (isDisposed)
            {
                throw new ObjectDisposedException(nameof(FFmpegMedia));
            }
            if (!IsOpen)
            {
                // TODO: log?
                throw new InvalidOperationException(@"Media isn't open.");
            }

            var codecContext = *stream.AVStream->codec;
            var streamInfo   = GetStreamInfo(stream);

            var dstData     = new byte_ptrArray4();
            var dstLinesize = new int_array4();

            ffmpeg.av_image_fill_arrays(ref dstData, ref dstLinesize, (byte *)streamInfo.Image.Buffer, DestinationPixelFormat, codecContext.width, codecContext.height, 1);
            streamInfo.Image.Linesize = dstLinesize[0];

            var extractedFrameCount = 0;

            var packet  = new AVPacket();
            var pPacket = &packet;

            ffmpeg.av_init_packet(pPacket);

            for (int i = 0; i < count; i++)
            {
                var extractionStatus = ExtractNextImage(streamInfo, pPacket, stream.AVStream, dstData, dstLinesize);
                streamInfo.ReachedEnd = extractionStatus == FrameExtractionStatus.ReachEOF;
                if (extractionStatus == FrameExtractionStatus.Succeeded)
                {
                    ++extractedFrameCount;
                }
            }

            return(extractedFrameCount);
        }
Code example #28
        protected override bool DecodePacket(ref AVPacket packet)
        {
            int totalOutput = 0;

            // Copy the data pointer so we can muck with it
            int   packetSize = packet.size;
            byte *packetData = (byte *)packet.data;

            // May be necessary to loop multiple times if more than one frame is in the compressed packet
            fixed(byte *pBuffer = m_buffer)
            do
            {
                if (m_disposed)
                {
                    m_bufferUsedSize = 0;
                    return(false);
                }

                int outputBufferUsedSize = m_buffer.Length - totalOutput; //Must be initialized before sending in as per docs

                short *pcmWritePtr = (short *)(pBuffer + totalOutput);

                int usedInputBytes = FFmpeg.avcodec_decode_audio2(ref m_avCodecCtx, pcmWritePtr, ref outputBufferUsedSize, packetData, packetSize);

                if (usedInputBytes < 0) //Error in packet, ignore packet
                {
                    break;
                }

                if (outputBufferUsedSize > 0)
                {
                    totalOutput += outputBufferUsedSize;
                }

                packetData += usedInputBytes;
                packetSize -= usedInputBytes;
            }while (packetSize > 0);

            m_bufferUsedSize = totalOutput;
            return(true);
        }
Code example #29
        public void ReadFrame_AVError_Throws()
        {
            var ffmpegMock = new Mock <FFmpegClient>();

            ffmpegMock
            .Setup(c => c.ReadFrame(It.IsAny <AVFormatContext>(), It.IsAny <AVPacket>()))
            .Returns(-100)
            .Verifiable();
            ffmpegMock
            .Setup(c => c.ThrowOnAVError(-100, false))
            .Verifiable();

            var ffmpegClient = ffmpegMock.Object;

            using (var packet = new AVPacket(ffmpegClient))
            {
                Assert.False(packet.ReadFrame(new AVFormatContext()));
            }

            ffmpegMock.Verify();
        }
Code example #30
        private object Decode <TType>(byte[] Data, AVCodec AVCodec, Action <AVCodecContext, AVPacket, TType> Action)
        {
            var context = new AVCodecContext();
            var packet  = new AVPacket();

            packet.data = Pointer <byte> .Create(new AllocatedMemory(Data));

            packet.size = Data.Length;

            context.get_buffer = (AVCodecContext, AVFrame) =>
            {
                var width  = AVCodecContext.width;
                var height = AVCodecContext.height;
                AVFrame.linesize[0] = width * 4;
                AVFrame.data[0]     = CLib.malloc(AVFrame.linesize[0] * height);
                return(0);
            };

            context.release_buffer = (AVCodecContext, AVFrame) =>
            {
                CLib.free(AVFrame.data[0]);
            };

            AVCodec.init(context);
            try
            {
                object obj = null;
                if (AVCodec.decode(context, ref obj, packet) < 0)
                {
                    throw(new Exception());
                }
                Action(context, packet, (TType)obj);
                return(obj);
            }
            finally
            {
                AVCodec.close(context);
            }
        }
Code example #31
        internal void EnqueueNextPacket()
        {
            AVPacket packet = new AVPacket();

            FFmpeg.av_init_packet(ref packet);

            if (FFmpeg.av_read_frame(ref FormatContext, ref packet) < 0)
            {
                throw new System.IO.EndOfStreamException();
            }

            DecoderStream dest = null;

            if (m_streams.TryGetValue(packet.stream_index, out dest))
            {
                dest.PacketQueue.Enqueue(packet);
            }
            else
            {
                FFmpeg.av_free_packet(ref packet);
            }
        }
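With this legacy API, a packet filled by av_read_frame can reference demuxer-owned memory that is recycled on the next read, so queuing it for later decoding normally requires duplicating it first. A hedged variant of the enqueue branch, assuming the wrapper also binds av_dup_packet:

            if (m_streams.TryGetValue(packet.stream_index, out dest))
            {
                // av_dup_packet copies the payload into packet-owned memory so the
                // queued packet survives the next av_read_frame call.
                if (FFmpeg.av_dup_packet(ref packet) == 0)
                {
                    dest.PacketQueue.Enqueue(packet);
                }
            }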
Code example #32
        public void Constuctor_InitializesInstance()
        {
            IntPtr createdHandle = default(IntPtr);
            var    ffmpegMock    = new Mock <FFmpegClient>();

            ffmpegMock
            .Setup(c => c.InitPacket(It.IsAny <IntPtr>()))
            .Callback <IntPtr>(p => createdHandle = p)
            .Verifiable();

            ffmpegMock
            .Setup(c => c.UnrefPacket(It.IsAny <AVPacket>()))
            .Verifiable();

            var ffmpegClient = ffmpegMock.Object;

            using (var packet = new AVPacket(ffmpegClient))
            {
                Assert.Equal(createdHandle, packet.Handle);
                Assert.Equal((int)createdHandle.ToPointer(), (int)packet.NativeObject);
            }

            ffmpegMock.Verify();
        }
Code example #33
File: Program.cs Project: renanyoy/FFmpeg.AutoGen
		private static unsafe void Main(string[] args)
		{
            Console.WriteLine("Runnung in {0}-bit mode.", Environment.Is64BitProcess ? "64" : "32");

            // register path to ffmpeg
            switch (Environment.OSVersion.Platform)
            {
                case PlatformID.Win32NT:    
                case PlatformID.Win32S:
                case PlatformID.Win32Windows:
                    string ffmpegPath = string.Format(@"../../../FFmpeg/bin/windows/{0}", Environment.Is64BitProcess ? "x64" : "x86");
                    InteropHelper.RegisterLibrariesSearchPath(ffmpegPath);
                    break;
                case PlatformID.Unix:
                case PlatformID.MacOSX:
                    string libraryPath = Environment.GetEnvironmentVariable(InteropHelper.LD_LIBRARY_PATH);
                    InteropHelper.RegisterLibrariesSearchPath(libraryPath);
                    break;
            }
            
            // decode 100 frames from url or path

            //string url = @"../../sample_mpeg4.mp4";
            string url = @"http://hubblesource.stsci.edu/sources/video/clips/details/images/centaur_1.mpg";

			FFmpegInvoke.av_register_all();
			FFmpegInvoke.avcodec_register_all();
			FFmpegInvoke.avformat_network_init();


			AVFormatContext* pFormatContext = FFmpegInvoke.avformat_alloc_context();
			if (FFmpegInvoke.avformat_open_input(&pFormatContext, url, null, null) != 0)
				throw new Exception("Could not open file");

            if (FFmpegInvoke.avformat_find_stream_info(pFormatContext, null) != 0)
				throw new Exception("Could not find stream info");

			AVStream* pStream = null;
			for (int i = 0; i < pFormatContext->nb_streams; i++)
			{
                if (pFormatContext->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
				{
                    pStream = pFormatContext->streams[i];
					break;
				}
			}
			if (pStream == null)
				throw new Exception("Could not found video stream");

			AVCodecContext codecContext = *(pStream->codec);
			int width = codecContext.width;
			int height = codecContext.height;
			AVPixelFormat sourcePixFmt = codecContext.pix_fmt;
			AVCodecID codecId = codecContext.codec_id;
			var convertToPixFmt = AVPixelFormat.PIX_FMT_BGR24;
			SwsContext* pConvertContext = FFmpegInvoke.sws_getContext(width, height, sourcePixFmt,
			                                                           width, height, convertToPixFmt,
			                                                           FFmpegInvoke.SWS_FAST_BILINEAR, null, null, null);
			if (pConvertContext == null)
				throw new Exception("Could not initialize the conversion context");

			var pConvertedFrame = (AVPicture*)FFmpegInvoke.avcodec_alloc_frame();
			int convertedFrameBufferSize = FFmpegInvoke.avpicture_get_size(convertToPixFmt, width, height);
			var pConvertedFrameBuffer = (byte*)FFmpegInvoke.av_malloc((uint) convertedFrameBufferSize);
			FFmpegInvoke.avpicture_fill(pConvertedFrame, pConvertedFrameBuffer, convertToPixFmt, width, height);

			AVCodec* pCodec = FFmpegInvoke.avcodec_find_decoder(codecId);
			if (pCodec == null)
				throw new Exception("Unsupported codec");

            // Reusing the codec context from the stream info;
            // as an alternative it could look like this (but that does not work for all kinds of codecs):
			// AVCodecContext* pCodecContext = FFmpegInvoke.avcodec_alloc_context3(pCodec);
            AVCodecContext* pCodecContext = &codecContext;

			if ((pCodec->capabilities & FFmpegInvoke.CODEC_CAP_TRUNCATED) == FFmpegInvoke.CODEC_CAP_TRUNCATED)
				pCodecContext->flags |= FFmpegInvoke.CODEC_FLAG_TRUNCATED;

			if (FFmpegInvoke.avcodec_open2(pCodecContext, pCodec, null) < 0)
				throw new Exception("Could not open codec");

			AVFrame* pDecodedFrame = FFmpegInvoke.avcodec_alloc_frame();

			var packet = new AVPacket();
			AVPacket* pPacket = &packet;
			FFmpegInvoke.av_init_packet(pPacket);

			int frameNumber = 0;
			while (frameNumber < 100)
			{
				if (FFmpegInvoke.av_read_frame(pFormatContext, pPacket) < 0)
					throw new Exception("Could not read frame");

				if (pPacket->stream_index != pStream->index)
				{
					// Not the video stream; free the packet before reading the next one.
					FFmpegInvoke.av_free_packet(pPacket);
					continue;
				}

                int gotPicture = 0;
				int size = FFmpegInvoke.avcodec_decode_video2(pCodecContext, pDecodedFrame, &gotPicture, pPacket);
				if (size < 0)
					throw new Exception(string.Format("Error while decoding frame {0}", frameNumber));

				if (gotPicture == 1)
				{
                    Console.WriteLine("frame: {0}", frameNumber);

					byte** src = &pDecodedFrame->data_0;
					byte** dst = &pConvertedFrame->data_0;
					FFmpegInvoke.sws_scale(pConvertContext, src, pDecodedFrame->linesize, 0,
					                        height, dst, pConvertedFrame->linesize);

					byte* convertedFrameAddress = pConvertedFrame->data_0;

					var imageBufferPtr = new IntPtr(convertedFrameAddress);

                    int linesize = pConvertedFrame->linesize[0];
                    using (var bitmap = new Bitmap(width, height, linesize, PixelFormat.Format24bppRgb, imageBufferPtr))
					{
						bitmap.Save(@"frame.buffer.jpg", ImageFormat.Jpeg);
					}

                    frameNumber++;
				}
			}

			FFmpegInvoke.av_free(pConvertedFrame);
			FFmpegInvoke.av_free(pConvertedFrameBuffer);
			FFmpegInvoke.sws_freeContext(pConvertContext);

			FFmpegInvoke.av_free(pDecodedFrame);
			FFmpegInvoke.avcodec_close(pCodecContext);
			FFmpegInvoke.avformat_close_input(&pFormatContext);
		}
Code example #34
File: Program.cs Project: Ruslan-B/FFmpeg.AutoGen
        private static unsafe void Main(string[] args)
        {
            Console.WriteLine(@"Current directory: " + Environment.CurrentDirectory);
            Console.WriteLine(@"Runnung in {0}-bit mode.", Environment.Is64BitProcess ? @"64" : @"32");

            // register path to ffmpeg
            switch (Environment.OSVersion.Platform)
            {
                case PlatformID.Win32NT:
                case PlatformID.Win32S:
                case PlatformID.Win32Windows:
                    var ffmpegPath = $@"../../../../FFmpeg/bin/{(Environment.Is64BitProcess ? @"x64" : @"x86")}";
                    InteropHelper.RegisterLibrariesSearchPath(ffmpegPath);
                    break;
                case PlatformID.Unix:
                case PlatformID.MacOSX:
                    var libraryPath = Environment.GetEnvironmentVariable(InteropHelper.LD_LIBRARY_PATH);
                    InteropHelper.RegisterLibrariesSearchPath(libraryPath);
                    break;
            }

            // decode 1400 frames from url or path

            //string url = @"../../sample_mpeg4.mp4";
            var url = @"http://www.quirksmode.org/html5/videos/big_buck_bunny.mp4";

            ffmpeg.av_register_all();
            ffmpeg.avcodec_register_all();
            ffmpeg.avformat_network_init();


            Console.WriteLine($"FFmpeg version info: {ffmpeg.av_version_info()}");

            var pFormatContext = ffmpeg.avformat_alloc_context();
            if (ffmpeg.avformat_open_input(&pFormatContext, url, null, null) != 0)
            {
                throw new ApplicationException(@"Could not open file");
            }

            if (ffmpeg.avformat_find_stream_info(pFormatContext, null) != 0)
            {
                throw new ApplicationException(@"Could not find stream info");
            }

            AVStream* pStream = null;
            for (var i = 0; i < pFormatContext->nb_streams; i++)
            {
                if (pFormatContext->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
                {
                    pStream = pFormatContext->streams[i];
                    break;
                }
            }
            if (pStream == null)
            {
                throw new ApplicationException(@"Could not found video stream");
            }


            var codecContext = *pStream->codec;
           
            Console.WriteLine($"codec name: { ffmpeg.avcodec_get_name(codecContext.codec_id)}");
            
            var width = codecContext.width;
            var height = codecContext.height;
            var sourcePixFmt = codecContext.pix_fmt;
            var codecId = codecContext.codec_id;
            var convertToPixFmt = AVPixelFormat.AV_PIX_FMT_BGR24;
            var pConvertContext = ffmpeg.sws_getContext(width, height, sourcePixFmt,
                width, height, convertToPixFmt,
                ffmpeg.SWS_FAST_BILINEAR, null, null, null);
            if (pConvertContext == null)
            {
                throw new ApplicationException(@"Could not initialize the conversion context");
            }

            var pConvertedFrame = ffmpeg.av_frame_alloc();
            var convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(convertToPixFmt, width, height, 1);
            var pConvertedFrameBuffer = (sbyte*)ffmpeg.av_malloc((ulong)convertedFrameBufferSize);
            ffmpeg.avpicture_fill((AVPicture*)pConvertedFrame, pConvertedFrameBuffer, convertToPixFmt, width, height);

            var pCodec = ffmpeg.avcodec_find_decoder(codecId);
            if (pCodec == null)
            {
                throw new ApplicationException(@"Unsupported codec");
            }

            // Reusing the codec context from the stream info; initially it looked like this:
            // AVCodecContext* pCodecContext = ffmpeg.avcodec_alloc_context3(pCodec); // but that does not work for all kinds of codecs
            var pCodecContext = &codecContext;

            if ((pCodec->capabilities & ffmpeg.AV_CODEC_CAP_TRUNCATED) == ffmpeg.AV_CODEC_CAP_TRUNCATED)
            {
                pCodecContext->flags |= ffmpeg.AV_CODEC_FLAG_TRUNCATED;
            }

            if (ffmpeg.avcodec_open2(pCodecContext, pCodec, null) < 0)
            {
                throw new ApplicationException(@"Could not open codec");
            }

            var pDecodedFrame = ffmpeg.av_frame_alloc();

            var packet = new AVPacket();
            var pPacket = &packet;
            ffmpeg.av_init_packet(pPacket);

            var frameNumber = 0;
            while (frameNumber < 1400)
            {
                if (ffmpeg.av_read_frame(pFormatContext, pPacket) < 0)
                {
                    ffmpeg.av_packet_unref(pPacket);
                    ffmpeg.av_frame_unref(pDecodedFrame);

                    throw new ApplicationException(@"Could not read frame");
                }

                if (pPacket->stream_index != pStream->index)
                {
                    ffmpeg.av_packet_unref(pPacket); // don't leak non-video packets
                    continue;
                }

                if (ffmpeg.avcodec_send_packet(pCodecContext, pPacket) < 0)
                {
                    ffmpeg.av_packet_unref(pPacket);
                    ffmpeg.av_frame_unref(pDecodedFrame);

                    throw new ApplicationException($@"Error while sending packet {frameNumber}");
                }

                var receiveResult = ffmpeg.avcodec_receive_frame(pCodecContext, pDecodedFrame);

                ffmpeg.av_packet_unref(pPacket);

                if (receiveResult == ffmpeg.AVERROR(ffmpeg.EAGAIN))
                {
                    // The decoder buffered the packet and needs more input before
                    // it can emit a frame; read the next packet.
                    continue;
                }
                if (receiveResult < 0)
                {
                    ffmpeg.av_frame_unref(pDecodedFrame);
                    throw new ApplicationException($@"Error while receiving frame {frameNumber}");
                }

                Console.WriteLine($@"frame: {frameNumber}");

                var src = &pDecodedFrame->data0;
                var dst = &pConvertedFrame->data0;
                var srcStride = pDecodedFrame->linesize;
                var dstStride = pConvertedFrame->linesize;
                ffmpeg.sws_scale(pConvertContext, src, srcStride, 0, height, dst, dstStride);

                var convertedFrameAddress = pConvertedFrame->data0;

                var imageBufferPtr = new IntPtr(convertedFrameAddress);

                var linesize = dstStride[0];
                using (var bitmap = new Bitmap(width, height, linesize, PixelFormat.Format24bppRgb, imageBufferPtr))
                {
                    bitmap.Save(@"frame.buffer.jpg", ImageFormat.Jpeg);
                }

                ffmpeg.av_frame_unref(pDecodedFrame);
                frameNumber++;
            }

            ffmpeg.av_free(pConvertedFrame);
            ffmpeg.av_free(pConvertedFrameBuffer);
            ffmpeg.sws_freeContext(pConvertContext);

            ffmpeg.av_free(pDecodedFrame);
            ffmpeg.avcodec_close(pCodecContext);
            ffmpeg.avformat_close_input(&pFormatContext);
        }
Code example #35
File: AV.cs Project: nbomeroglu37/FFmpeg.Wrapper
 public static bool ReadFrame(AVFormatContext context, AVPacket packet)
 {
     return FFmpegInvoke.av_read_frame(context.NativeObj, packet.NativeObj) >= 0;
 }
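A usage sketch for this wrapper, mirroring the read loop in code example #36 below; formatContext is assumed to be an open AVFormatContext, and release follows the FreePacket pattern from example #11:

 var packet = new AVPacket();
 while (AV.ReadFrame(formatContext, packet))
 {
     // ... inspect packet.StreamIndex or hand the packet to a decoder ...
     AV.FreePacket(packet);
     packet = new AVPacket();
 }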
Code example #36
        public unsafe void code()
        {

            //
            //            AVFormatContext* context = FFmpegInvoke.avformat_alloc_context();
            //            FFmpegInvoke.avformat_open_input(context, "", null, null);
            string path = "./vid1.mpg";


            //Load file
            AVFormatContext formatContext = AVFormat.OpenInput(path);
            AVFormat.FindStreamInfo(formatContext);

           

            //Find video streams
            AVStream videoStream1 = null;
            foreach (AVStream stream in formatContext.Streams)
            {
                Console.WriteLine("Stream CodecType " + stream.CodecContext.CodecType);
                if (stream.CodecContext.CodecType == AVMediaType.Video)
                {
                    Console.WriteLine("Found video stream");
                    videoStream1 = stream;
                }
            }

            if (videoStream1 == null)
            {
                Console.WriteLine("No video stream found!");
                return;
            }

            


            AVCodecContext origCodecCtx1 = videoStream1.CodecContext;
            AVCodec codec1 = AVCodec.FindDecoder(origCodecCtx1.CodecId);

            if (codec1 == null)
            {
                Console.WriteLine("Unsupported codec!");
                return;
            }

            AVCodecContext codecCtx1 = AVCodec.AllocContext3(codec1);
            if (!AVCodec.CopyContext(codecCtx1, origCodecCtx1))
            {
                Console.WriteLine("Failed to copy codec");
                return;
            }

            if (!AVCodec.Open2(codecCtx1, codec1))
            {
                Console.WriteLine("Failed to open codec!");
                return;
            }

            AVFrame frame1 = AVFrame.Alloc();
            AVFrame frameBGR1 = AVFrame.Alloc();

            int bufferSize = AVPicture.GetSize(AutoGen.AVPixelFormat.PIX_FMT_BGR24, codecCtx1.Width, codecCtx1.Height);

            SByteBuffer buffer1 = AV.Malloc(bufferSize);

            

            AVPicture.Fill(frameBGR1, buffer1, AutoGen.AVPixelFormat.PIX_FMT_BGR24, codecCtx1.Width, codecCtx1.Height);

            SWSContext swsCtx1 = SWS.GetContext(codecCtx1.Width, codecCtx1.Height, codecCtx1.PixelFormat, codecCtx1.Width,
                codecCtx1.Height, AutoGen.AVPixelFormat.PIX_FMT_BGR24, AutoGen.FFmpegInvoke.SWS_FAST_BILINEAR);

            


            AutoGen.AVFormatContext* formatCtx = formatContext.NativeObj;
            AutoGen.AVStream* videoStream = videoStream1.NativeObj;
            AutoGen.AVCodecContext* origCodecCtx = origCodecCtx1.NativeObj;
            AutoGen.AVCodec* codec = codec1.NativeObj;
            AutoGen.AVCodecContext* codecCtx = codecCtx1.NativeObj;
            AutoGen.AVFrame* frame = frame1.NativeObj;
            AutoGen.AVFrame* frameBGR = frameBGR1.NativeObj;
            byte* buffer = buffer1.NativeObj;
            AutoGen.SwsContext* swsCtx = swsCtx1.NativeObj;

//            AutoGen.AVPacket packet;

            AVPacket packet1 = new AVPacket();

            int frameId = 0;
            while (AV.ReadFrame(formatContext, packet1))
            {
                if (packet1.StreamIndex != videoStream1.Index)
                {
                    Console.WriteLine("Wrong stream");
                    // Release the packet before moving on, matching the end-of-loop pattern.
                    AV.FreePacket(packet1);
                    packet1 = new AVPacket();
                    continue;
                }

                bool frameFinished;
                if (!AVCodec.DecodeVideo2(codecCtx1, frame1, out frameFinished, packet1))
                {
                    Console.WriteLine("Failed to decode frame!");
                    return;
                }

                if (frameFinished)
                {
//                    byte** src = &frame->data_0;
//                    byte** dst = &frameBGR->data_0;
//                    FFmpegInvoke.sws_scale(swsCtx, src, frame->linesize, 0, codecCtx->height, dst,
//                        frameBGR->linesize);

                    SWS.Scale(swsCtx1, frame1.Data0, frame1.LineSize, 0, codecCtx1.Height, frameBGR1.Data0, frameBGR1.LineSize);
                    

                    byte* convertedFrameAddress = frameBGR->data_0;

                    var imageBufferPtr = new IntPtr(convertedFrameAddress);

                    int linesize = frameBGR->linesize[0];

                    using (var bitmap = new Bitmap(codecCtx->width, codecCtx->height, linesize, PixelFormat.Format24bppRgb, imageBufferPtr))
                    {
                        bitmap.Save(@"frame.buffer." + frameId + ".png", ImageFormat.Png);
                    }
                    frameId++;
                }

//                FFmpegInvoke.av_free_packet(&packet);
                AV.FreePacket(packet1);
                packet1 = new AVPacket();
            }



            FFmpegInvoke.av_free(buffer);
            FFmpegInvoke.av_free(frameBGR);
            FFmpegInvoke.av_free(frame);

            FFmpegInvoke.sws_freeContext(swsCtx);

            FFmpegInvoke.avcodec_close(codecCtx);
            FFmpegInvoke.avcodec_close(origCodecCtx);

            FFmpegInvoke.avformat_close_input(&formatCtx);
        }