Example No. 1
            public static AVFrame GetInstance(IntPtr p)
            {
                var f      = new AVFrame();
                int curPos = 0;
                var data   = new IntPtr[8];

                for (int i = 0; i < 8; ++i)
                {
                    data[i] = Marshal.ReadIntPtr(p, curPos);
                    curPos += IntPtr.Size; // pointer fields are 4 bytes only on 32-bit; 8 on 64-bit
                }
                f.Data = data;
                var lineSize = new int[8];

                for (int i = 0; i < 8; ++i)
                {
                    lineSize[i] = Marshal.ReadInt32(p, curPos);
                    curPos     += 4;
                }
                f.LineSize     = lineSize;
                f.ExtendedData = Marshal.ReadIntPtr(p, curPos);
                curPos        += IntPtr.Size;
                f.Width        = Marshal.ReadInt32(p, curPos);
                curPos        += 4;
                f.Height       = Marshal.ReadInt32(p, curPos);
                curPos        += 4;
                f.NbSamples    = Marshal.ReadInt32(p, curPos);
                curPos        += 4;
                f.Format       = Marshal.ReadInt32(p, curPos);
                curPos        += 4;
                f.KeyFrame     = Marshal.ReadInt32(p, curPos);
                curPos        += 4;
                f.PictType     = (AVPictureType)Marshal.ReadInt32(p, curPos);
                return(f);
            }
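A brief usage sketch for the marshaller above; GetDecodedFramePointer is a hypothetical stand-in for whatever produces a native AVFrame pointer, and the field offsets read by GetInstance only line up when they match the native struct layout:

            IntPtr  pNativeFrame = GetDecodedFramePointer(); // hypothetical helper
            AVFrame frame        = AVFrame.GetInstance(pNativeFrame);

            Console.WriteLine($"{frame.Width}x{frame.Height}, format {frame.Format}, key frame {frame.KeyFrame}");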
Example No. 2
        public virtual int init(int[] extraData)
        {
            context = MpegEncContext.avcodec_alloc_context();

            picture = AVFrame.avcodec_alloc_frame();

            packet = new AVPacket();
            packet.av_init_packet();

            if (extraData != null)
            {
                context.extradata_size = extraData.Length;
                // Add 4 additional values to avoid exceptions while parsing
                int[] extraDataPlus4 = new int[context.extradata_size + 4];
                Array.Copy(extraData, 0, extraDataPlus4, 0, context.extradata_size);
                context.extradata = extraDataPlus4;
            }

            int result = context.avcodec_open(new com.twilight.h264.decoder.H264Decoder());

            if (result < 0)
            {
                return(result);
            }

            gotPicture[0] = 0;

            return(0);
        }
Example No. 3
        private static void YUV2RGB_WOEdge(AVFrame f, int[] rgb)
        {
            var luma     = f.data_base[0];
            var cb       = f.data_base[1];
            var cr       = f.data_base[2];
            int stride   = f.linesize[0];
            int strideCb = f.linesize[1];
            int strideCr = f.linesize[2];


            for (int y = 0; y < f.imageHeightWOEdge; y++)
            {
                int lineOffLuma = y * stride + f.data_offset[0];
                int lineOffCb   = (y >> 1) * strideCb + f.data_offset[1];
                int lineOffCr   = (y >> 1) * strideCr + f.data_offset[2];
                int rgbOff      = y * f.imageWidthWOEdge;

                for (int x = 0; x < f.imageWidthWOEdge; x++)
                {
                    int c = luma[lineOffLuma + x] - 16;
                    int d = cb[lineOffCb + (x >> 1)] - 128;
                    int e = cr[lineOffCr + (x >> 1)] - 128;

                    byte red   = (byte)MathUtils.Clamp((298 * c + 409 * e + 128) >> 8, 0, 255);
                    byte green = (byte)MathUtils.Clamp((298 * c - 100 * d - 208 * e + 128) >> 8, 0, 255);
                    byte blue  = (byte)MathUtils.Clamp((298 * c + 516 * d + 128) >> 8, 0, 255);
                    byte alpha = 255;

                    rgb[rgbOff + x] = (alpha << 24) | (red << 16) | (green << 8) | (blue << 0);
                }
            }
        }
Example No. 4
        public byte[] ConvertFrame(ref AVFrame frame)
        {
            //int linesz0 = ffmpeg.av_image_get_linesize(_srcPixelFormat, _dstSize.Width, 0);
            //int linesz1 = ffmpeg.av_image_get_linesize(_srcPixelFormat, _dstSize.Width, 1);
            //int linesz2 = ffmpeg.av_image_get_linesize(_srcPixelFormat, _dstSize.Width, 2);

            //byte_ptrArray4 src = new byte_ptrArray4();
            //int_array4 srcStride = new int_array4();

            //fixed (byte* pSrcData = srcData)
            //{
            //    ffmpeg.av_image_fill_arrays(ref src, ref srcStride, pSrcData, _srcPixelFormat, _srcWidth, _srcHeight, 1).ThrowExceptionIfError();
            //}

            ffmpeg.sws_scale(_pConvertContext, frame.data, frame.linesize, 0, frame.height, _dstData, _dstLinesize).ThrowExceptionIfError();

            int outputBufferSize = ffmpeg.av_image_get_buffer_size(_dstPixelFormat, _dstWidth, _dstHeight, 1);

            byte[] outputBuffer = new byte[outputBufferSize];

            fixed(byte *pOutData = outputBuffer)
            {
                ffmpeg.av_image_copy_to_buffer(pOutData, outputBufferSize, _dstData, _dstLinesize, _dstPixelFormat, _dstWidth, _dstHeight, 1)
                .ThrowExceptionIfError();
            }

            return(outputBuffer);
        }
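ConvertFrame above depends on conversion state initialized elsewhere in the class. A sketch of the likely setup, assuming source fields with the obvious names (_srcWidth, _srcHeight and _srcPixelFormat are guesses; the destination fields match the ones used above):

            _pConvertContext = ffmpeg.sws_getContext(_srcWidth, _srcHeight, _srcPixelFormat,
                                                     _dstWidth, _dstHeight, _dstPixelFormat,
                                                     ffmpeg.SWS_FAST_BILINEAR, null, null, null);
            if (_pConvertContext == null)
            {
                throw new ApplicationException("Could not initialise the conversion context.");
            }

            _dstData     = new byte_ptrArray4();
            _dstLinesize = new int_array4();

            // Allocate the destination planes that sws_scale writes into above.
            ffmpeg.av_image_alloc(ref _dstData, ref _dstLinesize,
                                  _dstWidth, _dstHeight, _dstPixelFormat, 1).ThrowExceptionIfError();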
Example No. 5
        public static Bitmap ToImageWOEdges(this AVFrame f, int Width, int Height)
        {
            var Out = new Bitmap(Math.Min(Width, f.imageWidthWOEdge), Math.Min(Height, f.imageHeightWOEdge));

            Graphics.FromImage(Out).DrawImage(f.ToImageWOEdges(), Point.Empty);
            return(Out);
        }
Example No. 6
        //private void FileSourceDecdoer_OnVideoFrame(byte[] buffer, int width, int height)
        private void FileSourceDecoder_OnVideoFrame(ref AVFrame frame)
        {
            if (OnVideoSourceEncodedSample != null)
            {
                int frameRate = (int)_fileSourceDecoder.VideoAverageFrameRate;
                frameRate = (frameRate <= 0) ? DEFAULT_FRAME_RATE : frameRate;
                uint timestampDuration = (uint)(VIDEO_SAMPLING_RATE / frameRate);

                //Console.WriteLine($"framerate {frameRate}, timestamp duration {timestampDuration}.");

                //var frame = _videoEncoder.MakeFrame(buffer, width, height);
                var encodedSample = _videoEncoder.Encode(FFmpegConvert.GetAVCodecID(_videoFormatManager.SelectedFormat.Codec), frame, frameRate, _forceKeyFrame);

                if (encodedSample != null)
                {
                    // Note the event handler can be removed while the encoding is in progress.
                    OnVideoSourceEncodedSample?.Invoke(timestampDuration, encodedSample);

                    if (_forceKeyFrame)
                    {
                        _forceKeyFrame = false;
                    }
                }
            }
        }
Example No. 7
        private unsafe void DrawAsciiFrame(AVFrame frame)
        {
            // We don't call Console.Clear() here because it actually adds stutter.
            // Go ahead and try this example in Alacritty to see how smooth it is!
            asciiBuilder.Clear();
            Console.SetCursorPosition(0, 0);
            int length = frame.width * frame.height;

            var RawData = new ReadOnlySpan <byte>(frame.data[0], frame.linesize[0] * frame.height);

            // Since we know that the frame has the exact size of the terminal window,
            // we have no need to add any newline characters. Thus we can just go through
            // the entire byte array to build the ASCII converted string.
            for (int i = 0; i < length; i++)
            {
                asciiBuilder.Append(asciiPixels[RangeMap(RawData[i], 0, 255, 0, asciiPixels.Length - 1)]);
            }

            Console.Write(asciiBuilder.ToString());
            if (footerText.Length > 0)
            {
                Console.Write(footerText);
            }
            Console.Out.Flush();
        }
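RangeMap is used above but not shown in the snippet. One plausible implementation, a linear remap of value from [fromLow, fromHigh] into [toLow, toHigh] (an assumption, not the original helper):

        private static int RangeMap(int value, int fromLow, int fromHigh, int toLow, int toHigh)
        {
            // Integer linear interpolation; for the call above this maps a
            // luma byte (0..255) onto an index into asciiPixels.
            return (value - fromLow) * (toHigh - toLow) / (fromHigh - fromLow) + toLow;
        }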
Example No. 8
        private unsafe void AudioDecoder_OnAudioFrame(ref AVFrame avFrame)
        {
            if (OnAudioSourceEncodedSample == null)
            {
                return;
            }

            // Avoid recreating a buffer of the same size on every frame
            if (_currentNbSamples != avFrame.nb_samples)
            {
                bufferSize        = ffmpeg.av_samples_get_buffer_size(null, avFrame.channels, avFrame.nb_samples, AVSampleFormat.AV_SAMPLE_FMT_S16, 1);
                buffer            = new byte[bufferSize];
                _currentNbSamples = avFrame.nb_samples;
            }

            // Convert audio
            int dstSampleCount;

            fixed(byte *pBuffer = buffer)
            dstSampleCount = ffmpeg.swr_convert(_audioDecoder._swrContext, &pBuffer, bufferSize, avFrame.extended_data, avFrame.nb_samples).ThrowExceptionIfError();

            Console.WriteLine($"nb_samples:{avFrame.nb_samples} - bufferSize:{bufferSize} - dstSampleCount:{dstSampleCount}");

            if (dstSampleCount > 0)
            {
                // FFmpeg AV_SAMPLE_FMT_S16 will store the bytes in the correct endianness for the underlying platform.
                short[] pcm           = buffer.Take(dstSampleCount * 2).Where((x, i) => i % 2 == 0).Select((y, i) => BitConverter.ToInt16(buffer, i * 2)).ToArray();
                var     encodedSample = _audioEncoder.EncodeAudio(pcm, _audioFormatManager.SelectedFormat);

                OnAudioSourceEncodedSample?.Invoke((uint)encodedSample.Length, encodedSample);
            }
        }
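The LINQ pipeline above rebuilds the 16-bit samples pair by pair; for AV_SAMPLE_FMT_S16 a single Buffer.BlockCopy is equivalent and avoids the double enumeration (a sketch, assuming mono output as the byte count above implies):

            short[] pcm = new short[dstSampleCount];
            Buffer.BlockCopy(buffer, 0, pcm, 0, dstSampleCount * 2);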
Example No. 9
    public bool AvFrameToImageByteArray(AVFrame frame, out byte[] pngData)
    {
        AVCodec *       outCodec    = ffmpeg.avcodec_find_encoder(AVCodecID.AV_CODEC_ID_PNG);
        AVCodecContext *outCodecCtx = ffmpeg.avcodec_alloc_context3(outCodec);

        outCodecCtx->width         = _pCodecContext->width;
        outCodecCtx->height        = _pCodecContext->height;
        outCodecCtx->pix_fmt       = AVPixelFormat.AV_PIX_FMT_RGB24;
        outCodecCtx->codec_type    = AVMediaType.AVMEDIA_TYPE_VIDEO;
        outCodecCtx->time_base.num = _pCodecContext->time_base.num;
        outCodecCtx->time_base.den = _pCodecContext->time_base.den;

        if (ffmpeg.avcodec_open2(outCodecCtx, outCodec, null) < 0)
        {
            // Free the allocated context before bailing out.
            ffmpeg.avcodec_free_context(&outCodecCtx);
            pngData = new byte[] { };
            return(false);
        }

        AVPacket outPacket = new AVPacket();

        ffmpeg.av_init_packet(&outPacket);
        outPacket.size = 0;
        outPacket.data = null;

        ffmpeg.avcodec_send_frame(outCodecCtx, &frame);
        if (ffmpeg.avcodec_receive_packet(outCodecCtx, &outPacket) < 0)
        {
            // Encoding failed; release the encoder context and report failure.
            ffmpeg.avcodec_free_context(&outCodecCtx);
            pngData = new byte[] { };
            return(false);
        }

        pngData = new byte[outPacket.size];

        Marshal.Copy((IntPtr)outPacket.data, pngData, 0, outPacket.size);

        // Release the native packet and the encoder context to avoid leaks.
        ffmpeg.av_packet_unref(&outPacket);
        ffmpeg.avcodec_free_context(&outCodecCtx);
        return(true);
    }
Example No. 10
        public void TestDecode()
        {
            _codecContext.Open2(AVCodec.FindDecoder(_codecContext.CodecId));
            int      frames = 0;
            AVPacket packet;
            var      frame = new AVFrame();

            while ((packet = _formatContext.ReadFrame()) != null)
            {
                if (packet.StreamIndex != 0)
                {
                    continue;
                }
                bool picture = _codecContext.DecodeVideo2(packet, frame);
                if (!picture)
                {
                    continue;
                }

                frames++;

                // to check the frames visually, not part of normal test
                // PgmSave(frames + ".pgm", frame);
            }
            Assert.AreEqual(245, frames);
        }
Example No. 11
        public unsafe bool TryDecodeNextFrame(out AVFrame frame)
        {
            ffmpeg.av_frame_unref(this._pFrame);
            int error2;

            do
            {
                try
                {
                    do
                    {
                        error2 = ffmpeg.av_read_frame(this._pFormatContext, this._pPacket);
                        if (error2 == ffmpeg.AVERROR_EOF)
                        {
                            frame = *this._pFrame;
                            return(false);
                        }
                        error2.ThrowExceptionIfError();
                    }while (this._pPacket->stream_index != this._streamIndex);
                    ffmpeg.avcodec_send_packet(this._pCodecContext, this._pPacket).ThrowExceptionIfError();
                }
                finally
                {
                    ffmpeg.av_packet_unref(this._pPacket);
                }
                error2 = ffmpeg.avcodec_receive_frame(this._pCodecContext, this._pFrame);
            } while (error2 == ffmpeg.AVERROR(ffmpeg.EAGAIN)); // EAGAIN: the decoder needs more input
            error2.ThrowExceptionIfError();
            frame = *this._pFrame;
            return(true);
        }
Example No. 12
        public static FFmpegFrame GetFrameRgba()
        {
            if (!IsInitialized)
            {
                throw new InvalidOperationException("Tried to use uninitialized codec!");
            }

            AVFrame managedFrame = Marshal.PtrToStructure <AVFrame>((IntPtr)_frame);

            EnsureScalerSetup(managedFrame.width, managedFrame.height);

            byte *[] data = managedFrame.data.ToArray();

            int[] lineSizes = managedFrame.linesize.ToArray();

            byte[] dst = new byte[managedFrame.width * managedFrame.height * 4];

            fixed(byte *ptr = dst)
            {
                byte *[] dstData = new byte *[] { ptr };

                int[] dstLineSizes = new int[] { managedFrame.width * 4 };

                ffmpeg.sws_scale(_scalerCtx, data, lineSizes, 0, managedFrame.height, dstData, dstLineSizes);
            }

            return(new FFmpegFrame()
            {
                Width = managedFrame.width,
                Height = managedFrame.height,

                Data = dst
            });
        }
Example No. 13
        public AVFrame Convert(AVFrame sourceFrame)
        {
            try
            {
                ffmpeg.sws_scale(_pConvertContext,
                                 sourceFrame.data, sourceFrame.linesize,
                                 0, sourceFrame.height,
                                 _dstData, _dstLinesize);
            }
            catch (AccessViolationException ex)
            {
                throw new AccessViolationException(ex.ToString());
            }

            var data = new byte_ptrArray8();

            data.UpdateFrom(_dstData);

            var linesize = new int_array8();

            linesize.UpdateFrom(_dstLinesize);

            return(new AVFrame
            {
                data = data,
                linesize = linesize,
                width = _destinationSize.Width,
                height = _destinationSize.Height
            });
        }
Example No. 14
        public AVFrame Convert(AVFrame sourceFrame)
        {
            ffmpeg.sws_scale(_pConvertContext,
                             sourceFrame.data, sourceFrame.linesize,
                             0, sourceFrame.height,
                             _dstData, _dstLinesize);


            var data = new byte_ptrArray8();

            data.UpdateFrom(_dstData);

            var linesize = new int_array8();

            linesize.UpdateFrom(_dstLinesize);

            return(new AVFrame
            {
                data = data,
                linesize = linesize,
                width = _destinationSize.Width,
                height = _destinationSize.Height,
                pkt_dts = sourceFrame.pkt_dts,
                pts = sourceFrame.pts,
                sample_rate = sourceFrame.sample_rate
            });
        }
Example No. 15
        public static void YUV2RGB(AVFrame f, int[] rgb)
        {
            var luma         = f.data_base[0];
            var cb           = f.data_base[1];
            var cr           = f.data_base[2];
            int stride       = f.linesize[0];
            int strideChroma = f.linesize[1];

            for (int y = 0; y < f.imageHeight; y++)
            {
                int lineOffLuma   = y * stride;
                int lineOffChroma = (y >> 1) * strideChroma;

                for (int x = 0; x < f.imageWidth; x++)
                {
                    int c = luma[lineOffLuma + x] - 16;
                    int d = cb[lineOffChroma + (x >> 1)] - 128;
                    int e = cr[lineOffChroma + (x >> 1)] - 128;

                    byte red   = (byte)MathUtils.Clamp((298 * c + 409 * e + 128) >> 8, 0, 255);
                    byte green = (byte)MathUtils.Clamp((298 * c - 100 * d - 208 * e + 128) >> 8, 0, 255);
                    byte blue  = (byte)MathUtils.Clamp((298 * c + 516 * d + 128) >> 8, 0, 255);
                    byte alpha = 255;

                    rgb[lineOffLuma + x] = (alpha << 24) | (red << 16) | (green << 8) | (blue << 0);
                }
            }
        }
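Both this helper and YUV2RGB_WOEdge in Example No. 3 use the standard fixed-point BT.601 coefficients scaled by 256: R = 1.164(Y - 16) + 1.596(Cr - 128), G = 1.164(Y - 16) - 0.391(Cb - 128) - 0.813(Cr - 128), B = 1.164(Y - 16) + 2.018(Cb - 128), with the + 128 term rounding before the >> 8. Note that this variant indexes rgb with y * stride rather than y * imageWidth, so the destination array must be sized from the luma stride.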
Example No. 16
        public bool TryDecodeNextFrame(out AVFrame frame)
        {
            ffmpeg.av_frame_unref(_pFrame);
            int error = -1;

            do
            {
                try
                {
                    do
                    {
                        error = ffmpeg.av_read_frame(_pFormatContext, _pPacket);
                        if (error == ffmpeg.AVERROR_EOF)
                        {
                            frame = *_pFrame;
                            return(false);
                        }

                        error.ThrowExceptionIfError();
                    } while (_pPacket->stream_index != _streamIndex);

                    ffmpeg.avcodec_send_packet(_pCodecContext, _pPacket).ThrowExceptionIfError();
                }
                finally
                {
                    ffmpeg.av_packet_unref(_pPacket);
                }
                error = ffmpeg.avcodec_receive_frame(_pCodecContext, _pFrame);
            } while (error == ffmpeg.AVERROR(ffmpeg.EAGAIN));

            error.ThrowExceptionIfError();
            frame = *_pFrame;
            return(true);
        }
Example No. 17
        public static FFmpegFrame GetFrameRgba()
        {
            if (!IsInitialized)
            {
                throw new InvalidOperationException("Tried to use uninitialized codec!");
            }

            AVFrame ManagedFrame = Marshal.PtrToStructure <AVFrame>((IntPtr)Frame);

            EnsureScalerSetup(ManagedFrame.width, ManagedFrame.height);

            byte *[] Data = ManagedFrame.data.ToArray();

            int[] LineSizes = ManagedFrame.linesize.ToArray();

            byte[] Dst = new byte[ManagedFrame.width * ManagedFrame.height * 4];

            fixed(byte *Ptr = Dst)
            {
                byte *[] DstData = new byte *[] { Ptr };

                int[] DstLineSizes = new int[] { ManagedFrame.width * 4 };

                ffmpeg.sws_scale(ScalerCtx, Data, LineSizes, 0, ManagedFrame.height, DstData, DstLineSizes);
            }

            return(new FFmpegFrame()
            {
                Width = ManagedFrame.width,
                Height = ManagedFrame.height,

                Data = Dst
            });
        }
Example No. 18
        public void TestEncode()
        {
            AVLog.Callback += (level, msg) => Console.WriteLine(level + ": " + msg);
            AVLog.Level     = (int)AVLogLevel.Trace;

            _codecContext.Open2(AVCodec.FindDecoder(_codecContext.CodecId));
            AVPacket packet;
            var      frame = new AVFrame();

            var codec         = AVCodec.FindEncoderByName("png");
            var encodeContext = new AVCodecContext(codec)
            {
                PixelFormat = AVPixelFormat.RGB24,
                Width       = _codecContext.Width,
                Height      = _codecContext.Height
            };

            encodeContext.Open2(codec);

            var convContext = SWSContext.GetContext(_codecContext.Width, _codecContext.Height, AVPixelFormat.YUV420P,
                                                    AVPixelFormat.RGB24);
            var convBuffer = new AVPicture(AVPixelFormat.RGB24, _codecContext.Width, _codecContext.Height).AsFrame();

            int frameCounter = 0;
            int readCounter  = 0;

            while ((packet = _formatContext.ReadFrame()) != null)
            {
                if (packet.StreamIndex != 0)
                {
                    continue;
                }
                bool pic = _codecContext.DecodeVideo2(packet, frame);
                if (!pic)
                {
                    continue;
                }

                readCounter++;
                convContext.Scale(frame, convBuffer, 0, _codecContext.Height);

                var outPacket = encodeContext.EncodeVideo2(convBuffer);
                if (outPacket != null)
                {
                    frameCounter++;

                    // for checking the frames, not part of normal test
                    //var data = new byte[outPacket.Size];
                    //Marshal.Copy(outPacket.Data, data, 0, data.Length);
                    //File.WriteAllBytes((frameCounter++) + ".png", data);
                }
            }

            Assert.AreEqual(readCounter, frameCounter);
            Assert.AreEqual(245, readCounter);

            encodeContext.Close();
            _codecContext.Close();
        }
Example No. 19
        public static unsafe PointerBitmap AsPointerBitmap(AVFrame frame)
        {
            _EnsureBinariesAreSet();

            var binfo = new BitmapInfo(frame.width, frame.height, Pixel.BGR24.Format, frame.linesize[0]);

            return(new PointerBitmap((IntPtr)frame.data[0], binfo, true));
        }
Example No. 20
    public void SavePng(AVFrame frame, string fileName)
    {
        var texture = AVFrameToTexture2D(frame);

        File.WriteAllBytes(fileName, texture.EncodeToPNG());

        Debug.Log($"Saved {fileName}");
    }
Example No. 21
        public void Encode(AVFrame frame)
        {
            if (frame.format != (int)_pCodecContext->pix_fmt)
            {
                throw new ArgumentException("Invalid pixel format.", nameof(frame));
            }
            if (frame.width != _frameSize.Width)
            {
                throw new ArgumentException("Invalid width.", nameof(frame));
            }
            if (frame.height != _frameSize.Height)
            {
                throw new ArgumentException("Invalid height.", nameof(frame));
            }
            if (frame.linesize[0] < _linesizeY)
            {
                throw new ArgumentException("Invalid Y linesize.", nameof(frame));
            }
            if (frame.linesize[1] < _linesizeU)
            {
                throw new ArgumentException("Invalid U linesize.", nameof(frame));
            }
            if (frame.linesize[2] < _linesizeV)
            {
                throw new ArgumentException("Invalid V linesize.", nameof(frame));
            }
            if (frame.data[1] - frame.data[0] < _ySize)
            {
                throw new ArgumentException("Invalid Y data size.", nameof(frame));
            }
            if (frame.data[2] - frame.data[1] < _uSize)
            {
                throw new ArgumentException("Invalid U data size.", nameof(frame));
            }

            var pPacket = ffmpeg.av_packet_alloc();

            try
            {
                int error;

                do
                {
                    ffmpeg.avcodec_send_frame(_pCodecContext, &frame).ThrowExceptionIfError();
                    ffmpeg.av_packet_unref(pPacket);
                    error = ffmpeg.avcodec_receive_packet(_pCodecContext, pPacket);
                } while (error == ffmpeg.AVERROR(ffmpeg.EAGAIN));

                error.ThrowExceptionIfError();

                using var packetStream = new UnmanagedMemoryStream(pPacket->data, pPacket->size);
                packetStream.CopyTo(_stream);
            }
            finally
            {
                ffmpeg.av_packet_free(&pPacket);
            }
        }
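Encode never drains the encoder, which buffers packets internally; at end of stream those must be flushed or the tail of the video is lost. A minimal drain sketch using the same fields as above (an addition, not part of the original class):

        public void Drain()
        {
            var pPacket = ffmpeg.av_packet_alloc();

            try
            {
                // A null frame switches the encoder into draining mode.
                ffmpeg.avcodec_send_frame(_pCodecContext, null).ThrowExceptionIfError();

                while (ffmpeg.avcodec_receive_packet(_pCodecContext, pPacket) == 0)
                {
                    using var packetStream = new UnmanagedMemoryStream(pPacket->data, pPacket->size);
                    packetStream.CopyTo(_stream);
                    ffmpeg.av_packet_unref(pPacket);
                }
            }
            finally
            {
                ffmpeg.av_packet_free(&pPacket);
            }
        }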
Example No. 22
        public AVFrame Convert(AVFrame sourceFrame)
        {
            var dstData     = new byte_ptrArray4();
            var dstLinesize = new int_array4();

            // NOTE: as captured, this snippet never allocates the destination
            // planes (e.g. with ffmpeg.av_image_alloc), so sws_scale is handed
            // null destination pointers and the scaled output is discarded.
            ffmpeg.sws_scale(_pConvertContext, sourceFrame.data, sourceFrame.linesize, 0, sourceFrame.height, dstData, dstLinesize);

            return(new AVFrame());
        }
Example No. 23
 public static bool DecodeVideo2(AVCodecContext codecContext, AVFrame frame, out bool frameFinished, AVPacket packet)
 {
     int ffNum = 0;
     bool ok =
         FFmpegInvoke.avcodec_decode_video2(codecContext.NativeObj, frame.NativeObj, &ffNum, packet.NativeObj) >=
         0;
     frameFinished = ffNum > 0;
     return ok;
 }
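avcodec_decode_video2 has since been removed from FFmpeg; the modern equivalent is the send/receive pair. A sketch in FFmpeg.AutoGen style rather than the wrapper API used above:

        public static unsafe bool DecodeVideo(AVCodecContext* codecContext, AVFrame* frame, AVPacket* packet)
        {
            if (ffmpeg.avcodec_send_packet(codecContext, packet) < 0)
            {
                return false;
            }

            // 0 means a decoded frame is available; AVERROR(EAGAIN) means more input is needed.
            return ffmpeg.avcodec_receive_frame(codecContext, frame) == 0;
        }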
Example No. 24
        private static unsafe void EncodeImagesToH264()
        {
            var frameFiles      = Directory.GetFiles(".", "frame.*.jpg").OrderBy(x => x).ToArray();
            var firstFrameImage = Image.FromFile(frameFiles.First());

            var outputFileName         = "out.h264";
            var fps                    = 25;
            var sourceSize             = firstFrameImage.Size;
            var sourcePixelFormat      = AVPixelFormat.AV_PIX_FMT_BGR24;
            var destinationSize        = sourceSize;
            var destinationPixelFormat = AVPixelFormat.AV_PIX_FMT_YUV420P;

            using (var vfc = new VideoFrameConverter(sourceSize, sourcePixelFormat, destinationSize, destinationPixelFormat))
            {
                using (var fs = File.Open(outputFileName, FileMode.Create)) // be advised: only ffmpeg-based players (like ffplay or VLC) can play this raw file; for anything else it must be muxed into a container
                {
                    using (var vse = new H264VideoStreamEncoder(fs, fps, destinationSize))
                    {
                        var frameNumber = 0;
                        foreach (var frameFile in frameFiles)
                        {
                            byte[] bitmapData;

                            using (var frameImage = Image.FromFile(frameFile))
                                using (var frameBitmap = frameImage is Bitmap bitmap ? bitmap : new Bitmap(frameImage))
                                {
                                    bitmapData = GetBitmapData(frameBitmap);
                                }

                            fixed(byte *pBitmapData = bitmapData)
                            {
                                var data = new byte_ptrArray8 {
                                    [0] = pBitmapData
                                };
                                var linesize = new int_array8 {
                                    [0] = bitmapData.Length / sourceSize.Height
                                };
                                var frame = new AVFrame
                                {
                                    data     = data,
                                    linesize = linesize,
                                    height   = sourceSize.Height
                                };
                                var convertedFrame = vfc.Convert(frame);

                                convertedFrame.pts = frameNumber * fps;
                                vse.Encode(convertedFrame);
                            }

                            Console.WriteLine($"frame: {frameNumber}");
                            frameNumber++;
                        }
                    }
                }
            }
        }
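GetBitmapData is referenced above but not shown. A plausible sketch (an assumption, not necessarily the original helper) that copies the raw 24-bit pixels out of the Bitmap via LockBits, using System.Drawing.Imaging:

        private static byte[] GetBitmapData(Bitmap frameBitmap)
        {
            var bitmapData = frameBitmap.LockBits(new Rectangle(Point.Empty, frameBitmap.Size),
                                                  ImageLockMode.ReadOnly,
                                                  PixelFormat.Format24bppRgb); // matches AV_PIX_FMT_BGR24 above

            try
            {
                // Stride includes row padding, which also feeds the linesize
                // computation (bitmapData.Length / sourceSize.Height) above.
                var length = bitmapData.Stride * bitmapData.Height;
                var data   = new byte[length];
                Marshal.Copy(bitmapData.Scan0, data, 0, length);
                return(data);
            }
            finally
            {
                frameBitmap.UnlockBits(bitmapData);
            }
        }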
Example No. 25
        public static Bitmap imageFromFrameWithoutEdges(AVFrame f, int Width, int Height)
        {
            var XEdge = (f.imageWidth - f.imageWidthWOEdge) / 2;
            var YEdge = (f.imageHeight - f.imageHeightWOEdge) / 2;
            var Out   = new Bitmap(Math.Min(Width, f.imageWidthWOEdge), Math.Min(Height, f.imageHeightWOEdge));
            var In    = imageFromFrame(f);

            Graphics.FromImage(Out).DrawImage(In, new Point(-XEdge, -YEdge));
            return(Out);
        }
Example No. 26
    public static AVFrame[][] ReturnRectangularAVFrameArray(int size1, int size2)
    {
        AVFrame[][] newArray = new AVFrame[size1][];
        for (int array1 = 0; array1 < size1; array1++)
        {
            newArray[array1] = new AVFrame[size2];
        }

        return(newArray);
    }
Example No. 27
        public void TryEncodeNextPacket(AVFrame uncompressed_frame)
        {
            var encoded_packet = ffmpeg.av_packet_alloc();

            ffmpeg.av_init_packet(encoded_packet);

            try
            {
                int error;
                do
                {
                    if (uncompressed_frame.channels == 0)
                    {
                        ffmpeg.avcodec_send_frame(videoCodecContextoutput, &uncompressed_frame);

                        error            = ffmpeg.avcodec_receive_packet(videoCodecContextoutput, encoded_packet);
                        enc_stream_index = encoded_packet->stream_index;

                        Console.WriteLine("videoPAcket");
                        encoded_packet->pts = (long)(ffmpeg.av_rescale_q(index, videoCodecContextoutput->time_base, oFormatContext->streams[enc_stream_index]->time_base));
                        encoded_packet->dts = ffmpeg.av_rescale_q(index, videoCodecContextoutput->time_base, oFormatContext->streams[enc_stream_index]->time_base);

                        Console.WriteLine($"{encoded_packet->pts}   /  {encoded_packet->dts}");
                        error = 1;
                    }
                    else
                    {
                        //ffmpeg.av_audio_fifo_alloc(audioCodecContext->sample_fmt, audioCodecContext->channels, 1);
                        Console.WriteLine(audioCodecContext->time_base.num + "   //   " + audioCodecContext->time_base.den);

                        // ffmpeg.avcodec_send_frame(audioCodecContext, &uncompressed_frame);
                        error = 1;

                        //  error = ffmpeg.avcodec_receive_packet(audioCodecContext, encoded_packet);
                        enc_stream_index = encoded_packet->stream_index;

                        //Console.WriteLine("AudioPacket");
                        //encoded_packet->pts = (long)(ffmpeg.av_rescale_q(encoded_packet->pts, audioCodecContext->time_base, oFormatContext->streams[enc_stream_index]->time_base));
                        //encoded_packet->dts = ffmpeg.av_rescale_q(encoded_packet->dts, audioCodecContext->time_base, oFormatContext->streams[enc_stream_index]->time_base);
                    }

                    index++;
                    //Console.WriteLine("==========");
                    //Console.WriteLine(encoded_packet->pts + "/" + encoded_packet->dts);

                    //write frame in video file
                    //   ffmpeg.av_write_frame(oFormatContext, encoded_packet);
                    ffmpeg.av_interleaved_write_frame(oFormatContext, encoded_packet);
                } while (error == ffmpeg.AVERROR(ffmpeg.EAGAIN) || error == ffmpeg.AVERROR_EOF); // AVERROR_EOF is already a full error code, so it is compared directly
            }
            finally
            {
                // av_packet_free both unrefs and frees the packet allocated by av_packet_alloc.
                ffmpeg.av_packet_free(&encoded_packet);
            }
        }
Example No. 28
    public bool TryDecodeNextFrame(out AVFrame frame)
    {
        // Reset the frames
        ffmpeg.av_frame_unref(_pFrame);
        ffmpeg.av_frame_unref(_receivedFrame);

        int error;

        do
        {
            try
            {
                do
                {
                    ffmpeg.av_packet_unref(_pPacket);

                    error = ffmpeg.av_read_frame(_pFormatContext, _pPacket);

                    if (error == ffmpeg.AVERROR_EOF)
                    {
                        frame = *_pFrame;
                        return(false);
                    }
                } while (_pPacket->stream_index != _streamIndex);

                ffmpeg.avcodec_send_packet(_pCodecContext, _pPacket);
            }
            finally
            {
                ffmpeg.av_packet_unref(_pPacket);
            }

            error = ffmpeg.avcodec_receive_frame(_pCodecContext, _pFrame);
        }while (error == ffmpeg.AVERROR(ffmpeg.EAGAIN));

        error.ThrowExceptionIfError();

        if (isHwAccelerate)
        {
            ffmpeg.av_hwframe_transfer_data(_receivedFrame, _pFrame, 0).ThrowExceptionIfError();
            frame = *_receivedFrame;
            //Console.WriteLine("Transfer OK");
        }
        else
        {
            //Console.WriteLine("PIX is not HW");
            frame = *_pFrame;
        }

        return(true);
    }
Example No. 29
        public void Properties_ReturnNativeValues()
        {
            var presentationTimestamp = 4525;
            var width          = 5465415;
            var height         = 654312;
            var lineSize       = default(NativeIntArray);
            var lineSizeValues = new int[] { 1, 2, 3, 4, 5, 6, 7, 8 };

            lineSize.UpdateFrom(lineSizeValues);
            var data       = default(NativePointerArray);
            var dataValues = new byte *[] { (byte *)8, (byte *)7, (byte *)6, (byte *)5, (byte *)4, (byte *)3, (byte *)2, (byte *)1 };

            data.UpdateFrom(dataValues);
            var pictureType = NativeAVPictureType.AV_PICTURE_TYPE_P;
            var pixelFormat = NativeAVPixelFormat.AV_PIX_FMT_AYUV64BE;

            NativeAVFrame nativeFrame = new NativeAVFrame()
            {
                pts       = presentationTimestamp,
                width     = width,
                height    = height,
                linesize  = lineSize,
                data      = data,
                pict_type = pictureType,
                format    = (int)pixelFormat,
            };

            var ffmpegMock = new Mock <FFmpegClient>();

            ffmpegMock
            .Setup(c => c.FreeFrame(It.IsAny <IntPtr>()))
            .Verifiable();
            var ffmpegClient = ffmpegMock.Object;

            using (var frame = new AVFrame(ffmpegClient, new AVFrameHandle(ffmpegClient, &nativeFrame)))
            {
                Assert.Equal(presentationTimestamp, frame.PresentationTimestamp);
                Assert.Equal(width, frame.Width);
                Assert.Equal(height, frame.Height);
                Assert.Equal(lineSizeValues, frame.LineSize.ToArray());
                Assert.Equal(8, (int)frame.Data[0]);
                Assert.Equal(7, (int)frame.Data[1]);
                Assert.Equal(6, (int)frame.Data[2]);
                Assert.Equal(5, (int)frame.Data[3]);
                Assert.Equal(4, (int)frame.Data[4]);
                Assert.Equal(3, (int)frame.Data[5]);
                Assert.Equal(2, (int)frame.Data[6]);
                Assert.Equal(1, (int)frame.Data[7]);
                Assert.Equal(pictureType, frame.PictureType);
                Assert.Equal(pixelFormat, frame.Format);
            }

            ffmpegMock.Verify();
        }
Example No. 30
        public bool TryDecodeFrame(out AVFrame frame, TimeSpan position)
        {
            ffmpeg.av_frame_unref(_pFrame);
            ffmpeg.av_frame_unref(_receivedFrame);
            int error;

            AVRational timebase       = _pFormatContext->streams[_streamIndex]->time_base;
            // Ticks per second of the stream's time_base (not FFmpeg's global AV_TIME_BASE).
            float      ticksPerSecond = (float)timebase.den / timebase.num;
            long       tc             = Convert.ToInt64(position.TotalSeconds * ticksPerSecond);

            if (ffmpeg.av_seek_frame(_pFormatContext, _streamIndex, tc, ffmpeg.AVSEEK_FLAG_BACKWARD) < 0)
            {
                ffmpeg.av_seek_frame(_pFormatContext, _streamIndex, tc, ffmpeg.AVSEEK_FLAG_ANY).ThrowExceptionIfError();
            }
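            // After a successful seek the decoder would normally also be reset with
            // ffmpeg.avcodec_flush_buffers(_pCodecContext) so frames buffered from
            // the previous position are not returned.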
            do
            {
                try {
                    do
                    {
                        ffmpeg.av_packet_unref(_pPacket);
                        error = ffmpeg.av_read_frame(_pFormatContext, _pPacket);

                        if (error == ffmpeg.AVERROR_EOF)
                        {
                            frame = *_pFrame;
                            return(false);
                        }

                        error.ThrowExceptionIfError();
                    } while (_pPacket->stream_index != _streamIndex);

                    ffmpeg.avcodec_send_packet(_pCodecContext, _pPacket).ThrowExceptionIfError();
                }
                finally {
                    ffmpeg.av_packet_unref(_pPacket);
                }

                error = ffmpeg.avcodec_receive_frame(_pCodecContext, _pFrame);
            } while (error == ffmpeg.AVERROR(ffmpeg.EAGAIN));

            error.ThrowExceptionIfError();

            if (_pCodecContext->hw_device_ctx != null)
            {
                ffmpeg.av_hwframe_transfer_data(_receivedFrame, _pFrame, 0).ThrowExceptionIfError();
                frame = *_receivedFrame;
            }
            else
            {
                frame = *_pFrame;
            }

            return(true);
        }
Example No. 31
    // Update is called once per frame
    void Update()
    {
        AVFrame receivedFrame = receiver.ReceiveFrame();

        this.logText.text = "Received!";

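        // NOTE: this assumes a tightly packed 3-bytes-per-pixel frame
        // (linesize[0] == VideoWidth * 3); with row padding the data would
        // have to be copied row by row instead.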
        texture.LoadRawTextureData((IntPtr)receivedFrame.data[0], this.receiver.VideoWidth * this.receiver.VideoHeight * 3);
        texture.Apply();

        streamingViewer.GetComponent <Renderer>().material.mainTexture = texture;
    }
Example No. 32
 public static bool Fill(AVFrame frame, SByteBuffer buffer, AutoGen.AVPixelFormat format, int width,
     int height)
 {
     return FFmpegInvoke.avpicture_fill((AutoGen.AVPicture*)frame.NativeObj, buffer.NativeObj, format, width, height) == 0;
 }
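avpicture_fill and the AVPicture struct were deprecated and later removed from FFmpeg; the replacement is av_image_fill_arrays. A sketch in FFmpeg.AutoGen style rather than the wrapper API used above (buffer is assumed to be a byte* to the packed source pixels):

     byte_ptrArray4 dstData     = new byte_ptrArray4();
     int_array4     dstLinesize = new int_array4();

     // Split buffer into per-plane pointers and compute each plane's stride.
     bool ok = ffmpeg.av_image_fill_arrays(ref dstData, ref dstLinesize, buffer,
                                           format, width, height, 1) >= 0;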