Ejemplo n.º 1
0
        /// <summary>
        /// Encoder worker loop: while <c>isEncodingEvent2</c> is signalled, drains
        /// BGR24 frames from <c>decodedFrameQueue2</c>, converts them to YUV420P and
        /// feeds them to <c>h264Encoder2</c>. Exits when WaitOne returns false or a
        /// handle it waits on is disposed.
        /// </summary>
        /// <param name="state">Thread-pool state object; not used.</param>
        private unsafe void EncodeImagesToH2642(object state)
        {
            try
            {
                while (isEncodingEvent2.WaitOne())
                {
                    if (decodedFrameQueue2.TryDequeue(out queueFrame2))
                    {
                        var sourcePixelFormat      = AVPixelFormat.AV_PIX_FMT_BGR24;
                        var destinationPixelFormat = AVPixelFormat.AV_PIX_FMT_YUV420P; //for h.264

                        // NOTE(review): a new converter is allocated for every frame; if
                        // videoInfo2's sizes are stable this could be hoisted out of the
                        // loop — confirm before changing.
                        using (var vfc = new VideoFrameConverter(videoInfo2.SourceFrameSize, sourcePixelFormat, videoInfo2.DestinationFrameSize, destinationPixelFormat))
                        {
                            var convertedFrame = vfc.Convert(queueFrame2);
                            convertedFrame.pts = frameNumber2;       //to do
                            h264Encoder2.TryEncodeNextPacket(convertedFrame);
                        }

                        // Monotonic pts counter for the next frame.
                        frameNumber2++;
                    }
                }
            }
            catch (ObjectDisposedException e)
            {
                // The wait handle/queue was disposed during shutdown; end the worker quietly.
                Console.WriteLine(e.Message);
            }
        }
        /// <summary>
        /// Decodes every frame from <c>device</c>, converts each to BGR24 and hands it
        /// to <c>BitmapToImageSource</c> for display. Runs until decoding fails or
        /// <c>activeThread</c> is cleared.
        /// </summary>
        private unsafe void DecodeAllFramesToImages()
        {
            using (var vsd = new VideoStreamDecoder(device))
            {
                var info = vsd.GetContextInfo();
                info.ToList().ForEach(x => Console.WriteLine($"{x.Key} = {x.Value}"));

                var sourceSize             = vsd.FrameSize;
                var sourcePixelFormat      = vsd.PixelFormat;
                var destinationSize        = sourceSize;
                var destinationPixelFormat = AVPixelFormat.AV_PIX_FMT_BGR24;
                using (var vfc = new VideoFrameConverter(sourceSize, sourcePixelFormat, destinationSize, destinationPixelFormat))
                {
                    while (vsd.TryDecodeNextFrame(out var frame) && activeThread)
                    {
                        var convertedFrame = vfc.Convert(frame);

                        // Fix: dispose the Bitmap each iteration — it holds a GDI+ handle
                        // and merely wraps the converter's unmanaged buffer without copying
                        // it, so keeping it alive leaks handles while the pixels it points
                        // at get overwritten anyway.
                        // NOTE(review): assumes BitmapToImageSource copies the pixels it
                        // needs — confirm against its implementation.
                        using (var bitmap = new Bitmap(convertedFrame.width, convertedFrame.height, convertedFrame.linesize[0], System.Drawing.Imaging.PixelFormat.Format24bppRgb, (IntPtr)convertedFrame.data[0]))
                        {
                            BitmapToImageSource(bitmap);
                        }
                    }
                }
            }
        }
Ejemplo n.º 3
0
        /// <summary>
        /// Reads packets from the stream until one full frame is decoded, converts it
        /// to RGB and raises <c>newFrameEvent</c> with the result.
        /// </summary>
        /// <returns>0 when a frame was decoded and delivered; 1 on end of stream.</returns>
        public int DecodeFrame()
        {
            ffmpeg.av_frame_unref(_pFrame);
            var frame = new Frame();

            frame.hasError = false;
            int error;

            do
            {
                try
                {
                    // Keep reading until we get a packet for the video stream we decode.
                    do
                    {
                        error = ffmpeg.av_read_frame(_pFormatContext, _pPacket);
                        if (error == ffmpeg.AVERROR_EOF)
                        {
                            frame.hasError = true;
                            frame.errorMsg = "EOF";
                            return(1);
                        }

                        error.ThrowExceptionIfError();
                    } while (_pPacket->stream_index != _streamIndex);

                    ffmpeg.avcodec_send_packet(_pCodecContext, _pPacket).ThrowExceptionIfError();
                }
                finally
                {
                    // Always release the packet buffer, even when an exception is thrown.
                    ffmpeg.av_packet_unref(_pPacket);
                }

                error = ffmpeg.avcodec_receive_frame(_pCodecContext, _pFrame);
            } while (error == ffmpeg.AVERROR(ffmpeg.EAGAIN)); // decoder needs more input

            // TODO: the converter should be created once, not per decoded frame.
            // Fix: dispose the converter so its scaler context/buffers are released;
            // the original leaked one converter per decoded frame.
            using (var frameConverter = new VideoFrameConverter(decoderConfiguration))
            {
                var outputframe = frameConverter.Convert(*_pFrame);

                if (newFrameEvent != null)
                {
                    newFrameEvent.Invoke(new Frame
                    {
                        hasError    = false,
                        pixelFormat = Models.PixelFormat.RGB,
                        resolution  = decoderConfiguration.outputResolution,
                        rgbFrame    = GetByteArrayFromRGBFrame(&outputframe)
                    });
                }
            }
            error.ThrowExceptionIfError();
            return(0);
        }
Ejemplo n.º 4
0
        /// <summary>
        /// Loads the test pattern bitmap, initialises FFmpeg logging, creates the
        /// encoder and a BGRA -> YUV420P converter matching the pattern's dimensions.
        /// </summary>
        private static void InitialiseTestPattern()
        {
            _testPattern = new Bitmap(TEST_PATTERN_IMAGE_PATH);

            FFmpegInit.Initialise(FfmpegLogLevelEnum.AV_LOG_DEBUG);

            _ffmpegEncoder = new FFmpegVideoEncoder();
            Console.WriteLine($"Codec name {_ffmpegEncoder.GetCodecName()}.");

            int width  = _testPattern.Width;
            int height = _testPattern.Height;

            // Source and destination dimensions are identical; only the pixel format changes.
            _videoFrameConverter = new VideoFrameConverter(
                width, height, AVPixelFormat.AV_PIX_FMT_BGRA,
                width, height, AVPixelFormat.AV_PIX_FMT_YUV420P);
        }
Ejemplo n.º 5
0
        /// <summary>
        /// Round-trips the test pattern bitmap RGB24 -> I420 -> RGB24 with no encoding
        /// and saves the result to "testpattern-result.bmp" for visual comparison.
        /// </summary>
        private static void RoundTripNoEncodingTestPattern()
        {
            // Fix: the bitmap and both converters are IDisposable and were leaked.
            using var testBmp = new Bitmap(TEST_PATTERN_IMAGE_PATH);
            int w = testBmp.Width;
            int h = testBmp.Height;

            using var rgbToi420 = new VideoFrameConverter(
                testBmp.Size.Width, testBmp.Size.Height,
                AVPixelFormat.AV_PIX_FMT_RGB24,
                testBmp.Size.Width, testBmp.Size.Height,
                AVPixelFormat.AV_PIX_FMT_YUV420P);

            using var i420Converter = new VideoFrameConverter(
                testBmp.Size.Width, testBmp.Size.Height,
                AVPixelFormat.AV_PIX_FMT_YUV420P,
                testBmp.Size.Width, testBmp.Size.Height,
                AVPixelFormat.AV_PIX_FMT_RGB24);

            // Copy the bitmap's raw 24bpp pixel rows (including stride padding) out.
            BitmapData bmpData    = testBmp.LockBits(new Rectangle(0, 0, w, h), ImageLockMode.ReadOnly, PixelFormat.Format24bppRgb);
            IntPtr     bmpDataPtr = bmpData.Scan0;
            int        stride     = bmpData.Stride;

            byte[] bmpBuffer = new byte[Math.Abs(stride * h)]; // stride is negative for bottom-up bitmaps
            Marshal.Copy(bmpDataPtr, bmpBuffer, 0, bmpBuffer.Length);
            testBmp.UnlockBits(bmpData);

            Console.WriteLine($"Test pattern stride {stride}.");

            // Convert bitmap to i420.
            var i420Buffer = rgbToi420.ConvertToBuffer(bmpBuffer);

            Console.WriteLine($"Converted rgb to i420 buffer, length {i420Buffer.Length}.");

            // Convert i420 back to bmp.
            var outRgb = i420Converter.ConvertToBuffer(i420Buffer);

            Console.WriteLine($"Converted i420 to rgb buffer, length {outRgb.Length}.");

            unsafe
            {
                fixed(byte *s = outRgb)
                {
                    System.Drawing.Bitmap bmpImage = new System.Drawing.Bitmap(w, h, outRgb.Length / h, PixelFormat.Format24bppRgb, (IntPtr)s);
                    bmpImage.Save("testpattern-result.bmp");
                    bmpImage.Dispose();
                }
            }
        }
Ejemplo n.º 6
0
        /// <summary>
        /// Loads the test pattern bitmap, initialises FFmpeg logging, creates an H264
        /// encoder and a BGRA -> YUV420P converter sized to the pattern.
        /// </summary>
        private static void InitialiseTestPattern()
        {
            _testPattern = new Bitmap(TEST_PATTERN_IMAGE_PATH);

            FFmpegInit.Initialise(FfmpegLogLevelEnum.AV_LOG_DEBUG);

            // A VP8 encoder (AV_CODEC_ID_VP8) was tried here previously; H264 is in use.
            _ffmpegEncoder = new VideoEncoder(AVCodecID.AV_CODEC_ID_H264, _testPattern.Width, _testPattern.Height, FRAMES_PER_SECOND);
            Console.WriteLine($"Codec name {_ffmpegEncoder.GetCodecName()}.");

            var patternSize = new Size(_testPattern.Width, _testPattern.Height);

            // Same dimensions on both sides; only the pixel format changes.
            _videoFrameConverter = new VideoFrameConverter(
                patternSize, AVPixelFormat.AV_PIX_FMT_BGRA,
                patternSize, AVPixelFormat.AV_PIX_FMT_YUV420P);
        }
Ejemplo n.º 7
0
    /// <summary>
    /// Decodes <paramref name="filename"/> frame by frame, raising OnFrameRendered for
    /// every converted frame, and returns the frame at <paramref name="frameIndex"/> as
    /// a Texture2D. Returns an empty 4x4 texture if decoding ends (or _isRunning is
    /// cleared) before that index is reached.
    /// </summary>
    private Texture2D DecodeFrameToTexture2D(String filename, int frameIndex = 10,
                                             AVHWDeviceType HWDevice         = AVHWDeviceType.AV_HWDEVICE_TYPE_NONE)
    {
        using (var decoder = new VideoStreamDecoder(filename, HWDevice))
        {
            Debug.Log($"codec name: {decoder.CodecName}");

            foreach (var entry in decoder.GetContextInfo())
            {
                Debug.Log($"{entry.Key} = {entry.Value}");
            }

            var srcSize = decoder.FrameSize;
            var srcPixelFormat = HWDevice == AVHWDeviceType.AV_HWDEVICE_TYPE_NONE
                ? decoder.PixelFormat
                : GetHWPixelFormat(HWDevice);

            // Convert to BGR24 at the source resolution.
            using (var converter = new VideoFrameConverter(srcSize, srcPixelFormat, srcSize,
                                                           AVPixelFormat.AV_PIX_FMT_BGR24))
            {
                var frameCount = 0;

                while (decoder.TryDecodeNextFrame(out var frame) && _isRunning)
                {
                    Debug.Log($"Processing frame: {frameCount}");
                    var converted = converter.Convert(frame);

                    if (OnFrameRendered != null)
                    {
                        decoder.AvFrameToImageByteArray(converted, out var imageData);
                        OnFrameRendered(imageData);
                    }

                    if (frameCount == frameIndex)
                    {
                        Debug.Log($"Saving frame: {frameIndex}");
                        return(decoder.AVFrameToTexture2D(converted));
                    }

                    frameCount++;
                }

                return(new Texture2D(4, 4));
            }
        }
    }
Ejemplo n.º 8
0
        /// <summary>
        /// Decoder worker: decodes frames from <c>url</c>, converts them to BGR24,
        /// queues them for the encoder thread when it is running, and displays each
        /// frame. Gated per-frame by <c>isDecodingEvent</c>.
        /// </summary>
        /// <param name="state">Thread-pool state object; not used.</param>
        private unsafe void DecodeAllFramesToImages(object state)
        {
            try
            {
                using (var decoder = new VideoStreamDecoder(url, videoInputType))
                {
                    videoInfo = decoder.GetVideoInfo();

                    var info = decoder.GetContextInfo();
                    info.ToList().ForEach(x => Console.WriteLine($"{x.Key} = {x.Value}"));

                    var sourceSize             = decoder.FrameSize;
                    var sourcePixelFormat      = hwDeviceType == AVHWDeviceType.AV_HWDEVICE_TYPE_NONE ? decoder.PixelFormat : GetHWPixelFormat(hwDeviceType);
                    var destinationSize        = sourceSize;
                    var destinationPixelFormat = AVPixelFormat.AV_PIX_FMT_BGR24;

                    using (var vfc = new VideoFrameConverter(sourceSize, sourcePixelFormat, destinationSize, destinationPixelFormat))
                    {
                        while (decoder.TryDecodeNextFrame(out var frame) && isDecodingEvent.WaitOne())
                        {
                            var convertedFrame = vfc.Convert(frame);

                            // Fix: dispose the Bitmap each iteration. It wraps the converter's
                            // unmanaged buffer without copying it, so keeping it alive only
                            // leaks the GDI+ handle while the pixels are overwritten anyway.
                            // NOTE(review): assumes BitmapToImageSource copies the pixels — confirm.
                            using (Bitmap bt = new Bitmap(convertedFrame.width, convertedFrame.height, convertedFrame.linesize[0], System.Drawing.Imaging.PixelFormat.Format24bppRgb, (IntPtr)convertedFrame.data[0]))
                            {
                                if (isEncodingThreadRunning)
                                {
                                    // NOTE(review): the queued AVFrame's data pointers reference
                                    // the converter's reusable buffer; the encoder must consume
                                    // it before the next Convert overwrites it — confirm timing.
                                    decodedFrameQueue.Enqueue(convertedFrame);
                                }

                                BitmapToImageSource(bt);
                            }
                        }
                    }
                }
            }
            catch (ApplicationException e)
            {
                Console.WriteLine(e.Message);
            }
            catch (ObjectDisposedException e)
            {
                // Wait handle/decoder disposed during shutdown; end the worker quietly.
                Console.WriteLine(e.Message);
            }
        }
Ejemplo n.º 9
0
        /// <summary>
        /// Lazily decodes <paramref name="url"/> and yields each frame as a
        /// (bitmap, state) pair, where the state exposes per-frame timing counters.
        /// </summary>
        /// <param name="url">Media URL or file path to decode.</param>
        /// <param name="HWDevice">Hardware decode device, or NONE for software decoding.</param>
        /// <remarks>
        /// The same <c>state</c> dictionary and <c>context</c> instance are reused for
        /// every yielded item, and the bitmap wraps the converter's reusable buffer:
        /// consumers must process each item before advancing the enumerator, and must
        /// not buffer the sequence. The decoder/converter are disposed when enumeration
        /// finishes or the enumerator is disposed.
        /// </remarks>
        public static IEnumerable <(PointerBitmap bitmap, VideoFrameState state)> DecodeFrames(string url, AVHWDeviceType HWDevice)
        {
            _EnsureBinariesAreSet();

            using (var vsd = new VideoStreamDecoder(url, HWDevice))
            {
                var info  = GetDecoderInfo(vsd);
                var state = new Dictionary <string, long>();

                var context = new VideoFrameState(info, state);

                var sourceSize             = vsd.FrameSize;
                var sourcePixelFormat      = HWDevice == AVHWDeviceType.AV_HWDEVICE_TYPE_NONE ? vsd.PixelFormat : GetHWPixelFormat(HWDevice);
                var destinationSize        = sourceSize;
                var destinationPixelFormat = AVPixelFormat.AV_PIX_FMT_BGR24;

                long index = 0;

                using (var vfc = new VideoFrameConverter(sourceSize, sourcePixelFormat, destinationSize, destinationPixelFormat))
                {
                    while (vsd.TryDecodeNextFrame(out var frame))
                    {
                        var convertedFrame = vfc.Convert(frame);

                        // Refresh the shared state dictionary with this frame's counters.
                        state["index"] = index;

                        state["pts"] = frame.pts;
                        // state["pkt_pts"] = frame.pkt_pts;
                        state["pkt_dts"] = frame.pkt_dts;
                        state["best_effort_timestamp"] = frame.best_effort_timestamp;

                        state["display_picture_number"] = frame.display_picture_number;
                        state["coded_picture_number"]   = frame.coded_picture_number;
                        state["decode_error_flags"]     = frame.decode_error_flags;

                        yield return(AsPointerBitmap(convertedFrame), context);

                        ++index;
                    }
                }
            }
        }
Ejemplo n.º 10
0
        /// <summary>
        /// Encoder worker loop: while <c>pauseEvent</c> is signalled, drains BGR24
        /// frames from <c>decodedFrameQueue</c>, converts them to YUV420P and feeds
        /// them to <c>h264Encoder</c>.
        /// </summary>
        private unsafe void EncodeImagesToH264()
        {
            while (pauseEvent.WaitOne())
            {
                if (decodedFrameQueue.TryDequeue(out queueFrame))
                {
                    var sourcePixelFormat      = AVPixelFormat.AV_PIX_FMT_BGR24;
                    var destinationPixelFormat = AVPixelFormat.AV_PIX_FMT_YUV420P; //for h.264

                    // NOTE(review): a new converter is allocated per frame; if the
                    // sourceSize/destinationSize fields are stable while this thread
                    // runs, the converter could be hoisted out of the loop — confirm.
                    using (var vfc = new VideoFrameConverter(sourceSize, sourcePixelFormat, destinationSize, destinationPixelFormat))
                    {
                        var convertedFrame = vfc.Convert(queueFrame);
                        // pts stepped by 2 per frame; flagged "to do" by the original author.
                        convertedFrame.pts = frameNumber * 2;       //to do
                        h264Encoder.TryEncodeNextPacket(convertedFrame);
                    }

                    frameNumber++;
                }
            }
        }
Ejemplo n.º 11
0
 /// <summary>
 /// Decodes only the FIRST frame of <paramref name="url"/>, converts it to BGR24
 /// and assigns it to <c>this.Image</c> as a WPF BitmapSource.
 /// </summary>
 /// <param name="url">Media URL or file path to decode.</param>
 private unsafe void DecodeAllFramesToImages(string url)
 {
     using (VideoStreamDecoder vsd = new VideoStreamDecoder(url))
     {
         // `info` is fetched but never used here.
         IReadOnlyDictionary <string, string> info = vsd.GetContextInfo();
         System.Drawing.Size sourceSize            = vsd.FrameSize;
         AVPixelFormat       sourcePixelFormat     = vsd.PixelFormat;
         Size          destinationSize             = sourceSize;
         AVPixelFormat destinationPixelFormat      = AVPixelFormat.AV_PIX_FMT_BGR24;
         using (VideoFrameConverter vfc = new VideoFrameConverter(sourceSize, sourcePixelFormat, destinationSize, destinationPixelFormat))
         {
             AVFrame frame = default(AVFrame);
             // Return value ignored: on failure `frame` stays default and Convert
             // operates on an empty frame.
             vsd.TryDecodeNextFrame(out frame);
             AVFrame convertedFrame = vfc.Convert(frame);
             using (Bitmap bitmap = new Bitmap(convertedFrame.width, convertedFrame.height, convertedFrame.linesize[0u], PixelFormat.Format24bppRgb, (IntPtr)(void *)convertedFrame.data[0u]))
             {
                 // NOTE(review): GetHbitmap() allocates a GDI bitmap handle that is never
                 // released here (no DeleteObject call) — this leaks one handle per call.
                 // Fixing it requires a gdi32 DeleteObject interop declaration outside
                 // this method.
                 this.Image = Imaging.CreateBitmapSourceFromHBitmap(bitmap.GetHbitmap(), IntPtr.Zero, System.Windows.Int32Rect.Empty, System.Windows.Media.Imaging.BitmapSizeOptions.FromEmptyOptions());
             }
         }
     }
 }
Ejemplo n.º 12
0
        /// <summary>
        /// Decodes frames from <c>url</c>, converts them to BGR24, queues them for the
        /// encoding thread when it is active, and displays each frame. Runs until
        /// decoding fails or <c>activeThread</c> is cleared.
        /// </summary>
        private unsafe void DecodeAllFramesToImages()
        {
            //video="웹캠 디바이스 이름"
            //string url = "video=AVerMedia GC550 Video Capture";

            //sample rtsp source
            //string url = "rtsp://184.72.239.149/vod/mp4:BigBuckBunny_115k.mov";

            using (var vsd = new VideoStreamDecoder(url, type))
            {
                var info = vsd.GetContextInfo();
                enCodecInfo = vsd.GetCodecInfo();

                info.ToList().ForEach(x => Console.WriteLine($"{x.Key} = {x.Value}"));

                sourceSize      = vsd.FrameSize;
                destinationSize = sourceSize;
                var sourcePixelFormat      = vsd.PixelFormat;
                var destinationPixelFormat = AVPixelFormat.AV_PIX_FMT_BGR24;

                using (var vfc = new VideoFrameConverter(sourceSize, sourcePixelFormat, destinationSize, destinationPixelFormat))
                {
                    while (vsd.TryDecodeNextFrame(out var frame) && activeThread)
                    {
                        var convertedFrame = vfc.Convert(frame);

                        // Fix: dispose the Bitmap each iteration. It wraps the converter's
                        // unmanaged buffer without copying it, so keeping it alive only
                        // leaks the GDI+ handle while the pixels are overwritten anyway.
                        // NOTE(review): assumes BitmapToImageSource copies the pixels — confirm.
                        using (Bitmap bitmap = new Bitmap(convertedFrame.width, convertedFrame.height, convertedFrame.linesize[0], System.Drawing.Imaging.PixelFormat.Format24bppRgb, (IntPtr)convertedFrame.data[0]))
                        {
                            if (activeEncodingThread)
                            {
                                // NOTE(review): the queued AVFrame's data pointers reference the
                                // converter's reusable buffer; the encoder must consume it before
                                // the next Convert overwrites it — confirm timing.
                                decodedFrameQueue.Enqueue(convertedFrame);
                            }

                            //display video image
                            BitmapToImageSource(bitmap);
                        }
                    }
                }
            }
        }
Ejemplo n.º 13
0
        /// <summary>
        /// Round-trips a synthetic 32x32 RGB bitmap through I420 and back with no
        /// encoding, saving "test-source.bmp" and "test-result.bmp" for comparison.
        /// </summary>
        private static void RoundTripNoEncoding()
        {
            int width  = 32;
            int height = 32;

            // Fix: VideoFrameConverter is IDisposable (used with `using` elsewhere in
            // this codebase); both converters were previously leaked.
            using var rgbToi420 = new VideoFrameConverter(
                new Size(width, height),
                AVPixelFormat.AV_PIX_FMT_RGB24,
                new Size(width, height),
                AVPixelFormat.AV_PIX_FMT_YUV420P);

            using var i420Converter = new VideoFrameConverter(
                new Size(width, height),
                AVPixelFormat.AV_PIX_FMT_YUV420P,
                new Size(width, height),
                AVPixelFormat.AV_PIX_FMT_RGB24);

            // Create dummy bitmap: coloured quadrants (row/col 16 itself deliberately
            // stays black, matching the original comparisons).
            byte[] srcRgb = new byte[width * height * 3];
            for (int row = 0; row < height; row++)
            {
                for (int col = 0; col < width; col++)
                {
                    int index = row * width * 3 + col * 3;

                    int red   = (row < 16 && col < 16) ? 255 : 0;
                    int green = (row < 16 && col > 16) ? 255 : 0;
                    int blue  = (row > 16 && col < 16) ? 255 : 0;

                    srcRgb[index]     = (byte)red;
                    srcRgb[index + 1] = (byte)green;
                    srcRgb[index + 2] = (byte)blue;
                }
            }

            //Console.WriteLine(srcRgb.HexStr());

            unsafe
            {
                fixed(byte *src = srcRgb)
                {
                    System.Drawing.Bitmap bmpImage = new System.Drawing.Bitmap(width, height, srcRgb.Length / height, PixelFormat.Format24bppRgb, (IntPtr)src);
                    bmpImage.Save("test-source.bmp");
                    bmpImage.Dispose();
                }
            }

            // Convert bitmap to i420.
            var i420Frame = rgbToi420.Convert(srcRgb);

            Console.WriteLine($"Converted rgb to i420.");

            // I420 is 12 bits/pixel: full-res Y plane plus quarter-res U and V planes.
            byte[] i420Buffer = new byte[width * height * 3 / 2];

            unsafe
            {
                int size = width * height;
                Marshal.Copy((IntPtr)i420Frame.data[0], i420Buffer, 0, size);
                Marshal.Copy((IntPtr)i420Frame.data[1], i420Buffer, size, size / 4);
                Marshal.Copy((IntPtr)i420Frame.data[2], i420Buffer, size + size / 4, size / 4);

                Console.WriteLine($"Attempting to convert i420 to rgb.");

                var rgbaFrame = i420Converter.Convert(i420Buffer);

                byte[] rgbaBuffer = new byte[width * height * 3];

                Marshal.Copy((IntPtr)rgbaFrame.data[0], rgbaBuffer, 0, width * height * 3);

                fixed(byte *s = rgbaBuffer)
                {
                    System.Drawing.Bitmap bmpImage = new System.Drawing.Bitmap(width, height, rgbaBuffer.Length / height, PixelFormat.Format24bppRgb, (IntPtr)s);
                    bmpImage.Save("test-result.bmp");
                    bmpImage.Dispose();
                }
            }
        }
Ejemplo n.º 14
0
        /// <summary>
        /// Extracts a thumbnail from a video: a JPEG scaled to 100px width, or — when
        /// <c>settings.GrayScale == 1</c> — 256 raw gray bytes from a 16x16 frame.
        /// Tries the native FFmpeg binding first and falls back to spawning the
        /// ffmpeg process when the binding is disabled or fails.
        /// </summary>
        /// <param name="settings">File path, seek position and grayscale flag.</param>
        /// <param name="extendedLogging">Capture stderr and log the full command on failure.</param>
        /// <returns>Thumbnail bytes, or null when extraction failed.</returns>
        public static unsafe byte[]? GetThumbnail(FfmpegSettings settings, bool extendedLogging)
        {
            try {
                if (UseNativeBinding)
                {
                    bool isGrayByte = settings.GrayScale == 1;

                    AVHWDeviceType HWDevice = HardwareAccelerationMode switch {
                        FFHardwareAccelerationMode.vdpau => AVHWDeviceType.AV_HWDEVICE_TYPE_VDPAU,
                        FFHardwareAccelerationMode.dxva2 => AVHWDeviceType.AV_HWDEVICE_TYPE_DXVA2,
                        FFHardwareAccelerationMode.vaapi => AVHWDeviceType.AV_HWDEVICE_TYPE_VAAPI,
                        FFHardwareAccelerationMode.qsv => AVHWDeviceType.AV_HWDEVICE_TYPE_QSV,
                        FFHardwareAccelerationMode.cuda => AVHWDeviceType.AV_HWDEVICE_TYPE_CUDA,
                        _ => AVHWDeviceType.AV_HWDEVICE_TYPE_NONE
                    };

                    using var vsd = new VideoStreamDecoder(settings.File, HWDevice);
                    if (vsd.PixelFormat < 0 || vsd.PixelFormat >= AVPixelFormat.AV_PIX_FMT_NB)
                    {
                        throw new Exception($"Invalid source pixel format");
                    }

                    // Grayscale mode produces a fixed 16x16 GRAY8 frame; thumbnail mode a
                    // 100px-wide BGRA frame with the aspect ratio preserved.
                    Size          sourceSize             = vsd.FrameSize;
                    Size          destinationSize        = isGrayByte ? new Size(16, 16) : new Size(100, Convert.ToInt32(sourceSize.Height * (100 / (double)sourceSize.Width)));
                    AVPixelFormat destinationPixelFormat = isGrayByte ? AVPixelFormat.AV_PIX_FMT_GRAY8 : AVPixelFormat.AV_PIX_FMT_BGRA;
                    using var vfc =
                              new VideoFrameConverter(sourceSize, vsd.PixelFormat, destinationSize, destinationPixelFormat);

                    if (!vsd.TryDecodeFrame(out var srcFrame, settings.Position))
                    {
                        throw new Exception($"Failed decoding frame at {settings.Position}");
                    }
                    AVFrame convertedFrame = vfc.Convert(srcFrame);

                    if (isGrayByte)
                    {
                        int length = ffmpeg.av_image_get_buffer_size(destinationPixelFormat, convertedFrame.width,
                                                                     convertedFrame.height, 1).ThrowExceptionIfError();
                        byte[] data = new byte[length];
                        Marshal.Copy((IntPtr)convertedFrame.data[0], data, 0, length);
                        return(data);
                    }
                    else
                    {
                        int width      = convertedFrame.width;
                        int height     = convertedFrame.height;
                        var totalBytes = width * height * 4;
                        var rgbaBytes  = new byte[totalBytes];
                        int stride     = convertedFrame.linesize[0];
                        if (stride == width * 4)
                        {
                            // No row padding: one bulk copy.
                            Marshal.Copy((IntPtr)convertedFrame.data[0], rgbaBytes, 0, totalBytes);
                        }
                        else
                        {
                            // Row padding present: copy row by row, skipping the padding.
                            var sourceOffset = 0;
                            var destOffset   = 0;
                            var byteWidth    = width * 4;
                            for (var y = 0; y < height; y++)
                            {
                                Marshal.Copy((IntPtr)convertedFrame.data[0] + sourceOffset, rgbaBytes, destOffset, byteWidth);
                                sourceOffset += stride;
                                destOffset   += byteWidth;
                            }
                        }
                        var image = Image.LoadPixelData <SixLabors.ImageSharp.PixelFormats.Bgra32>(rgbaBytes, width, height);
                        using MemoryStream stream = new();
                        image.Save(stream, new SixLabors.ImageSharp.Formats.Jpeg.JpegEncoder());
                        // Fix: removed dead `bool equal = rgbaBytes.SequenceEqual(stream.ToArray());`
                        // — the result was never used and it materialised the stream a second time.
                        return(stream.ToArray());
                    }
                }
            }
            catch (Exception e) {
                Logger.Instance.Info($"Failed using native FFmpeg binding on '{settings.File}', try switching to process mode. Exception: {e}");
            }


            // Fallback: spawn the ffmpeg executable and read the image from stdout.
            //https://docs.microsoft.com/en-us/dotnet/csharp/how-to/concatenate-multiple-strings#string-literals
            string ffmpegArguments = $" -hide_banner -loglevel {(extendedLogging ? "error" : "quiet")}" +
                                     $" -y -hwaccel {HardwareAccelerationMode} -ss {settings.Position} -i \"{settings.File}\"" +
                                     $" -t 1 -f {(settings.GrayScale == 1 ? "rawvideo -pix_fmt gray" : "mjpeg")} -vframes 1" +
                                     $" {(settings.GrayScale == 1 ? "-s 16x16" : "-vf scale=100:-1")} {CustomFFArguments} \"-\"";

            using var process = new Process {
                      StartInfo = new ProcessStartInfo {
                          Arguments              = ffmpegArguments,
                          FileName               = FFmpegPath,
                          CreateNoWindow         = true,
                          RedirectStandardInput  = false,
                          RedirectStandardOutput = true,
                          WorkingDirectory       = Path.GetDirectoryName(FFmpegPath) !,
                          RedirectStandardError  = extendedLogging,
                          WindowStyle            = ProcessWindowStyle.Hidden
                      }
                  };
            string errOut = string.Empty;

            byte[]? bytes = null;
            try {
                process.EnableRaisingEvents = true;
                process.Start();
                if (extendedLogging)
                {
                    process.ErrorDataReceived += new DataReceivedEventHandler((sender, e) => {
                        if (e.Data?.Length > 0)
                        {
                            errOut += Environment.NewLine + e.Data;
                        }
                    });
                    process.BeginErrorReadLine();
                }
                using var ms = new MemoryStream();
                process.StandardOutput.BaseStream.CopyTo(ms);

                if (!process.WaitForExit(TimeoutDuration))
                {
                    throw new TimeoutException($"FFmpeg timed out on file: {settings.File}");
                }
                else if (extendedLogging)
                {
                    process.WaitForExit();                     // Because of asynchronous event handlers, see: https://github.com/dotnet/runtime/issues/18789
                }
                if (process.ExitCode != 0)
                {
                    throw new FFInvalidExitCodeException($"FFmpeg exited with: {process.ExitCode}");
                }

                bytes = ms.ToArray();
                if (bytes.Length == 0)
                {
                    bytes = null;                       // Makes subsequent checks easier
                }
                else if (settings.GrayScale == 1 && bytes.Length != 256)
                {
                    // A 16x16 gray frame must be exactly 256 bytes; anything else is invalid.
                    bytes   = null;
                    errOut += $"{Environment.NewLine}graybytes length != 256";
                }
            }
            catch (Exception e) {
                errOut += $"{Environment.NewLine}{e.Message}";
                try {
                    if (process.HasExited == false)
                    {
                        process.Kill();
                    }
                }
                catch { }
                bytes = null;
            }
            if (bytes == null || errOut.Length > 0)
            {
                string message = $"{((bytes == null) ? "ERROR: Failed to retrieve" : "WARNING: Problems while retrieving")} {(settings.GrayScale == 1 ? "graybytes" : "thumbnail")} from: {settings.File}";
                if (extendedLogging)
                {
                    message += $":{Environment.NewLine}{FFmpegPath} {ffmpegArguments}";
                }
                Logger.Instance.Info($"{message}{errOut}");
            }
            return(bytes);
        }
Ejemplo n.º 15
0
        /// <summary>
        /// Round-trips a synthetic 32x32 RGB bitmap through I420 and back using the
        /// buffer-based converter API, saving "test-source.bmp" and "test-result.bmp".
        /// </summary>
        private static void RoundTripNoEncodingDummyBitmap()
        {
            int width  = 32;
            int height = 32;
            // Fix: removed unused local `Size sz`; both converters are IDisposable and
            // were previously leaked.

            using var rgbToi420 = new VideoFrameConverter(
                width, height,
                AVPixelFormat.AV_PIX_FMT_RGB24,
                width, height,
                AVPixelFormat.AV_PIX_FMT_YUV420P);

            using var i420Converter = new VideoFrameConverter(
                width, height,
                AVPixelFormat.AV_PIX_FMT_YUV420P,
                width, height,
                AVPixelFormat.AV_PIX_FMT_RGB24);

            // Create dummy bitmap: coloured quadrants (row/col 16 itself deliberately
            // stays black, matching the original comparisons).
            byte[] srcRgb = new byte[width * height * 3];
            for (int row = 0; row < height; row++)
            {
                for (int col = 0; col < width; col++)
                {
                    int index = row * width * 3 + col * 3;

                    int red   = (row < 16 && col < 16) ? 255 : 0;
                    int green = (row < 16 && col > 16) ? 255 : 0;
                    int blue  = (row > 16 && col < 16) ? 255 : 0;

                    srcRgb[index]     = (byte)red;
                    srcRgb[index + 1] = (byte)green;
                    srcRgb[index + 2] = (byte)blue;
                }
            }

            unsafe
            {
                fixed(byte *src = srcRgb)
                {
                    System.Drawing.Bitmap bmpImage = new System.Drawing.Bitmap(width, height, srcRgb.Length / height, PixelFormat.Format24bppRgb, (IntPtr)src);
                    bmpImage.Save("test-source.bmp");
                    bmpImage.Dispose();
                }
            }

            // Convert bitmap to i420.
            var i420Buffer = rgbToi420.ConvertToBuffer(srcRgb);

            Console.WriteLine($"Converted rgb to i420 buffer, length {i420Buffer.Length}.");

            // Convert i420 back to bmp.
            var outRgb = i420Converter.ConvertToBuffer(i420Buffer);

            Console.WriteLine($"Converted i420 to rgb buffer, length {outRgb.Length}.");

            unsafe
            {
                fixed(byte *s = outRgb)
                {
                    System.Drawing.Bitmap bmpImage = new System.Drawing.Bitmap(width, height, outRgb.Length / height, PixelFormat.Format24bppRgb, (IntPtr)s);
                    bmpImage.Save("test-result.bmp");
                    bmpImage.Dispose();
                }
            }
        }
Ejemplo n.º 16
0
        /// <summary>
        /// Parses a raw H.264 byte chunk into packets with av_parser_parse2, decodes
        /// each resulting packet, converts every decoded frame to BGR24 and raises
        /// <c>OnDecodeBitmapSource</c> with the pixel buffer.
        /// </summary>
        /// <param name="h264Data">Raw H.264 elementary-stream bytes.</param>
        internal void Decode(byte[] h264Data)
        {
            // Push the payload size into the bitrate counter.
            Client.BitAverageNumber.Push(h264Data.Length);
            var curSize = h264Data.Length;
            var curPtr  = (byte *)ffmpeg.av_malloc((ulong)curSize);

            try
            {
                // Copy the managed buffer into FFmpeg-allocated memory, byte by byte.
                for (var i = 0; i < curSize; i++)
                {
                    curPtr[i] = h264Data[i];
                }

                while (curSize > 0)
                {
                    AVPacket packet;
                    // NOTE(review): the packet is only initialised on the very first call
                    // (Firstpacket); on later iterations this stack AVPacket's fields are
                    // whatever av_parser_parse2 writes into data/size — confirm the other
                    // fields are safe uninitialised. Also note av_init_packet does not
                    // touch data/size, so the assignment order here is valid.
                    if (Firstpacket)
                    {
                        packet.size = curSize;
                        packet.data = curPtr;
                        ffmpeg.av_init_packet(&packet);
                        Firstpacket = false;
                    }


                    // NOTE(review): curSize is decremented by `len` but curPtr is never
                    // advanced, so each parse call re-reads from the start of the buffer
                    // with a shrinking size — verify this is intentional.
                    var len = ffmpeg.av_parser_parse2(_pCodecParserCtx, _pCodecCtx,
                                                      &packet.data, &packet.size, curPtr, curSize,
                                                      ffmpeg.AV_NOPTS_VALUE, ffmpeg.AV_NOPTS_VALUE, ffmpeg.AV_NOPTS_VALUE);

                    curSize -= len;
                    if (packet.size == 0)
                    {
                        // Parser needs more data before it can emit a packet.
                        continue;
                    }

                    ffmpeg.avcodec_send_packet(_pCodecCtx, &packet);

                    var ret = ffmpeg.avcodec_receive_frame(_pCodecCtx, _pFrame);

                    // Drain every frame the decoder produced for this packet.
                    while (ret >= 0)
                    {
                        using (var vfc = new VideoFrameConverter(new Size(_pFrame->width, _pFrame->height),
                                                                 _pCodecCtx->pix_fmt, new Size(_pFrame->width, _pFrame->height),
                                                                 AVPixelFormat.AV_PIX_FMT_BGR24))
                        {
                            var convertedFrame = vfc.Convert(*_pFrame);

                            // Raise the decoded-frame event with the BGR24 pixel buffer.
                            OnDecodeBitmapSource?.Invoke(this, convertedFrame.width, convertedFrame.height,
                                                         convertedFrame.linesize[0], (IntPtr)convertedFrame.data[0]);

                            // Release the converted frame's references.
                            ffmpeg.av_frame_unref(&convertedFrame);
                        }


                        ret = ffmpeg.avcodec_receive_frame(_pCodecCtx, _pFrame);
                    }

                    // Release the packet's buffer references.
                    ffmpeg.av_packet_unref(&packet);
                }
            }
            catch (Exception e)
            {
                Console.WriteLine(e);
            }
            finally
            {
                // Always free the av_malloc'd input copy.
                ffmpeg.av_free(curPtr);
            }
        }
Ejemplo n.º 17
0
    /// <summary>
    /// Decodes one representative frame from <paramref name="videoStream"/>, scales it to fit the
    /// thumbnail bounds, and draws it (with shadow and decoration overlay) into <paramref name="ctx"/>.
    /// </summary>
    /// <param name="videoStream">Seekable stream containing the video container data.</param>
    /// <param name="ctx">Render target and density information for the thumbnail.</param>
    /// <returns><c>false</c> when the container has no usable video stream; <c>true</c> on success.</returns>
    /// <exception cref="InvalidDataException">Thrown when no frame can be decoded.</exception>
    private static unsafe bool DrawVideo(Stream videoStream, ThumbnailsRenderContext ctx)
    {
        using var formatContext = new FormatContext(videoStream);
        var stream = formatContext.FindBestVideoStream();

        // No video stream in the container - nothing to draw.
        if (stream == null)
        {
            return(false);
        }

        using var videoStreamDecoder = stream.CreateStreamDecoder();

        // Seek away from the very first frame (often black/blank) to grab a more
        // representative picture. Seek failures are non-fatal - fall back to frame 0.
        try
        {
            // NOTE(review): seeking when Duration <= 0 (unknown duration) to a fixed offset;
            // the 10 * 1000000 value presumably means 10 seconds in microseconds - TODO confirm
            // SeekFrame's time base.
            if (videoStreamDecoder.Duration <= 0)
            {
                videoStreamDecoder.SeekFrame(10 * 1000000);
            }

            // Otherwise jump to one third of the way through the video.
            if (videoStreamDecoder.Duration > 3)
            {
                videoStreamDecoder.SeekFrame(videoStreamDecoder.Duration / 3);
            }
        }
        catch (FFmpegException err)
        {
            Console.WriteLine("Seek failed: " + err);
        }

        // Target size: frame scaled down to fit the default thumbnail bounds at the
        // requested pixel density, preserving aspect ratio.
        var destinationSize = ThumbnailUtils.ContainSize(
            new SKSize(videoStreamDecoder.FrameWidth, videoStreamDecoder.FrameHeight),
            new SKSize(ThumbnailUtils.DefaultMaxWidth * ctx.Density, ThumbnailUtils.DefaultMaxHeight * ctx.Density)).ToSizeI();

        var sourcePixelFormat = videoStreamDecoder.PixelFormat;

        // Decode the frame at the (possibly seeked) position.
        if (!videoStreamDecoder.MoveNext())
        {
            throw new InvalidDataException("Can't decode the video.");
        }

        using var vfc =
                  new VideoFrameConverter(
                      videoStreamDecoder.FrameWidth,
                      videoStreamDecoder.FrameHeight,
                      sourcePixelFormat,
                      destinationSize.Width,
                      destinationSize.Height);

        var convertedFrame = vfc.Convert(videoStreamDecoder.Current.Value);

        using var colorspace = SKColorSpace.CreateSrgb();

        // NOTE(review): assumes VideoFrameConverter outputs RGBA8888 - this converter overload
        // takes no destination pixel format, so verify its default matches SKColorType.Rgba8888.
        var sourceImageInfo = new SKImageInfo(
            convertedFrame.width,
            convertedFrame.height,
            SKColorType.Rgba8888,
            SKAlphaType.Unpremul,
            colorspace);

        // Wrap the converted frame's pixel buffer without copying; data[0] is the first plane.
        using var image =
                  SKImage.FromPixels(sourceImageInfo, (IntPtr)convertedFrame.data[0], sourceImageInfo.RowBytes);

        // Lazily load and cache the overlay image shared by all thumbnail renders.
        _cachedDecorationImage ??= SKImage.FromEncodedData(ReadDecorationImage());
        ThumbnailUtils.DrawShadowView(
            ctx,
            new SkImageView(image),
            _cachedDecorationImage,
            new SKColor(0, 0, 0),
            minSize: new SKSize(24, 24));
        return(true);
    }