        // Encodes a raw video frame (I420 by default) as a VP8 frame. Returns null if the
        // encoder did not produce a compressed frame packet for this input.
        public byte[] Encode(byte[] frame, VpxImgFmt inputPixelFormat = VpxImgFmt.VPX_IMG_FMT_I420, bool forceKeyFrame = false)
        {
            if (!_isVpxImageAllocated)
            {
                _isVpxImageAllocated = true;
                VpxImage.VpxImgAlloc(_vpxEncodeImg, inputPixelFormat, _encodeWidth, _encodeHeight, 1);
            }

            byte[] encodedSample = null;

            unsafe
            {
                fixed(byte *pFrame = frame)
                {
                    VpxImage.VpxImgWrap(_vpxEncodeImg, inputPixelFormat, _encodeWidth, _encodeHeight, 1, pFrame);

                    int flags = (forceKeyFrame) ? VPX_EFLAG_FORCE_KF : 0;

                    var encodeRes = vpx_encoder.VpxCodecEncode(_vpxEncodeCtx, _vpxEncodeImg, 1, 1, flags, VPX_DL_REALTIME);

                    if (encodeRes != VpxCodecErrT.VPX_CODEC_OK)
                    {
                        throw new ApplicationException($"VP8 encode attempt failed, {vpx_codec.VpxCodecErrToString(encodeRes)}.");
                    }

                    IntPtr iter = IntPtr.Zero;

                    var pkt = vpx_encoder.VpxCodecGetCxData(_vpxEncodeCtx, (void **)&iter);

                    while (pkt != null)
                    {
                        switch (pkt.Kind)
                        {
                        case VpxCodecCxPktKind.VPX_CODEC_CX_FRAME_PKT:
                            //Console.WriteLine($"is key frame={(pkt.data.frame.Flags & VPX_FRAME_IS_KEY) > 0}, length {pkt.data.Raw.Sz}.");
                            encodedSample = new byte[pkt.data.Raw.Sz];
                            Marshal.Copy(pkt.data.Raw.Buf, encodedSample, 0, encodedSample.Length);
                            break;

                        default:
                            throw new ApplicationException($"Unexpected packet type received from encoder, {pkt.Kind}.");
                        }

                        pkt = vpx_encoder.VpxCodecGetCxData(_vpxEncodeCtx, (void **)&iter);
                    }
                }
            }

            return(encodedSample);
        }
        // Setting config parameters in Chromium source.
        // https://chromium.googlesource.com/external/webrtc/stable/src/+/b8671cb0516ec9f6c7fe22a6bbe331d5b091cdbb/modules/video_coding/codecs/vp8/vp8.cc
        // Updated link 15 Jun 2020.
        // https://chromium.googlesource.com/external/webrtc/stable/src/+/refs/heads/master/modules/video_coding/codecs/vp8/vp8_impl.cc
        public void InitialiseEncoder(uint width, uint height)
        {
            _encodeWidth  = width;
            _encodeHeight = height;

            _vpxEncodeCtx = new VpxCodecCtx();
            _vpxEncodeImg = new VpxImage();

            VpxCodecEncCfg vp8EncoderCfg = new VpxCodecEncCfg();

            var setConfigRes = vpx_encoder.VpxCodecEncConfigDefault(vp8cx.VpxCodecVp8Cx(), vp8EncoderCfg, 0);

            if (setConfigRes != VpxCodecErrT.VPX_CODEC_OK)
            {
                throw new ApplicationException($"Failed to set VP8 encoder configuration to default values, {setConfigRes}.");
            }

            vp8EncoderCfg.GW = _encodeWidth;
            vp8EncoderCfg.GH = _encodeHeight;

            //	vpxConfig.g_w = width;
            //	vpxConfig.g_h = height;
            //	vpxConfig.rc_target_bitrate = _rc_target_bitrate;//  300; // 5000; // in kbps.
            //	vpxConfig.rc_min_quantizer = _rc_min_quantizer;// 20; // 50;
            //	vpxConfig.rc_max_quantizer = _rc_max_quantizer;// 30; // 60;
            //	vpxConfig.g_pass = VPX_RC_ONE_PASS;
            //	if (_rc_is_cbr)
            //	{
            //		vpxConfig.rc_end_usage = VPX_CBR;
            //	}
            //	else
            //	{
            //		vpxConfig.rc_end_usage = VPX_VBR;
            //	}

            //	vpxConfig.g_error_resilient = VPX_ERROR_RESILIENT_DEFAULT;
            //	vpxConfig.g_lag_in_frames = 0;
            //	vpxConfig.rc_resize_allowed = 0;
            //	vpxConfig.kf_max_dist = 20;

            var initEncoderRes = vpx_encoder.VpxCodecEncInitVer(_vpxEncodeCtx, vp8cx.VpxCodecVp8Cx(), vp8EncoderCfg, 0, VPX_ENCODER_ABI_VERSION);

            if (initEncoderRes != VpxCodecErrT.VPX_CODEC_OK)
            {
                throw new ApplicationException($"Failed to initialise VP8 encoder, {vpx_codec.VpxCodecErrToString(initEncoderRes)}.");
            }

            //VpxImage.VpxImgAlloc(_vpxEncodeImg, VpxImgFmt.VPX_IMG_FMT_I420, _encodeWidth, _encodeHeight, 1);
            VpxImage.VpxImgAlloc(_vpxEncodeImg, VpxImgFmt.VPX_IMG_FMT_NV12, _encodeWidth, _encodeHeight, 1);
        }
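
        // Usage sketch, not from the original source: one-off initialisation followed by encoding
        // a single raw I420 frame. The buffer-size check reflects the standard I420 layout
        // (full-size Y plane plus two quarter-size chroma planes); the method name is illustrative.
        public byte[] InitialiseAndEncodeI420(byte[] i420Frame, uint width, uint height)
        {
            if (i420Frame.Length < width * height * 3 / 2)
            {
                throw new ArgumentException("Buffer is too small for an I420 frame of the given dimensions.");
            }

            InitialiseEncoder(width, height);
            return Encode(i420Frame, VpxImgFmt.VPX_IMG_FMT_I420, forceKeyFrame: true);
        }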
        public void Dispose()
        {
            _isDisposing = true;

            if (_vpxEncodeCtx != null)
            {
                vpx_codec.VpxCodecDestroy(_vpxEncodeCtx);
            }

            if (_vpxEncodeImg != null)
            {
                VpxImage.VpxImgFree(_vpxEncodeImg);
            }

            if (_vpxDecodeCtx != null)
            {
                vpx_codec.VpxCodecDestroy(_vpxDecodeCtx);
            }
        }
Example n. 4
        private void SendVideoFrame(Bitmap frame)
        {
            var bitmapData = frame.LockBits(new Rectangle(0, 0, frame.Width, frame.Height), ImageLockMode.ReadOnly, frame.PixelFormat);

            byte[] bytes = new byte[bitmapData.Stride * frame.Height];

            Marshal.Copy(bitmapData.Scan0, bytes, 0, bytes.Length);
            frame.UnlockBits(bitmapData);

            try
            {
                VpxImage img = VpxImage.Create(VpxImageFormat.VPX_IMG_FMT_I420, (ushort)frame.Width, (ushort)frame.Height, 1);

                byte[] dest = new byte[frame.Width * frame.Height * 4];
                VpxHelper.RgbToYuv420(img, bytes, (ushort)frame.Width, (ushort)frame.Height);

                int length = toxav.PrepareVideoFrame(CallIndex, dest, (IntPtr)img.Pointer);
                img.Free();

                if (length > 0)
                {
                    byte[] bytesToSend = new byte[length];
                    Array.Copy(dest, bytesToSend, length);

                    ToxAvError error = toxav.SendVideo(CallIndex, bytesToSend);
                    if (error != ToxAvError.None)
                    {
                        Debug.WriteLine(string.Format("Could not send video frame: {0}, {1}", error, length));
                    }
                }
                else
                {
                    Debug.WriteLine(string.Format("Could not prepare frame: {0}", (ToxAvError)length));
                }
            }
            catch (Exception ex)
            {
                Debug.WriteLine(string.Format("Could not convert frame: {0}", ex.Message));
            }

            frame.Dispose();
        }
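
        // Caller sketch, not from the original source: captures the primary screen into a Bitmap
        // and hands it to SendVideoFrame (which disposes the Bitmap itself). The WinForms/System.Drawing
        // capture code and the 32bpp pixel format are assumptions for illustration only.
        private void CaptureAndSendScreenExample()
        {
            var bounds = Screen.PrimaryScreen.Bounds;
            var bitmap = new Bitmap(bounds.Width, bounds.Height, PixelFormat.Format32bppArgb);

            using (var g = Graphics.FromImage(bitmap))
            {
                g.CopyFromScreen(bounds.Location, Point.Empty, bounds.Size);
            }

            SendVideoFrame(bitmap);
        }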
        // https://swift.im/git/swift-contrib/tree/Swiften/ScreenSharing/VP8Decoder.cpp?id=6247ed394302ff2cf1f33a71df808bebf7241242
        public List <byte[]> Decode(byte[] buffer, int bufferSize, out uint width, out uint height)
        {
            List <byte[]> decodedBuffers = new List <byte[]>();

            width  = 0;
            height = 0;

            if (!_isDisposing)
            {
                unsafe
                {
                    fixed(byte *pBuffer = buffer)
                    {
                        var decodeRes = vpx_decoder.VpxCodecDecode(_vpxDecodeCtx, pBuffer, (uint)bufferSize, IntPtr.Zero, 1);

                        if (decodeRes != VpxCodecErrT.VPX_CODEC_OK)
                        {
                            // The reason not to throw an exception here is that a partial frame can easily be passed to the decoder.
                            // This will result in a decode failure but should not affect the decode of the next full frame.
                            //throw new ApplicationException($"VP8 decode attempt failed, {vpx_codec.VpxCodecErrToString(decodeRes)}.");
                            logger.LogWarning($"VP8 decode attempt failed, {vpx_codec.VpxCodecErrToString(decodeRes)}.");
                        }
                        else
                        {
                            IntPtr iter = IntPtr.Zero;

                            VpxImage img = vpx_decoder.VpxCodecGetFrame(_vpxDecodeCtx, (void **)&iter);
                            while (img != null)
                            {
                                // Convert the VPX image buffer to an I420 buffer WITHOUT the stride.
                                width  = img.DW;
                                height = img.DH;
                                int ySize   = (int)(width * height);
                                int uvSize  = (int)(((width + 1) / 2) * ((height + 1) / 2) * 2);
                                int uvWidth = (int)(width + 1) / 2;

                                var yPlane = (byte *)img.PlaneY;
                                var uPlane = (byte *)img.PlaneU;
                                var vPlane = (byte *)img.PlaneV;

                                byte[] decodedBuffer = new byte[ySize + uvSize];

                                for (int row = 0; row < height; row++)
                                {
                                    Marshal.Copy((IntPtr)(yPlane + row * img.Stride[0]), decodedBuffer, (int)(row * width), (int)width);

                                    if (row < height / 2)
                                    {
                                        Marshal.Copy((IntPtr)(uPlane + row * img.Stride[1]), decodedBuffer, ySize + row * uvWidth, uvWidth);
                                        Marshal.Copy((IntPtr)(vPlane + row * img.Stride[2]), decodedBuffer, ySize + uvSize / 2 + row * uvWidth, uvWidth);
                                    }
                                }

                                decodedBuffers.Add(decodedBuffer);

                                VpxImage.VpxImgFree(img);

                                img = vpx_decoder.VpxCodecGetFrame(_vpxDecodeCtx, (void **)&iter);
                            }
                        }
                    }
                }
            }

            return(decodedBuffers);
        }
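
        // Usage sketch, not from the original source: run one encoded VP8 frame through Decode and
        // report the raw I420 frames it produced. The method name and logging are illustrative only.
        public void DecodeAndReportExample(byte[] encodedFrame)
        {
            var i420Frames = Decode(encodedFrame, encodedFrame.Length, out uint width, out uint height);

            foreach (var i420 in i420Frames)
            {
                // Each buffer is a stride-free I420 frame: Y plane, then U plane, then V plane.
                Console.WriteLine($"Decoded VP8 frame {width}x{height}, {i420.Length} bytes.");
            }
        }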
Example n. 6
        // https://swift.im/git/swift-contrib/tree/Swiften/ScreenSharing/VP8Decoder.cpp?id=6247ed394302ff2cf1f33a71df808bebf7241242
        public List <byte[]> Decode(byte[] buffer, int bufferSize, out uint width, out uint height)
        {
            List <byte[]> decodedBuffers = new List <byte[]>();

            width  = 0;
            height = 0;

            if (!_isDisposing)
            {
                unsafe
                {
                    fixed(byte *pBuffer = buffer)
                    {
                        var decodeRes = vpx_decoder.VpxCodecDecode(_vpxDecodeCtx, pBuffer, (uint)bufferSize, IntPtr.Zero, 1);

                        if (decodeRes != VpxCodecErrT.VPX_CODEC_OK)
                        {
                            // The reason not to throw an exception here is that a partial frame can easily be passed to the decoder.
                            // This will result in a decode failure but should not affect the decode of the next full frame.
                            //throw new ApplicationException($"VP8 decode attempt failed, {vpx_codec.VpxCodecErrToString(decodeRes)}.");
                            logger.LogWarning($"VP8 decode attempt failed, {vpx_codec.VpxCodecErrToString(decodeRes)}.");
                        }
                        else
                        {
                            IntPtr iter = IntPtr.Zero;

                            VpxImage img = vpx_decoder.VpxCodecGetFrame(_vpxDecodeCtx, (void **)&iter);
                            while (img != null)
                            {
                                // Convert the VPX image buffer to an I420 buffer WITHOUT the stride.
                                width  = img.DW;
                                height = img.DH;
                                int sz = (int)(width * height);

                                var yPlane = (byte *)img.PlaneY;
                                var uPlane = (byte *)img.PlaneU;
                                var vPlane = (byte *)img.PlaneV;

                                byte[] decodedBuffer = new byte[width * height * 3 / 2];

                                for (uint row = 0; row < height; row++)
                                {
                                    Marshal.Copy((IntPtr)(yPlane + row * img.Stride[0]), decodedBuffer, (int)(row * width), (int)width);

                                    if (row < height / 2)
                                    {
                                        Marshal.Copy((IntPtr)(uPlane + row * img.Stride[1]), decodedBuffer, (int)(sz + row * (width / 2)), (int)width / 2);
                                        Marshal.Copy((IntPtr)(vPlane + row * img.Stride[2]), decodedBuffer, (int)(sz + sz / 4 + row * (width / 2)), (int)width / 2);
                                    }
                                }

                                // This block converts the VPX image buffer directly to RGB24 but it's way too slow.
                                // Was taking 60 to 90ms on Win10 i7 CPU.
                                //byte[] data = new byte[width * height * 3];
                                //int i = 0;
                                //for (uint imgY = 0; imgY < height; imgY++)
                                //{
                                //    for (uint imgX = 0; imgX < width; imgX++)
                                //    {
                                //        int y = yPlane[imgY * img.Stride[0] + imgX];
                                //        int u = uPlane[(imgY / 2) * img.Stride[1] + (imgX / 2)];
                                //        int v = vPlane[(imgY / 2) * img.Stride[2] + (imgX / 2)];

                                //        int c = y - 16;
                                //        int d = (u - 128);
                                //        int e = (v - 128);

                                //        // TODO: adjust colors ?

                                //        int r = (298 * c + 409 * e + 128) >> 8;
                                //        int g = (298 * c - 100 * d - 208 * e + 128) >> 8;
                                //        int b = (298 * c + 516 * d + 128) >> 8;

                                //        r = r < 0 ? 0 : r > 255 ? 255 : r;
                                //        g = g < 0 ? 0 : g > 255 ? 255 : g;
                                //        b = b < 0 ? 0 : b > 255 ? 255 : b;

                                //        // TODO: cast instead of clamp8

                                //        data[i + 0] = (byte)(b);
                                //        data[i + 1] = (byte)(g);
                                //        data[i + 2] = (byte)(r);

                                //        i += 3;
                                //    }
                                //}
                                //decodedBuffers.Add(data);

                                decodedBuffers.Add(decodedBuffer);

                                VpxImage.VpxImgFree(img);

                                img = vpx_decoder.VpxCodecGetFrame(_vpxDecodeCtx, (void **)&iter);
                            }
                        }
                    }
                }
            }

            return(decodedBuffers);
        }