// Code example #1
        /// <summary>
        /// Runs eye-landmark inference for one eye: refreshes the eye ROI from the
        /// face mesh, crops/rotates that region out of <paramref name="inputTex"/>,
        /// and reads output tensors 0 and 1.
        /// NOTE(review): <paramref name="side"/> is unused in this body — confirm
        /// whether it should influence indxs selection.
        /// </summary>
        public void Invoke(Texture inputTex, int[] indxs, FaceDetect.Result face, FaceMesh.Result meshResult, int side)
        {
            // Refresh face.rectEye from the mesh landmarks before cropping.
            CalcEyeRoi(face, meshResult, indxs[0], indxs[1]);

            // Webcam input needs rotation/mirror-compensated resize options.
            var options = inputTex is WebCamTexture webcamTex
                ? resizeOptions.GetModifedForWebcam(webcamTex)
                : resizeOptions;

            // Map the eye rect into model-input space (rotation in degrees).
            float faceRotationDeg = CalcFaceRotation(ref face) * Mathf.Rad2Deg;
            cropMatrix = RectTransformationCalculator.CalcMatrix(new RectTransformationCalculator.Options()
            {
                rect = face.rectEye,
                rotationDegree = faceRotationDeg,
                shift = FaceShift,
                scale = FaceScale,
                cameraRotationDegree = -options.rotationDegree,
                mirrorHorizontal = options.mirrorHorizontal,
                mirrorVertiacal = options.mirrorVertical,
            });

            // Crop + resize to the model's input resolution, then run inference.
            var uvRect = TextureResizer.GetTextureST(inputTex, options);
            RenderTexture cropped = resizer.Resize(inputTex, options.width, options.height, true, cropMatrix, uvRect);
            ToTensor(cropped, input0, false);

            interpreter.SetInputTensorData(0, input0);
            interpreter.Invoke();
            interpreter.GetOutputTensorData(0, output0);
            interpreter.GetOutputTensorData(1, output1);
        }
// Code example #2
        /// <summary>
        /// Runs pose-landmark inference: derives an ROI from the pose alignment
        /// keypoints, crops it out of <paramref name="inputTex"/>, and reads
        /// output tensors 0 and 1 (tensor 2 is intentionally not consumed).
        /// </summary>
        public void Invoke(Texture inputTex, PoseDetect.Result pose)
        {
            // Webcam input needs rotation/mirror-compensated resize options.
            var options = inputTex is WebCamTexture webcamTex
                ? resizeOptions.GetModifedForWebcam(webcamTex)
                : resizeOptions;

            // ROI from the pose alignment keypoints.
            var roi = AlignmentPointsRect(ref pose);

            // NOTE(review): rotation is hard-coded to 180 and the computed
            // CalcRotationDegree(ref pose) was commented out — confirm this is
            // intentional for this model.
            cropMatrix = RectTransformationCalculator.CalcMatrix(new RectTransformationCalculator.Options()
            {
                rect = roi,
                rotationDegree = 180,
                shift = PoseShift,
                scale = PoseScale,
                cameraRotationDegree = -options.rotationDegree,
                mirrorHorizontal = options.mirrorHorizontal,
                mirrorVertiacal = options.mirrorVertical,
            });

            // Crop + resize to the model's input resolution, then run inference.
            var uvRect = TextureResizer.GetTextureST(inputTex, options);
            RenderTexture cropped = resizer.Resize(inputTex, options.width, options.height, true, cropMatrix, uvRect);
            ToTensor(cropped, input0, false);

            interpreter.SetInputTensorData(0, input0);
            interpreter.Invoke();
            interpreter.GetOutputTensorData(0, output0);
            interpreter.GetOutputTensorData(1, output1);
            // Output tensor 2 exists but is not used.
        }
// Code example #3
        /// <summary>
        /// Async pose-landmark inference. Crops the pose ROI, converts to a tensor,
        /// runs the interpreter on the thread pool, and returns the result after
        /// switching back to the main thread at <paramref name="timing"/>.
        /// </summary>
        public async UniTask<Result> InvokeAsync(Texture inputTex, PoseDetect.Result pose, CancellationToken cancellationToken, PlayerLoopTiming timing)
        {
            // Crop the pose ROI and resize to the model input size.
            cropMatrix = CalcCropMatrix(ref pose, ref resizeOptions);
            var uvRect = TextureResizer.GetTextureST(inputTex, resizeOptions);
            RenderTexture cropped = resizer.Resize(inputTex, resizeOptions.width, resizeOptions.height, true, cropMatrix, uvRect);

            await ToTensorAsync(cropped, input0, false, cancellationToken);

            // Run inference off the main thread.
            await UniTask.SwitchToThreadPool();
            interpreter.SetInputTensorData(0, input0);
            interpreter.Invoke();
            interpreter.GetOutputTensorData(0, output0);
            interpreter.GetOutputTensorData(1, output1);
            // World-space landmark tensors are optional.
            if (options.useWorldLandmarks)
            {
                interpreter.GetOutputTensorData(3, output3);
                interpreter.GetOutputTensorData(4, output4);
            }
            var result = GetResult();

            // Hop back to the main thread before returning to the caller.
            await UniTask.SwitchToMainThread(timing, cancellationToken);
            return result;
        }
        /// <summary>
        /// Async pose-landmark inference with optional result filtering.
        /// Adjusts resize options for webcam input, runs the interpreter on the
        /// thread pool, and returns after switching back to the main thread.
        /// </summary>
        public async UniTask<Result> InvokeAsync(Texture inputTex, PoseDetect.Result pose, bool useFilter, CancellationToken cancellationToken, PlayerLoopTiming timing)
        {
            // Webcam input needs rotation/mirror-compensated resize options.
            var resizeOpts = inputTex is WebCamTexture webcamTex
                ? resizeOptions.GetModifedForWebcam(webcamTex)
                : resizeOptions;

            // Crop the pose ROI and resize to the model input size.
            cropMatrix = CalcCropMatrix(ref pose, ref resizeOpts);
            var uvRect = TextureResizer.GetTextureST(inputTex, resizeOpts);
            RenderTexture cropped = resizer.Resize(inputTex, resizeOpts.width, resizeOpts.height, true, cropMatrix, uvRect);

            await ToTensorAsync(cropped, input0, false, cancellationToken);

            // Run inference off the main thread.
            await UniTask.SwitchToThreadPool();
            interpreter.SetInputTensorData(0, input0);
            interpreter.Invoke();
            interpreter.GetOutputTensorData(0, output0);
            interpreter.GetOutputTensorData(1, output1);
            var result = GetResult(useFilter);

            // Hop back to the main thread before returning to the caller.
            await UniTask.SwitchToMainThread(timing, cancellationToken);
            return result;
        }
// Code example #5
        /// <summary>
        /// Async hand-landmark inference. Crops the rotated palm rect out of
        /// <paramref name="inputTex"/>, runs the interpreter on the thread pool,
        /// and returns the result after switching back to the main thread.
        /// </summary>
        public async UniTask<Result> InvokeAsync(Texture inputTex, PalmDetect.Result palm, CancellationToken cancellationToken)
        {
            // Map the palm rect into model-input space (rotation in degrees).
            float palmRotationDeg = CalcHandRotation(ref palm) * Mathf.Rad2Deg;
            cropMatrix = RectTransformationCalculator.CalcMatrix(new RectTransformationCalculator.Options()
            {
                rect = palm.rect,
                rotationDegree = palmRotationDeg,
                shift = PalmShift,
                scale = PalmScale,
                cameraRotationDegree = -resizeOptions.rotationDegree,
                mirrorHorizontal = resizeOptions.mirrorHorizontal,
                mirrorVertiacal = resizeOptions.mirrorVertical,
            });

            // Crop + resize to the model's input resolution.
            var uvRect = TextureResizer.GetTextureST(inputTex, resizeOptions);
            RenderTexture cropped = resizer.Resize(inputTex, resizeOptions.width, resizeOptions.height, true, cropMatrix, uvRect);

            await ToTensorAsync(cropped, input0, false, cancellationToken);

            // Run inference off the main thread.
            await UniTask.SwitchToThreadPool();
            interpreter.SetInputTensorData(0, input0);
            interpreter.Invoke();
            interpreter.GetOutputTensorData(0, output0);
            interpreter.GetOutputTensorData(1, output1);
            var result = GetResult();

            // Hop back to the main thread before returning to the caller.
            await UniTask.SwitchToMainThread(cancellationToken);
            return result;
        }
// Code example #6
        /// <summary>
        /// Runs hand-landmark inference: crops the rotated palm rect out of
        /// <paramref name="inputTex"/> and reads output tensors 0 and 1.
        /// </summary>
        public void Invoke(Texture inputTex, PalmDetect.Result palm)
        {
            // Webcam input needs rotation/mirror-compensated resize options.
            var options = inputTex is WebCamTexture webcamTex
                ? resizeOptions.GetModifedForWebcam(webcamTex)
                : resizeOptions;

            // Map the palm rect into model-input space (rotation in degrees).
            float palmRotationDeg = CalcHandRotation(ref palm) * Mathf.Rad2Deg;
            cropMatrix = RectTransformationCalculator.CalcMatrix(new RectTransformationCalculator.Options()
            {
                rect = palm.rect,
                rotationDegree = palmRotationDeg,
                shift = PalmShift,
                scale = PalmScale,
                cameraRotationDegree = -options.rotationDegree,
                mirrorHorizontal = options.mirrorHorizontal,
                mirrorVertiacal = options.mirrorVertical,
            });

            // Crop + resize to the model's input resolution, then run inference.
            var uvRect = TextureResizer.GetTextureST(inputTex, options);
            RenderTexture cropped = resizer.Resize(inputTex, options.width, options.height, true, cropMatrix, uvRect);
            ToTensor(cropped, input0, false);

            interpreter.SetInputTensorData(0, input0);
            interpreter.Invoke();
            interpreter.GetOutputTensorData(0, output0);
            interpreter.GetOutputTensorData(1, output1);
        }
// Code example #7
        /// <summary>
        /// Runs face-landmark inference: crops the rotated face rect out of
        /// <paramref name="inputTex"/> and reads output tensors 0 and 1.
        /// NOTE(review): unlike the sibling overloads, this one does not adjust
        /// resizeOptions for WebCamTexture input — confirm whether that matters
        /// for this caller.
        /// </summary>
        public void Invoke(Texture inputTex, FaceDetect.Result face)
        {
            // Map the face rect into model-input space (rotation in degrees).
            float faceRotationDeg = CalcFaceRotation(ref face) * Mathf.Rad2Deg;
            cropMatrix = RectTransformationCalculator.CalcMatrix(new RectTransformationCalculator.Options()
            {
                rect = face.rect,
                rotationDegree = faceRotationDeg,
                shift = FaceShift,
                scale = FaceScale,
                cameraRotationDegree = -resizeOptions.rotationDegree,
                mirrorHorizontal = resizeOptions.mirrorHorizontal,
                mirrorVertiacal = resizeOptions.mirrorVertical,
            });

            // Crop + resize to the model's input resolution, then run inference.
            var uvRect = TextureResizer.GetTextureST(inputTex, resizeOptions);
            RenderTexture cropped = resizer.Resize(inputTex, resizeOptions.width, resizeOptions.height, true, cropMatrix, uvRect);
            ToTensor(cropped, input0, false);

            interpreter.SetInputTensorData(0, input0);
            interpreter.Invoke();
            interpreter.GetOutputTensorData(0, output0);
            interpreter.GetOutputTensorData(1, output1);
        }
// Code example #8
        /// <summary>
        /// Crops and rotates a webcam frame into a RenderTexture whose aspect
        /// matches (width : height), compensating for device rotation, portrait
        /// orientation, and vertical/front-facing mirroring.
        /// </summary>
        /// <param name="texture">Live webcam feed to normalize.</param>
        /// <param name="width">Target width (used only for the aspect ratio).</param>
        /// <param name="height">Target height (used only for the aspect ratio).</param>
        /// <param name="isFrontFacing">True for a selfie camera (adds a mirror flip).</param>
        /// <returns>The resized texture produced by <c>resizer.Resize</c>.</returns>
        private RenderTexture NormalizeWebcam(WebCamTexture texture, int width, int height, bool isFrontFacing)
        {
            int  cameraWidth  = texture.width;
            int  cameraHeight = texture.height;
            bool isPortrait   = IsPortrait(texture);

            // In portrait the sensor dimensions are reported rotated; swap so the
            // aspect math below works in landscape terms.
            if (isPortrait)
            {
                (cameraWidth, cameraHeight) = (cameraHeight, cameraWidth); // swap
            }

            float cameraAspect = (float)cameraWidth / (float)cameraHeight;
            float targetAspect = (float)width / (float)height;

            // Output size keeps the target aspect by cropping the camera's longer
            // dimension (the UV rect below performs the actual center-crop).
            int w, h;

            if (cameraAspect > targetAspect)
            {
                w = Mathf.FloorToInt(cameraHeight * targetAspect);
                h = cameraHeight;
            }
            else
            {
                w = cameraWidth;
                h = Mathf.FloorToInt(cameraWidth / targetAspect);
            }

            Matrix4x4 mtx;
            Vector4   uvRect;
            int       rotation = texture.videoRotationAngle;

            // Seems to be bug in the android. might be fixed in the future.
            // (Android reports the rotation with the opposite sign, so negate it.)
            if (Application.platform == RuntimePlatform.Android)
            {
                rotation = -rotation;
            }

            if (isPortrait)
            {
                mtx    = TextureResizer.GetVertTransform(rotation, texture.videoVerticallyMirrored, isFrontFacing);
                uvRect = TextureResizer.GetTextureST(targetAspect, cameraAspect, AspectMode.Fill);
            }
            else
            {
                // NOTE(review): the 2nd/3rd arguments to GetVertTransform (and the
                // two aspect arguments to GetTextureST) are swapped relative to the
                // portrait branch — confirm against those methods' parameter order
                // that this asymmetry is intentional.
                mtx    = TextureResizer.GetVertTransform(rotation, isFrontFacing, texture.videoVerticallyMirrored);
                uvRect = TextureResizer.GetTextureST(cameraAspect, targetAspect, AspectMode.Fill);
            }

            // Debug.Log($"camera: rotation:{texture.videoRotationAngle} flip:{texture.videoVerticallyMirrored}");
            return(resizer.Resize(texture, w, h, false, mtx, uvRect));
        }
// Code example #9
        /// <summary>
        /// Runs palm-landmark inference using the resizer's stateful crop
        /// properties (VertexTransfrom / UVRect) rather than per-call arguments,
        /// then reads output tensors 0 and 1.
        /// </summary>
        public void Invoke(Texture inputTex, PalmDetect.Palm palm)
        {
            var options = resizeOptions;

            // Configure the crop: store the palm matrix both locally and on the
            // resizer, set the UV rect, then crop + resize in one pass.
            cropMatrix = resizer.VertexTransfrom = CalcPalmMatrix(ref palm, PalmShift, PalmScale);
            resizer.UVRect = TextureResizer.GetTextureST(inputTex, options);
            RenderTexture cropped = resizer.ApplyResize(inputTex, options.width, options.height);

            ToTensor(cropped, input0, false);

            // Run inference and read both output tensors.
            interpreter.SetInputTensorData(0, input0);
            interpreter.Invoke();
            interpreter.GetOutputTensorData(0, output0);
            interpreter.GetOutputTensorData(1, output1);
        }
// Code example #10
        /// <summary>
        /// Unity lifecycle entry: creates the texture resizer, picks a webcam
        /// device by name, and starts it. Falls back to a default-initialized
        /// WebCamDevice when no name matches.
        /// </summary>
        private void Start()
        {
            resizer = new TextureResizer();
            devices = WebCamTexture.devices;

            // In the editor use the explicitly configured camera name; on device,
            // pick one matching the preferred kind / facing.
            string cameraName = Application.isEditor
                ? editorCameraName
                : WebCamUtil.FindName(preferKind, isFrontFacing);

            WebCamDevice device = default;
            int index = 0;
            foreach (WebCamDevice candidate in devices)
            {
                if (candidate.name == cameraName)
                {
                    device      = candidate;
                    deviceIndex = index;
                    break;
                }
                index++;
            }
            StartCamera(device);
        }
// Code example #11
        /// <summary>
        /// Runs pose-landmark inference: crops the pose ROI out of
        /// <paramref name="inputTex"/> (with webcam-adjusted options) and reads
        /// output tensors 0 and 1.
        /// </summary>
        public void Invoke(Texture inputTex, PoseDetect.Result pose)
        {
            // Webcam input needs rotation/mirror-compensated resize options.
            var options = inputTex is WebCamTexture webcamTex
                ? resizeOptions.GetModifedForWebcam(webcamTex)
                : resizeOptions;

            cropMatrix = CalcCropMatrix(ref pose, ref options);

            // Crop + resize to the model's input resolution, then run inference.
            var uvRect = TextureResizer.GetTextureST(inputTex, options);
            RenderTexture cropped = resizer.Resize(inputTex, options.width, options.height, true, cropMatrix, uvRect);
            ToTensor(cropped, input0, false);

            interpreter.SetInputTensorData(0, input0);
            interpreter.Invoke();
            interpreter.GetOutputTensorData(0, output0);
            interpreter.GetOutputTensorData(1, output1);
            // Output tensor 2 exists but is not used.
        }
// Code example #12
        /// <summary>
        /// Runs pose-landmark inference using the shared resizeOptions field,
        /// reading output tensors 0 and 1, plus the world-landmark tensors 3 and 4
        /// when enabled in the options field.
        /// </summary>
        public void Invoke(Texture inputTex, PoseDetect.Result pose)
        {
            // Crop to the pose ROI (resizeOptions field passed by ref).
            cropMatrix = CalcCropMatrix(ref pose, ref resizeOptions);

            // Crop + resize to the model's input resolution, then run inference.
            var uvRect = TextureResizer.GetTextureST(inputTex, resizeOptions);
            RenderTexture cropped = resizer.Resize(inputTex, resizeOptions.width, resizeOptions.height, true, cropMatrix, uvRect);
            ToTensor(cropped, input0, false);

            interpreter.SetInputTensorData(0, input0);
            interpreter.Invoke();
            interpreter.GetOutputTensorData(0, output0);
            interpreter.GetOutputTensorData(1, output1);
            // Output tensor 2 exists but is not used.

            // World-space landmark tensors are optional.
            if (options.useWorldLandmarks)
            {
                interpreter.GetOutputTensorData(3, output3);
                interpreter.GetOutputTensorData(4, output4);
            }
        }