Example #1
0
    //wait for KinectManager to completely update first
    void LateUpdate()
    {
        //Runs after KinectManager has refreshed joint data for this frame.
        //Reads the first tracked body's right-hand joints and orients this
        //GameObject from the computed wrist angle.
        //(Removed: dead commented-out KeyCode.R / showPaused() block and the
        //stale "TODO Your code here" scaffold marker.)
        if (bodies.Count > 0)
        {
            showCalvin();

            //some bodies, send orientation update
            GameObject thumbRight   = bodies[0].GetJoint(Windows.Kinect.JointType.ThumbRight);
            GameObject handRight    = bodies[0].GetJoint(Windows.Kinect.JointType.HandRight);
            GameObject handTipRight = bodies[0].GetJoint(Windows.Kinect.JointType.HandTipRight);

            //derive the wrist angle from the three joint positions
            //(exact semantics live in KinectCVUtilities.VerticalWristRotation — presumably degrees; verify there)
            float wristRotation = KinectCVUtilities.VerticalWristRotation(
                handTipRight.transform.localPosition,
                handRight.transform.localPosition,
                thumbRight.transform.localPosition
                );

            //update the rotation: apply the angle about the Y axis
            this.transform.rotation = Quaternion.Euler(0, wristRotation, 0);
        }
    }
Example #2
0
    //wait for KinectManager to completely update first
    void LateUpdate()
    {
        //Runs after KinectManager has finished its per-frame update.
        //Nothing to do until the sensor reports at least one body.
        if (bodies.Count == 0)
        {
            return;
        }

        //Right-hand joints of the first tracked body
        GameObject thumbJoint   = bodies[0].GetJoint(Windows.Kinect.JointType.ThumbRight);
        GameObject handJoint    = bodies[0].GetJoint(Windows.Kinect.JointType.HandRight);
        GameObject handTipJoint = bodies[0].GetJoint(Windows.Kinect.JointType.HandTipRight);

        //Compute the wrist angle from the three joint positions
        float angle = KinectCVUtilities.VerticalWristRotation(
            handTipJoint.transform.localPosition,
            handJoint.transform.localPosition,
            thumbJoint.transform.localPosition);

        //Apply the angle as a rotation about the Y axis
        transform.rotation = Quaternion.Euler(0, angle, 0);
    }
Example #3
0
    //wait for KinectManager to completely update first
    void LateUpdate()
    {
        //Runs after KinectManager has finished its per-frame update.
        //With no tracked bodies there is nothing to broadcast this frame.
        if (bodies.Count == 0)
        {
            return;
        }

        //Right-hand joints of the first tracked body
        GameObject thumb   = bodies[0].GetJoint(Windows.Kinect.JointType.ThumbRight);
        GameObject hand    = bodies[0].GetJoint(Windows.Kinect.JointType.HandRight);
        GameObject handTip = bodies[0].GetJoint(Windows.Kinect.JointType.HandTipRight);

        //Compute the wrist angle from the joint positions
        float rotationAngle = KinectCVUtilities.VerticalWristRotation(
            handTip.transform.localPosition,
            hand.transform.localPosition,
            thumb.transform.localPosition);

        //Package the angle into a network message; DeliverToSelf makes the
        //client receive its own message as well
        Message wristRotationMessage = new Message("WristRotation");
        wristRotationMessage.AddField("angle", "" + rotationAngle);
        wristRotationMessage.DeliverToSelf = true;
        networkItClient.SendMessage(wristRotationMessage);
    }
    private void DemoFaceTrack()
    {
        //Detect faces (and nested features such as eyes) in the Kinect color
        //frame using the Haar cascades, draw the detections into the image,
        //and push the annotated pixels back onto the Unity color texture.
        int ColorWidth  = kinectManager.ColorWidth;
        int ColorHeight = kinectManager.ColorHeight;

        //wrap the raw Kinect buffer (was kinectManager.ColorHeight; use the local already captured above)
        Mat colorImage = new Mat(ColorHeight, ColorWidth, MatType.CV_8UC4, kinectManager.ColorRawData);              //rows=height, cols=width
        Mat grayImage  = new Mat();

        //cascades operate on an equalized grayscale image
        Cv2.CvtColor(colorImage, grayImage, ColorConversionCodes.RGBA2GRAY);
        Cv2.EqualizeHist(grayImage, grayImage);

        OpenCvSharp.Rect[] faces = cascade.DetectMultiScale(
            image: grayImage,
            scaleFactor: 1.1,
            minNeighbors: 2,
            flags: HaarDetectionType.DoRoughSearch | HaarDetectionType.ScaleImage,
            minSize: new Size(30, 30)
            );

        for (int i = 0; i < faces.Length; i++)
        {
            OpenCvSharp.Rect faceRect = faces[i];

            //outline overall face in image; one random color per face so the
            //nested features below visually match their face
            var rndColor = Scalar.FromRgb(
                UnityEngine.Random.Range(0, 255),
                UnityEngine.Random.Range(0, 255),
                UnityEngine.Random.Range(0, 255)
                );
            Cv2.Rectangle(colorImage, faceRect, rndColor, 3);

            //now do nested features like the eyes; the ROI Mat shares pixel
            //data with colorImage, so no copy is made here
            Mat subFaceImage          = new Mat(colorImage, faceRect);
            Mat detectedFaceGrayImage = new Mat();
            Cv2.CvtColor(subFaceImage, detectedFaceGrayImage, ColorConversionCodes.RGBA2GRAY);
            var nestedObjects = nestedCascade.DetectMultiScale(
                image: detectedFaceGrayImage,
                scaleFactor: 1.1,
                minNeighbors: 2,
                flags: HaarDetectionType.DoRoughSearch | HaarDetectionType.ScaleImage,
                minSize: new Size(30, 30)
                );

            //display each nested face feature as a circle; nested coordinates
            //are ROI-relative, so offset by faceRect back into image space
            foreach (var nestedObject in nestedObjects)
            {
                var center = new Point
                {
                    X = (int)(Math.Round(nestedObject.X + nestedObject.Width * 0.5, MidpointRounding.ToEven) + faceRect.Left),
                    Y = (int)(Math.Round(nestedObject.Y + nestedObject.Height * 0.5, MidpointRounding.ToEven) + faceRect.Top)
                };
                var radius = Math.Round((nestedObject.Width + nestedObject.Height) * 0.25, MidpointRounding.ToEven);
                Cv2.Circle(colorImage, center, (int)radius, rndColor, thickness: 3);
            }

            //Mats wrap native OpenCV memory; release the per-face temporaries
            //so a leak does not accumulate every call
            subFaceImage.Dispose();
            detectedFaceGrayImage.Dispose();
        }

        //load the annotated image onto the Unity texture
        //(removed dead commented-out conversion block left over from the IR demo)
        byte[] rawTextureBytes = KinectCVUtilities.ConvertMatToBytes(colorImage);
        kinectManager.ColorTexture.LoadRawTextureData(rawTextureBytes);
        kinectManager.ColorTexture.Apply();

        //release the remaining native Mats (disposing colorImage frees only
        //the Mat header, not the Kinect-owned ColorRawData buffer)
        grayImage.Dispose();
        colorImage.Dispose();
    }
    private void DemoIRBlobTrack()
    {
        //Find bright blobs in the Kinect IR frame, map each blob into Unity
        //space twice (via the IR plane directly, and via depth->color mapping
        //onto the color plane), then draw the keypoints back onto the IR texture.
        int IRWidth  = kinectManager.IRWidth;
        int IRHeight = kinectManager.IRHeight;

        //get image and convert to threshold image
        Mat irImage = new Mat(IRHeight, IRWidth, MatType.CV_8UC4, kinectManager.IRRawData);              //rows=height, cols=width
        Mat ir8Bit  = new Mat();

        Cv2.CvtColor(irImage, ir8Bit, ColorConversionCodes.RGBA2GRAY);
        Cv2.Threshold(ir8Bit, ir8Bit, thresh: 200, maxval: 255, type: ThresholdTypes.Binary);

        //Find blobs: all filters disabled, so every connected bright region in
        //the thresholded image is reported (commented values show tuning knobs)
        SimpleBlobDetector.Params detectorParams = new SimpleBlobDetector.Params
        {
            //MinDistBetweenBlobs = 10, // 10 pixels between blobs
            //MinRepeatability = 1,

            //MinThreshold = 100,
            //MaxThreshold = 255,
            //ThresholdStep = 5,

            FilterByArea = false,
            //FilterByArea = true,
            //MinArea = 0.001f, // 10 pixels squared
            //MaxArea = 500,

            FilterByCircularity = false,
            //FilterByCircularity = true,
            //MinCircularity = 0.001f,

            FilterByConvexity = false,
            //FilterByConvexity = true,
            //MinConvexity = 0.001f,
            //MaxConvexity = 10,

            FilterByInertia = false,
            //FilterByInertia = true,
            //MinInertiaRatio = 0.001f,

            FilterByColor = false
                            //FilterByColor = true,
                            //BlobColor = 255 // to extract light blobs
        };

        SimpleBlobDetector simpleBlobDetector = SimpleBlobDetector.Create(detectorParams);

        KeyPoint[] blobs = simpleBlobDetector.Detect(ir8Bit);

        foreach (KeyPoint kp in blobs)
        {
            Vector2 blobPt = new Vector2(kp.Pt.X, kp.Pt.Y);

            //transform ir point to unity world space
            Vector2 irDimensions = new Vector2(kinectManager.IRWidth, kinectManager.IRHeight);
            irTrack.transform.localPosition = KinectCVUtilities.TransformTextureToUnity(irPlane, irDimensions, blobPt) + irOffset;

            //transform ir point to color space, then world space
            //NOTE(review): IR coords are used directly as depth coords here —
            //presumably the IR and depth frames share a resolution; verify
            DepthSpacePoint depthPt = new DepthSpacePoint();
            depthPt.X = blobPt.x;
            depthPt.Y = blobPt.y;
            double          depth         = GetAvg(kinectManager.DepthData, (int)depthPt.X, (int)depthPt.Y, kinectManager.DepthWidth, kinectManager.DepthHeight);
            ColorSpacePoint colorMappedPt = kinectManager.Sensor.CoordinateMapper.MapDepthPointToColorSpace(depthPt, (ushort)depth);

            Vector2 colorDimensions = new Vector2(kinectManager.ColorWidth, kinectManager.ColorHeight);
            Vector2 colorPt         = new Vector2(colorMappedPt.X, colorMappedPt.Y);
            colorTrack.transform.localPosition = KinectCVUtilities.TransformTextureToUnity(colorPlane, colorDimensions, colorPt) + colorOffset;
        }

        //convert back to unity texture, add nice debug drawings
        Mat irImageKeyPoints = new Mat();

        Cv2.DrawKeypoints(ir8Bit, blobs, irImageKeyPoints, color: Scalar.FromRgb(255, 0, 0),
                          flags: DrawMatchesFlags.DrawRichKeypoints);

        //Convert back to RGBA32
        //BUG FIX: Mat takes (rows, cols) = (height, width); arguments were swapped
        Mat irImageOut = new Mat(IRHeight, IRWidth, MatType.CV_8UC4);

        Cv2.CvtColor(irImageKeyPoints, irImageOut, ColorConversionCodes.BGR2RGBA);      //OpenCV is weird and has it in BGR format

        //load onto texture
        byte[] rawTextureData = KinectCVUtilities.ConvertMatToBytes(irImageOut);

        if (overrideIRTexture)
        {
            kinectManager.IRTexture.LoadRawTextureData(rawTextureData);
            kinectManager.IRTexture.Apply();
        }

        //release native OpenCV resources so per-call leaks do not accumulate
        //(disposing irImage frees only the Mat header, not the Kinect buffer)
        simpleBlobDetector.Dispose();
        irImageKeyPoints.Dispose();
        irImageOut.Dispose();
        ir8Bit.Dispose();
        irImage.Dispose();
    }