public UJoint(Joint joint)
    {
        // Bring the sensor-space joint position into Unity coordinates,
        // then scale it into scene space.
        Vector3 unityPosition = joint.Position.ToUnityVector3();
        Position = AdjustScale(unityPosition);

        // Orientation and tracking confidence carry over directly.
        Rotation = joint.Quaternion.ToUnityQuaternion();
        Confidence = joint.ConfidenceLevel;
    }
// ---- Пример #2 (Example #2) — sample separator left over from the code aggregator; score: 0 ----
        /// <summary>
        /// Maps a joint's tracking confidence to the brush used when rendering it.
        /// </summary>
        private Brush JointConfidenceLevelToToBrush(JointConfidenceLevel confidenceLevel)
        {
            if (confidenceLevel == JointConfidenceLevel.None)
            {
                return JointFillNone;
            }

            if (confidenceLevel == JointConfidenceLevel.Low)
            {
                return JointFillLow;
            }

            // Medium/High (and any future level) use the default fill.
            return JointFill;
        }
        /// <summary>
        /// Capture loop: runs the body tracker, re-projects depth into the color
        /// camera, crops a square window around the tracked right hand,
        /// down-samples it into a 30x30 float depth image and queues that image
        /// plus the joint data for the consumer.
        /// NOTE(review): async void — any exception thrown here is unobservable.
        /// </summary>
        private async void StartCamera()
        {
            //Start body track and sensor camera
            using (Tracker tracker = Tracker.Create(this.kinect.GetCalibration(), new TrackerConfiguration()
            {
                ProcessingMode = TrackerProcessingMode.Gpu, SensorOrientation = SensorOrientation.Default
            }))
                // Buffer receiving the depth image transformed into the color camera's geometry.
                using (Microsoft.Azure.Kinect.Sensor.Image transformedDepth = new Microsoft.Azure.Kinect.Sensor.Image(Microsoft.Azure.Kinect.Sensor.ImageFormat.Depth16, colorWidth, colorHeight, colorWidth * sizeof(UInt16)))
                {
                    // Local variables
                    //
                    // Down-sampled depth crop handed to the predictor;
                    // 255 marks "no depth sample yet".
                    float[,] actualPhoto;

                    // Right-hand position projected into the color camera
                    // (X/Y in pixels, Z carried over from the depth reading).
                    Vector3 handPositionColor = new Vector3();

                    // Right-hand position in depth-camera space.
                    Vector3 HandPositionDepth = Vector3.Zero;

                    // Right-elbow position in depth-camera space.
                    Vector3 ElbowPositionDepth = Vector3.Zero;

                    // Id of the first tracked body (0 = none seen yet).
                    uint trackedBodyId = 0;

                    // Hand-joint confidence for the most recent frame.
                    JointConfidenceLevel handConf = JointConfidenceLevel.Low;

                    // Elbow-joint confidence for the most recent frame.
                    JointConfidenceLevel elbowConf = JointConfidenceLevel.Low;

                    // Current pixel coordinates while scanning the depth buffer.
                    int x, y;

                    // Skeleton of the first body in the frame.
                    Skeleton body;

                    while (running)
                    {
                        // Reset the prediction image to the "empty" marker value.
                        // NOTE(review): the array is sized by actualPhotoWidht/Height
                        // but the loops hard-code 30 — these must agree.
                        actualPhoto = new float[actualPhotoWidht, actualPhotoHeight];
                        for (int i = 0; i < 30; i++)
                        {
                            for (int j = 0; j < 30; j++)
                            {
                                actualPhoto[i, j] = 255;
                            }
                        }

                        // Grab the next capture off a worker thread.
                        using (Capture capture = await Task.Run(() => { return(this.kinect.GetCapture()); }))
                        {
                            // Re-project depth into the color camera's viewpoint.
                            this.transform.DepthImageToColorCamera(capture, transformedDepth);

                            // Queue latest frame from the sensor.
                            tracker.EnqueueCapture(capture);

                            // Pop the latest body-tracking result (non-blocking; may be null).
                            using (Microsoft.Azure.Kinect.BodyTracking.Frame frame = tracker.PopResult(TimeSpan.Zero, throwOnTimeout: false))
                            {
                                // Only proceed when at least one body is tracked.
                                if (frame != null && frame.NumberOfBodies >= 1)
                                {
                                    // NOTE(review): trackedBodyId is uint, so "<= 0" is
                                    // equivalent to "== 0"; the id is latched but body
                                    // index 0 is always read below regardless.
                                    if (trackedBodyId <= 0)
                                    {
                                        trackedBodyId = frame.GetBody(0).Id;
                                    }

                                    //get body
                                    body = frame.GetBodySkeleton(0);

                                    // Only use frames where the hand confidence is
                                    // medium or high.
                                    handConf = body.GetJoint(JointId.HandRight).ConfidenceLevel;

                                    // Skip low-confidence frames entirely: `continue`
                                    // restarts the while loop, so no image is queued.
                                    if (handConf == JointConfidenceLevel.Low || handConf == JointConfidenceLevel.None)
                                    {
                                        continue;
                                    }

                                    // get hand position
                                    HandPositionDepth = body.GetJoint(JointId.HandRight).Position;

                                    // elbow confidence and position
                                    ElbowPositionDepth = body.GetJoint(JointId.ElbowRight).Position;
                                    elbowConf          = body.GetJoint(JointId.ElbowRight).ConfidenceLevel;

                                    // Project the 3D hand position into 2D color-camera pixels.
                                    var handPositionColorQ = kinect.GetCalibration().TransformTo2D(HandPositionDepth, CalibrationDeviceType.Depth, CalibrationDeviceType.Color);

                                    // NOTE(review): the nullable result is dereferenced
                                    // without a HasValue check — this throws if the point
                                    // cannot be projected.
                                    handPositionColor.X = handPositionColorQ.Value.X;
                                    handPositionColor.Y = handPositionColorQ.Value.Y;
                                    handPositionColor.Z = HandPositionDepth.Z;

                                    // Scan every depth pixel and keep those inside the
                                    // square window centred on the projected hand.

                                    // Color capture whose memory is pinned below.
                                    var color = capture.Color;

                                    // unsafe: raw pointer access into the pinned buffers.
                                    unsafe
                                    {
                                        // Pin the color buffer for the duration of the scan.
                                        using (var pin = color.Memory.Pin())
                                        {
                                            // Raw pointer into the transformed depth buffer (one ushort per pixel).
                                            // NOTE(review): this Pin() handle is never disposed —
                                            // the depth buffer stays pinned.
                                            ushort *depthPixels = (ushort *)transformedDepth.Memory.Pin().Pointer;

                                            // Top-left corner of the hand square, captured on first hit.
                                            bool isFirst = true;
                                            int  xref = 0, yref = 0;
                                            x = 0; y = 0;
                                            // Walk the depth buffer as a flat array, tracking (x, y).
                                            for (int i = 0; i < this.colorHeight * this.colorWidth; i++)
                                            {
                                                // NOTE(review): row width is hard-coded to 1280
                                                // instead of colorWidth, and x is incremented
                                                // before the wrap check — coordinates are skewed
                                                // if colorWidth != 1280.
                                                x++;
                                                if (i % 1280 == 0)
                                                {
                                                    x = 0;
                                                    y++;
                                                }
                                                // Pixel inside the square around the hand?
                                                if ((y - squareRadious) < ((int)handPositionColor.Y) && (y + squareRadious) > ((int)handPositionColor.Y) && (x - squareRadious) < ((int)handPositionColor.X) && (x + squareRadious) > ((int)handPositionColor.X))
                                                {
                                                    // Remember the square's first pixel as the crop origin.
                                                    if (isFirst)
                                                    {
                                                        xref    = x;
                                                        yref    = y;
                                                        isFirst = false;
                                                    }

                                                    // Keep only pixels strictly between Z - d1 and
                                                    // Z + d2, ignoring invalid (0) depth readings.
                                                    if (depthPixels[i] < handPositionColor.Z + d2 && depthPixels[i] > handPositionColor.Z - d1 && depthPixels[i] != 0)
                                                    {
                                                        // Rescale depth relative to the hand into roughly 0..250.
                                                        float deep = (((int)depthPixels[i] - ((int)handPositionColor.Z - d1)) * 250) / 200;


                                                        // Down-sample: each 10x10 patch maps to one output cell.
                                                        uint xs = (uint)((x - xref) / 10);
                                                        uint ys = (uint)((y - yref) / 10);

                                                        // Keep the max depth per cell; 255 means "still empty".
                                                        if ((uint)deep > actualPhoto[ys, xs] || actualPhoto[ys, xs] == 255)
                                                        {
                                                            actualPhoto[ys, xs] = deep;
                                                        }
                                                    }
                                                }
                                            }
                                        }
                                    }//unsafe
                                }
                            }
                        }

                        // Publish the frame's image and joint data to the consumer queues.
                        images.Add(actualPhoto);
                        bodyData.Add(new BodyData()
                        {
                            HandConfidence = handConf,
                            HandPosition   = HandPositionDepth,
                            ElvowPosition  = ElbowPositionDepth
                        });
                    }//while (running)
                }
        }
        /// <summary>
        /// Capture/preview loop: tracks the right hand with the body tracker,
        /// blits the color frame to the preview bitmap, draws a square outline
        /// around the hand, builds a normalized 30x30 depth crop and writes it
        /// to disk while a recording is in progress.
        /// NOTE(review): async void — any exception thrown here is unobservable.
        /// </summary>
        private async void StartCamera()
        {
            //body track
            using (Tracker tracker = Tracker.Create(this.kinect.GetCalibration(), new TrackerConfiguration()
            {
                ProcessingMode = TrackerProcessingMode.Gpu, SensorOrientation = SensorOrientation.Default
            }))
                // Buffer receiving the depth image transformed into the color camera's geometry.
                using (Microsoft.Azure.Kinect.Sensor.Image transformedDepth = new Microsoft.Azure.Kinect.Sensor.Image(
                           Microsoft.Azure.Kinect.Sensor.ImageFormat.Depth16, colorWidth, colorHeight, colorWidth * sizeof(UInt16)))
                {
                    //local variables


                    // Normalized (0..1) 30x30 depth crop; 1 marks "no sample yet".
                    float[,] lastImage;
                    // 30x30 preview bitmap of lastImage.
                    // NOTE(review): a new Bitmap is allocated every frame and never disposed.
                    Bitmap bm = null;

                    // Id of the first tracked body (0 = none yet) and latest hand confidence.
                    uint trackedBodyId        = 0;
                    JointConfidenceLevel conf = JointConfidenceLevel.Low;

                    while (running)
                    {
                        // Grab the next capture off a worker thread.
                        using (Capture capture = await Task.Run(() => { return(this.kinect.GetCapture()); }))
                        {
                            // Reset the per-frame outputs.
                            bm = new Bitmap(30, 30);
                            // Full-resolution (not down-sampled) crop of the hand square.
                            uint[,] foto = new uint[300, 300];
                            lastImage    = new float[30, 30];
                            for (int i = 0; i < 30; i++)
                            {
                                for (int j = 0; j < 30; j++)
                                {
                                    lastImage[i, j] = 1;
                                    bm.SetPixel(i, j, System.Drawing.Color.Red);
                                }
                            }



                            // Re-project depth into the color camera's viewpoint.
                            this.transform.DepthImageToColorCamera(capture, transformedDepth);

                            //this.StatusText = "Received Capture: " + capture.Depth.DeviceTimestamp;

                            // Lock the preview bitmap for back-buffer writes below.
                            this.bitmapColorCamera.Lock();


                            // Color capture shown as the live preview.
                            var color = capture.Color;

                            // Full-frame and 30x30 dirty regions.
                            var region  = new Int32Rect(0, 0, color.WidthPixels, color.HeightPixels);
                            var region1 = new Int32Rect(0, 0, 30, 30);

                            // Queue latest frame from the sensor.
                            tracker.EnqueueCapture(capture);


                            // Pop the latest body-tracking result (non-blocking; may be null).
                            using (Microsoft.Azure.Kinect.BodyTracking.Frame frame = tracker.PopResult(TimeSpan.Zero, throwOnTimeout: false))
                            {
                                // Update the hand position when a body is tracked;
                                // otherwise the previous frame's value is reused below.
                                if (frame != null && frame.NumberOfBodies >= 1)
                                {
                                    // NOTE(review): trackedBodyId is uint, so "<= 0" means
                                    // "== 0"; body index 0 is always read regardless.
                                    if (trackedBodyId <= 0)
                                    {
                                        trackedBodyId = frame.GetBody(0).Id;
                                    }
                                    var numOfSkeletos = frame.NumberOfBodies;

                                    var body = frame.GetBodySkeleton(0);
                                    var pos  = body.GetJoint(JointId.HandRight).Position;
                                    conf = body.GetJoint(JointId.HandRight).ConfidenceLevel;
                                    // Project the 3D hand into 2D color-camera pixels.
                                    // NOTE(review): the nullable result is dereferenced
                                    // without a HasValue check — throws if the point
                                    // cannot be projected.
                                    var handposition = kinect.GetCalibration().TransformTo2D(pos, CalibrationDeviceType.Depth, CalibrationDeviceType.Color);
                                    handpositionGlobal.X = handposition.Value.X;
                                    handpositionGlobal.Y = handposition.Value.Y;
                                    // Z is carried over unchanged from the depth reading.
                                    handpositionGlobal.Z = pos.Z;
                                }
                            }

                            // unsafe: raw pointer access into the pinned buffers.
                            unsafe
                            {
                                using (var pin = color.Memory.Pin())
                                {
                                    // Blit the color frame into the preview bitmap.
                                    this.bitmapColorCamera.WritePixels(region, (IntPtr)pin.Pointer, (int)color.Size, color.StrideBytes);

                                    // Raw pixel pointers: preview back buffer and transformed depth.
                                    uint *  colorPixels = (uint *)this.bitmapColorCamera.BackBuffer;
                                    // NOTE(review): this Pin() handle is never disposed —
                                    // the depth buffer stays pinned.
                                    ushort *depthPixels = (ushort *)transformedDepth.Memory.Pin().Pointer;


                                    int closest = 501;
                                    int x       = 0;
                                    int y       = 0;
                                    int cx      = 0;
                                    int cy      = 0;
                                    // Debug.WriteLine(pos.X);
                                    // Top-left corner of the hand square, captured on first hit.
                                    bool isFirst = true;
                                    int  xref = 0, yref = 0;
                                    int  finalcount = 0;
                                    for (int i = 0; i < this.colorHeight * this.colorWidth; i++)
                                    {
                                        //if (i > 512000)
                                        //{
                                        //    colorPixels[i] = 0;
                                        //    continue;
                                        //}
                                        // Maintain (x, y) from the flat index.
                                        // NOTE(review): row width is hard-coded to 1280
                                        // instead of colorWidth, and x is incremented
                                        // before the wrap check.
                                        x++;
                                        if (i % 1280 == 0)
                                        {
                                            x = 0;
                                            y++;
                                        }


                                        // Outline color for the hand square.
                                        int color_data = 255 << 16; // R
                                        color_data |= 128 << 8;     // G
                                        color_data |= 255 << 0;     // B
                                        // Half-size of the square drawn around the hand.
                                        int ss = 150;

                                        // Vertical edge at x == hand.X + ss.
                                        if (x - ss == ((int)handpositionGlobal.X) && (y - ss) < ((int)handpositionGlobal.Y) && (y + ss) > ((int)handpositionGlobal.Y))
                                        {
                                            colorPixels[i] = (uint)color_data;
                                        }

                                        // Vertical edge at x == hand.X - ss.
                                        if (x + ss == ((int)handpositionGlobal.X) && (y - ss) < ((int)handpositionGlobal.Y) && (y + ss) > ((int)handpositionGlobal.Y))
                                        {
                                            colorPixels[i] = (uint)color_data;
                                        }
                                        // Horizontal edge at y == hand.Y - ss.
                                        if (y + ss == ((int)handpositionGlobal.Y) && (x - ss) < ((int)handpositionGlobal.X) && (x + ss) > ((int)handpositionGlobal.X))
                                        {
                                            colorPixels[i] = (uint)color_data;
                                        }

                                        // Horizontal edge at y == hand.Y + ss.
                                        if (y - ss == ((int)handpositionGlobal.Y) && (x - ss) < ((int)handpositionGlobal.X) && (x + ss) > ((int)handpositionGlobal.X))
                                        {
                                            colorPixels[i] = (uint)color_data;
                                        }

                                        // Pixel inside the hand square?
                                        if ((y - ss) < ((int)handpositionGlobal.Y) && (y + ss) > ((int)handpositionGlobal.Y) && (x - ss) < ((int)handpositionGlobal.X) && (x + ss) > ((int)handpositionGlobal.X))
                                        {
                                            // Remember the square's first pixel as the crop origin.
                                            if (isFirst)
                                            {
                                                xref    = x;
                                                yref    = y;
                                                isFirst = false;
                                            }

                                            // Keep only pixels strictly between Z - 150 and Z + 50,
                                            // ignoring invalid (0) depth readings.
                                            if (depthPixels[i] < handpositionGlobal.Z + 50 && depthPixels[i] > handpositionGlobal.Z - 150 && depthPixels[i] != 0)
                                            {
                                                // Rescale relative depth into ~0..250, then normalize to 0..1.
                                                float deep  = (((int)depthPixels[i] - ((int)handpositionGlobal.Z - 150)) * 250) / 200;
                                                float deepF = deep / 255.0f;

                                                foto[y - yref, x - xref] = (uint)deep;

                                                // Down-sample: each 10x10 patch maps to one 30x30 cell.
                                                uint xs = (uint)((x - xref) / 10);
                                                uint ys = (uint)((y - yref) / 10);

                                                // Keep the max value per cell; 1 means "still empty".
                                                if (deepF > lastImage[ys, xs] || lastImage[ys, xs] == 1)
                                                {
                                                    lastImage[ys, xs] = deepF;
                                                    var temp = System.Drawing.Color.FromArgb((int)deep, (int)deep, (int)deep);
                                                    // NOTE(review): SetPixel receives (ys, xs) here but
                                                    // (i, j) in the reset loop — axes may be swapped.
                                                    bm.SetPixel((int)ys, (int)xs, temp);
                                                }
                                            }
                                            else
                                            {
                                                // Outside the depth band: mark the raw crop as background.
                                                foto[y - yref, x - xref] = 1;

                                                uint xs = (uint)((x - xref) / 10);
                                                uint ys = (uint)((y - yref) / 10);



                                                // bm.SetPixel(x - xref, y - yref, System.Drawing.Color.FromArgb((int)(depthPixels[i] / 100), (int)(depthPixels[i] / 100), (int)(depthPixels[i]) / 100));
                                            }
                                        }
                                    }
                                }
                            }

                            // Publish the annotated color frame to the UI.
                            this.bitmapColorCamera.AddDirtyRect(region);
                            this.bitmapColorCamera.Unlock();

                            //this.bitmapFinal = System.Windows.Interop.Imaging.CreateBitmapSourceFromHBitmap(bm.GetHbitmap(), IntPtr.Zero, Int32Rect.Empty, BitmapSizeOptions.FromEmptyOptions());

                            // While a recording is pending, persist `actual` consecutive
                            // confident frames, then flag completion in the UI.
                            if (saveNext && actual > 0 && conf != JointConfidenceLevel.Low)
                            {
                                WriteToFile(lastImage, status);
                                if (actual == 1)
                                {
                                    saveNext = false;
                                    _main.RecordFedbackFill = new SolidColorBrush(Colors.Green);
                                }
                                actual--;
                            }
                        }
                    }
                }
        }
        /// <summary>
        /// Recording loop: tracks the right hand, forwards confident positions
        /// to the manager, builds a 30x30 depth crop of the hand square and
        /// queues it for prediction when tracking confidence is sufficient.
        /// NOTE(review): async void — any exception thrown here is unobservable.
        /// </summary>
        private async void Recorder_Loaded()
        {
            //body track
            using (Tracker tracker = Tracker.Create(this.kinect.GetCalibration(), new TrackerConfiguration()
            {
                ProcessingMode = TrackerProcessingMode.Gpu, SensorOrientation = SensorOrientation.Default
            }))
                // Buffer receiving the depth image transformed into the color camera's geometry.
                using (Microsoft.Azure.Kinect.Sensor.Image transformedDepth = new Microsoft.Azure.Kinect.Sensor.Image(Microsoft.Azure.Kinect.Sensor.ImageFormat.Depth16, colorWidth, colorHeight, colorWidth * sizeof(UInt16)))
                {
                    // Id of the first tracked body (0 = none yet) and latest hand confidence.
                    uint trackedBodyId        = 0;
                    JointConfidenceLevel conf = JointConfidenceLevel.Low;

                    while (running)
                    {
                        // Grab the next capture off a worker thread.
                        using (Capture capture = await Task.Run(() => { return(this.kinect.GetCapture()); }))
                        {
                            // Output depth crop; 255 marks "no depth sample yet".
                            float[,] actualPhoto = new float[30, 30];
                            for (int i = 0; i < 30; i++)
                            {
                                for (int j = 0; j < 30; j++)
                                {
                                    actualPhoto[i, j] = 255;
                                }
                            }

                            // Re-project depth into the color camera's viewpoint.
                            this.transform.DepthImageToColorCamera(capture, transformedDepth);

                            // Queue latest frame from the sensor.
                            tracker.EnqueueCapture(capture);

                            // Pop the latest body-tracking result (non-blocking; may be null).
                            using (Microsoft.Azure.Kinect.BodyTracking.Frame frame = tracker.PopResult(TimeSpan.Zero, throwOnTimeout: false))
                            {
                                // Update the hand position when a body is tracked;
                                // otherwise the previous HandPositionColor is reused below.
                                if (frame != null && frame.NumberOfBodies >= 1)
                                {
                                    // NOTE(review): trackedBodyId is uint, so "<= 0" means
                                    // "== 0"; body index 0 is always read regardless.
                                    if (trackedBodyId <= 0)
                                    {
                                        trackedBodyId = frame.GetBody(0).Id;
                                    }
                                    var numOfSkeletos = frame.NumberOfBodies;
                                    // Debug.WriteLine("num of bodies: " + numOfSkeletos);

                                    var body = frame.GetBodySkeleton(0);
                                    var pos  = body.GetJoint(JointId.HandRight).Position;

                                    // Fingertip/wrist/orientation are read but not used below.
                                    var fpos  = body.GetJoint(JointId.HandTipRight).Position;
                                    var rpost = body.GetJoint(JointId.WristRight).Position;
                                    conf = body.GetJoint(JointId.HandRight).ConfidenceLevel;
                                    Quaternion orin = body.GetJoint(JointId.HandTipRight).Quaternion;
                                    //Debug.WriteLine(orin);
                                    //   Debug.WriteLine(pos);

                                    /*
                                     * if (conf == JointConfidenceLevel.Low)
                                     * {
                                     *  this.bitmapMain.AddDirtyRect(region);
                                     *  this.bitmapMain.Unlock();
                                     *
                                     *  this.Crop.Source = System.Windows.Interop.Imaging.CreateBitmapSourceFromHBitmap(bm.GetHbitmap(), IntPtr.Zero, Int32Rect.Empty, BitmapSizeOptions.FromEmptyOptions());
                                     *  continue;
                                     * }*/

                                    // Previous hand position (read but unused).
                                    var oldHand = HandPositionColor;
                                    // Project the 3D hand into 2D color-camera pixels for the crop square.
                                    // NOTE(review): the nullable result is dereferenced without
                                    // a HasValue check — throws if the point cannot be projected.
                                    var handposition = kinect.GetCalibration().TransformTo2D(pos, CalibrationDeviceType.Depth, CalibrationDeviceType.Color);
                                    HandPositionColor.X = handposition.Value.X;
                                    HandPositionColor.Y = handposition.Value.Y;
                                    // Z is carried over unchanged from the depth reading.
                                    HandPositionColor.Z = pos.Z;


                                    // Forward only medium/high-confidence positions to the manager.
                                    if (conf == JointConfidenceLevel.Medium || conf == JointConfidenceLevel.High)
                                    {
                                        manager.SetPosition(pos);
                                    }
                                    //  }
                                }
                            }


                            // Color capture (pinned below; only depth is actually read).
                            var color = capture.Color;
                            unsafe
                            {
                                using (var pin = color.Memory.Pin())
                                {
                                    // Raw pointer into the transformed depth buffer.
                                    // NOTE(review): this Pin() handle is never disposed —
                                    // the depth buffer stays pinned.
                                    ushort *depthPixels = (ushort *)transformedDepth.Memory.Pin().Pointer;

                                    // Nearest-pixel tracking; the seed of 501 means only
                                    // readings below 501 (presumably mm) are ever recorded.
                                    int closest = 501;
                                    int x       = 0;
                                    int y       = 0;
                                    int cx      = 0;
                                    int cy      = 0;
                                    // Debug.WriteLine(pos.X);
                                    // Top-left corner of the hand square, captured on first hit.
                                    bool isFirst = true;
                                    int  xref = 0, yref = 0;
                                    // Half-size of the crop square around the hand.
                                    int  squareRadious = 150;

                                    for (int i = 0; i < this.colorHeight * this.colorWidth; i++)
                                    {
                                        // Maintain (x, y) from the flat index.
                                        // NOTE(review): row width is hard-coded to 1280 instead
                                        // of colorWidth, and x is incremented before the wrap check.
                                        x++;
                                        if (i % 1280 == 0)
                                        {
                                            x = 0;
                                            y++;
                                        }
                                        // Pixel inside the hand square?
                                        if ((y - squareRadious) < ((int)HandPositionColor.Y) && (y + squareRadious) > ((int)HandPositionColor.Y) && (x - squareRadious) < ((int)HandPositionColor.X) && (x + squareRadious) > ((int)HandPositionColor.X))
                                        {
                                            // Remember the square's first pixel as the crop origin.
                                            if (isFirst)
                                            {
                                                xref    = x;
                                                yref    = y;
                                                isFirst = false;
                                            }


                                            // Track the nearest pixel inside the square;
                                            // ox/oy latch the previous nearest coordinates once.
                                            if (depthPixels[i] < closest)
                                            {
                                                if (ox == 0)
                                                {
                                                    ox = cx;
                                                    oy = cy;
                                                }
                                                closest = depthPixels[i];

                                                cx = x;
                                                cy = y;
                                            }


                                            // Keep only pixels strictly between Z - 150 and Z + 50,
                                            // ignoring invalid (0) depth readings.
                                            if (depthPixels[i] < HandPositionColor.Z + 50 && depthPixels[i] > HandPositionColor.Z - 150 && depthPixels[i] != 0)
                                            {
                                                // Rescale relative depth into roughly 0..250.
                                                float deep = (((int)depthPixels[i] - ((int)HandPositionColor.Z - 150)) * 250) / 200;
                                                //  Debug.WriteLine("deep" + (uint)deep);
                                                // Down-sample: each 10x10 patch maps to one 30x30 cell.
                                                uint xs = (uint)((x - xref) / 10);
                                                uint ys = (uint)((y - yref) / 10);

                                                // Keep the max value per cell; 255 means "still empty".
                                                if ((uint)deep > actualPhoto[ys, xs] || actualPhoto[ys, xs] == 255)
                                                {
                                                    actualPhoto[ys, xs] = deep;
                                                }
                                            }
                                        }
                                    }

                                    // manager.SetPosition(new Vector3(cx, cy, 0));
                                }
                            }

                            // Queue the crop for prediction only when the hand was
                            // confidently tracked this frame.
                            if (conf != JointConfidenceLevel.Low)
                            {
                                images.Add(actualPhoto);
                                // Debug.WriteLine("add from queue");
                                // NOTE(review): empty body — presumably meant to bound
                                // the queue size; confirm intent.
                                if (images.Count > 10)
                                {
                                }
                            }


                            //  Debug.WriteLine(fotoS);
                        }
                    }
                }
        }
// ---- Пример #6 (Example #6) — sample separator left over from the code aggregator; score: 0 ----
    // New method with signature for Kinect V2 instance of Body.
    /// <summary>
    /// Populates this instance's joint arrays from a Kinect V2
    /// <see cref="Windows.Kinect.Body"/>, remapping the V2 joint enumeration
    /// into the slot order used by the Azure Kinect Body Tracking layout.
    /// </summary>
    /// <param name="body">Kinect V2 body whose joints are copied.</param>
    /// <param name="coordinateMapper">Mapper used to project 3D camera-space points into 2D color space.</param>
    public void CopyFromBodyTrackingSdk(Windows.Kinect.Body body, CoordinateMapper coordinateMapper)
    {
        // V2 joint types listed in the destination slot order.
        Windows.Kinect.JointType[] k2Joints =
        {
            Windows.Kinect.JointType.SpineBase,
            Windows.Kinect.JointType.SpineMid,
            Windows.Kinect.JointType.Neck,
            Windows.Kinect.JointType.Head,
            Windows.Kinect.JointType.ShoulderRight,
            Windows.Kinect.JointType.ElbowRight,
            Windows.Kinect.JointType.WristRight,
            Windows.Kinect.JointType.HandRight,
            Windows.Kinect.JointType.ShoulderLeft,
            Windows.Kinect.JointType.ElbowLeft,
            Windows.Kinect.JointType.WristLeft,
            Windows.Kinect.JointType.HandLeft,
            Windows.Kinect.JointType.HipRight,
            Windows.Kinect.JointType.KneeRight,
            Windows.Kinect.JointType.AnkleRight,
            Windows.Kinect.JointType.FootRight,
            Windows.Kinect.JointType.HipLeft,
            Windows.Kinect.JointType.KneeLeft,
            Windows.Kinect.JointType.AnkleLeft,
            Windows.Kinect.JointType.FootLeft,
            Windows.Kinect.JointType.SpineChest,
            Windows.Kinect.JointType.HandTipRight,
            Windows.Kinect.JointType.ThumbRight,
            Windows.Kinect.JointType.HandTipLeft,
            Windows.Kinect.JointType.ThumbLeft,
        };

        Id = (uint)body.TrackingId;

        // Iterate over the mapping table itself rather than a magic count,
        // so the loop stays correct if the table changes.
        for (int bodyPoint = 0; bodyPoint < k2Joints.Length; bodyPoint++)
        {
            // Resolve the V2 joint for this destination slot once.
            Windows.Kinect.JointType jointType = k2Joints[bodyPoint];

            // 3D position of this joint in camera space.
            System.Numerics.Vector3 pos3D = new System.Numerics.Vector3(
                body.Joints[jointType].Position.X,
                body.Joints[jointType].Position.Y,
                body.Joints[jointType].Position.Z
                );

            // Orientation quaternion of this joint.
            System.Numerics.Quaternion pos4D = new System.Numerics.Quaternion(
                body.JointOrientations[jointType].Orientation.X,
                body.JointOrientations[jointType].Orientation.Y,
                body.JointOrientations[jointType].Orientation.Z,
                body.JointOrientations[jointType].Orientation.W
                );

            JointPositions3D[bodyPoint] = pos3D;
            JointRotations[bodyPoint]   = pos4D;

            //TODO: Find the JointConfidence equalivent for Kinect V2
            // (V2 exposes TrackingState, not a confidence level; default value is stored for now).
            JointPrecisions[bodyPoint] = new JointConfidenceLevel();


            // Map the 3D camera-space point to a 2D color-space point.
            var position2d = coordinateMapper.MapCameraPointToColorSpace(body.Joints[jointType].Position);

            // NOTE(review): if MapCameraPointToColorSpace returns a value type this
            // null check is always true; failed mappings typically come back as
            // infinity coordinates instead — confirm against the SDK in use.
            if (position2d != null)
            {
                JointPositions2D[bodyPoint] = new System.Numerics.Vector2(position2d.X, position2d.Y);
            }
            else
            {
                JointPositions2D[bodyPoint].X = Constants.Invalid2DCoordinate;
                JointPositions2D[bodyPoint].Y = Constants.Invalid2DCoordinate;
            }
        }
    }