Наследование: MonoBehaviour
Пример #1
0
    // Unity camera callback, invoked after this camera finishes rendering.
    // Pushes the current transform into a native plugin and draws the OpenNI
    // depth map as a point cloud; lazily re-acquires the OpenNI component
    // when the depth generator is not yet available.
    void OnPostRender()
    {
        if (!mat) {
            // Lazily create the drawing material on first use.
            mat = new Material(Shader.Find("VertexLit"));
        }
        mat.SetPass (0);

        if (depthGenerator != null) {
            IntPtr depthP = depthGenerator.GetDepthMapPtr ();
            // "ZPD" (zero plane distance / focal length) and "ZPPS" (pixel size)
            // are the OpenNI properties the plugin needs to project depth to 3D.
            int focalLength = (int)depthGenerator.GetIntProperty ("ZPD");
            double pixelSize = depthGenerator.GetRealProperty ("ZPPS");
            Matrix4x4 m = camera.worldToCameraMatrix; // NOTE(review): legacy Component.camera shortcut — removed in newer Unity; confirm target Unity version

            bool result;

            // Draw the point cloud
            // The native plugin is stateful: the matrix/bias/scale/rotation must
            // be pushed before drawPointCloud — keep this call order.
            result = setMatrix (m[0], m[1], m[2], m[3], m[4], m[5], m[6], m[7], m[8], m[9],
            m[10], m[11], m[12], m[13], m[14], m[15]);
            setBias (openNI.bias.x, openNI.bias.y, openNI.bias.z);
            setScale (openNI.scale.x, openNI.scale.y, openNI.scale.z);
            setRotation (openNI.rotation.x, openNI.rotation.y, openNI.rotation.z);
            // 640x480 depth resolution is hard-coded — TODO confirm it matches the generator's map output mode.
            result = drawPointCloud (focalLength, pixelSize, 640, 480, depthP);
            Debug.Log ("DrawPointCloud returned: " + result);
        } else {
            // Try again
            // Depth generator not ready: re-fetch the OpenNI component so a
            // later frame can start drawing.
            openNI = GameObject.FindGameObjectWithTag ("OpenNI").GetComponent (typeof(OpenNI)) as OpenNI;
            depthGenerator = openNI.depth;
        }
    }
Пример #2
0
 /// @brief Initializes the current structure from an OpenNI version.
 /// @param ver the version to initialize from.
 /// @brief Initializes the current structure from an OpenNI version.
 /// @param ver the version to initialize from.
 public void InitFromOpenNIVersion(OpenNI.Version ver)
 {
     // Copy every version component across; the assignments are independent.
     m_build = ver.Build;
     m_maintenance = ver.Maintenance;
     m_minor = ver.Minor;
     m_major = ver.Major;
 }
Пример #3
0
        /// <summary>
        /// Check if image in the collection
        /// exists within a specified region of the
        /// @param handpoint
        /// </summary>
        /// <summary>
        /// Returns the index of the first image in the collection whose
        /// coordinates overlap the given hand point, or -1 when none does.
        /// </summary>
        public int findImageAt(OpenNI.Point3D p)
        {
            // Scan in collection order; the first overlapping image wins.
            for (int idx = 0; idx < this.images.Count; idx++)
            {
                bool overlaps = pointsOverlap(p, this.images.ElementAt(idx).getCoordinates());
                if (overlaps)
                {
                    return idx;
                }
            }

            // No image contains the hand point.
            return -1;
        }
Пример #4
0
        /// <summary>
        /// Check if image in the collection
        /// exists within a specified region of the
        /// @param handpoint
        /// </summary>
        /// <summary>
        /// Check if image in the collection
        /// exists within a specified region of the
        /// @param handpoint
        /// </summary>
        /// <remarks>
        /// Returns MOVE_LEFT / MOVE_RIGHT when the hand point hits a paging
        /// button (adjusting startIndex accordingly), the collection index of
        /// the overlapped image in the currently displayed window otherwise,
        /// or int.MinValue when nothing is hit.
        /// </remarks>
        public int findImageAt(OpenNI.Point3D p)
        {
            int retVal = int.MinValue;

            // Left paging button: page back, clamping at the first image.
            if (pointsOverlap(p, new Point(LEFT_X, LEFT_Y)))
            {
                if (startIndex - DISPLAY_COUNT < 0)
                {
                    startIndex = 0;
                }
                else
                {
                    startIndex -= DISPLAY_COUNT;
                }
                return MOVE_LEFT;
            }

            // Right paging button: page forward, clamping so the last full
            // window of images stays visible.
            if (pointsOverlap(p, new Point(RIGHT_X, RIGHT_Y)))
            {
                if (startIndex + DISPLAY_COUNT >= this.images.Count())
                {
                    if (images.Count() - DISPLAY_COUNT <= 0)
                    {
                        startIndex = 0;
                    }
                    else
                    {
                        startIndex = images.Count() - DISPLAY_COUNT;
                    }
                }
                else
                {
                    startIndex += DISPLAY_COUNT;
                }
                return MOVE_RIGHT;
            }

            // Scan only the images currently on screen:
            // [startIndex, startIndex + DISPLAY_COUNT).
            // BUG FIX: the loop previously ran while "i < DISPLAY_COUNT",
            // which checked nothing once startIndex advanced past 0.
            for (int i = startIndex; i < startIndex + DISPLAY_COUNT; i++)
            {
                if (i < this.images.Count() && pointsOverlap(p, this.images.ElementAt(i).getCoordinates()))
                {
                    retVal = i;
                    return retVal;
                }
            }

            return retVal;
        }
Пример #5
0
        /// <summary>
        /// Check if image in the collection
        /// exists within a specified region of the
        /// @param handpoint
        /// </summary>
        /// <summary>
        /// Returns the index of the image closest to the given hand point,
        /// as measured by pointsOverlap, or -1 when no image yields a
        /// distance below the initial Double.MaxValue sentinel.
        /// </summary>
        public int findImageAt(OpenNI.Point3D p)
        {
            int nearestIndex = -1;
            double nearestDist = Double.MaxValue;

            // Keep the running minimum distance and its index.
            for (int i = 0; i < this.images.Count; i++)
            {
                double dist = pointsOverlap(p, this.images.ElementAt(i).getCoordinates());
                if (dist < nearestDist)
                {
                    nearestDist = dist;
                    nearestIndex = i;
                }
            }
            return nearestIndex;
        }
Пример #6
0
    // Draws a white line between the projected screen positions of two
    // skeleton joints; skips drawing when either joint's tracking
    // confidence is at or below 0.5.
    void DrawLineBetweenJoints(OpenNI.SkeletonJoint first, OpenNI.SkeletonJoint second)
    {
        NISelectedPlayer player = playerSelection.GetPlayer(0);

        OpenNI.SkeletonJointPosition posA;
        OpenNI.SkeletonJointPosition posB;
        player.GetSkeletonJointPosition(first, out posA);
        player.GetSkeletonJointPosition(second, out posB);

        // Ignore poorly tracked joints to avoid drawing jitter.
        if (posA.Confidence <= 0.5 || posB.Confidence <= 0.5) return;

        // Convert from real-world coordinates to projective (screen) space.
        OpenNI.Point3D screenA = depthGenerator.ConvertRealWorldToProjective(posA.Position);
        OpenNI.Point3D screenB = depthGenerator.ConvertRealWorldToProjective(posB.Position);

        DrawLine.DrawSimpleLine(ref mapPixels,
            (int)(width - screenA.X / factor), (int)(height - screenA.Y / factor),
            (int)(width - screenB.X / factor), (int)(height - screenB.Y / factor),
            width, height,
            Color.white);
    }
Пример #7
0
    /// <summary>
    /// Converts an OpenNI (Kinect) point into Unity coordinates: flips the
    /// Z axis into Unity's coordinate system, applies the floor rotation and
    /// a configurable yaw offset, scales by kinectToUnityScale, optionally
    /// lifts the origin to the floor, and adds the position offset.
    /// </summary>
    /// <remarks>Removed the stale commented-out copy of this method that was
    /// kept above it; the live version below is the superset.</remarks>
    public Vector3 ConvertKinectPosition(OpenNI.Point3D position)
    {
        //we have to flip the z axis to get into unity's coordinate system
        Vector3 newPosition = Vector3.zero;
        newPosition.x = position.X;
        newPosition.y = position.Y;
        newPosition.z = -position.Z;

        // Yaw offset first, then the floor-plane correction, then scaling.
        newPosition = kinectToUnityScale * (Quaternion.Euler(0, yawOffset, 0) * kinectFloorRotator * newPosition);

        if (setKinectOriginToFloor)
        {
            // Shift so y == 0 corresponds to the detected floor height.
            newPosition.y += kinectDistanceFromFloor;
        }

        newPosition += positionOffset;

        return newPosition;
    }
Пример #8
0
        /// <summary>
        /// Hand-update callback: marshals onto the UI thread and writes the
        /// new hand position into the text boxes of the tracking control for
        /// this user, if one is registered.
        /// </summary>
        void HandTracker_HandUpdate(object sender, OpenNI.HandUpdateEventArgs e)
        {
            Dispatcher.BeginInvoke((Action)delegate
            {
                // Single dictionary lookup instead of ContainsKey + indexer.
                TrackingDataControl trackingDataControl;
                if (!handTrackingControlMap.TryGetValue(e.UserID, out trackingDataControl))
                {
                    return;
                }

                trackingDataControl.xTextBox.Text = e.Position.X.ToString();
                trackingDataControl.yTextBox.Text = e.Position.Y.ToString();
                trackingDataControl.zTextBox.Text = e.Position.Z.ToString();
            });
        }
	/*
	*	Kinect 1
	*/
	/// <summary>
	/// Converts a raw Kinect 1 (OpenNI) point into Unity coordinates by
	/// flipping the Z axis and applying the Kinect-to-Unity scale factor.
	/// </summary>
	public Vector3 ConvertRawKinectLocation(OpenNI.Point3D position)
	{
		//we have to flip the z axis to get into unity's coordinate system
		Vector3 flipped = new Vector3(position.X, position.Y, -position.Z);

		return kinectToUnityScale * flipped;
	}
Пример #10
0
 /// <summary>
 /// Form-closed handler: shuts down the NiTE middleware before the OpenNI
 /// driver it runs on top of (matching the teardown order used by the
 /// other form-close handlers in this codebase).
 /// </summary>
 private void FrmMainFormClosed(object sender, FormClosedEventArgs e)
 {
     NiTE.Shutdown();
     OpenNI.Shutdown();
 }
Пример #11
0
 /// <summary>
 /// Form-closing handler: disposes the user tracker (when present), then
 /// shuts down NiTE followed by the underlying OpenNI driver.
 /// </summary>
 private void FrmMainFormClosing(object sender, FormClosingEventArgs e)
 {
     if (userTracker != null)
     {
         userTracker.Dispose();
     }

     NiTE.Shutdown();
     OpenNI.Shutdown();
 }
Пример #12
0
 /// <summary>
 /// Moves the image at imageSelectedIndex to the hand point's location,
 /// persisting the new coordinates to the database first.
 /// </summary>
 internal void updateImageAtIndex(int imageSelectedIndex, OpenNI.Point3D handPoint, int imageSelectedId)
 {
     // Persist first, then move the on-screen image to the same spot.
     dbManager.updateImageCoordinatesWithId(imageSelectedId, handPoint);

     int targetX = (int)handPoint.X;
     int targetY = (int)handPoint.Y;
     this.images.ElementAt(imageSelectedIndex).moveImage(targetX, targetY);
 }
Пример #13
0
 /// <summary>
 /// Selects the image under the given hand point (if any) and records its
 /// index and database id for the ongoing interaction.
 /// </summary>
 public void checkAndSelectImage(OpenNI.Point3D handPoint)
 {
     int hitIndex = collection.findImageAt(handPoint);
     if (hitIndex <= -1)
     {
         // Nothing under the hand point.
         return;
     }

     collection.selectImageAtIndex(hitIndex);
     imageSelectedIndex = hitIndex;
     imageSelectedId = collection.getIdImageAtIndex(imageSelectedIndex);
     Console.WriteLine("Index, Id: " + imageSelectedIndex + "," + imageSelectedId + collection.getImageAtIndex(imageSelectedIndex).getCanvasId());
 }
Пример #14
0
 /// <summary>
 /// Unity start hook: finds the scene object tagged "OpenNI" and caches
 /// its OpenNI component plus the depth generator it exposes.
 /// </summary>
 void Start()
 {
     GameObject openNIObject = GameObject.FindGameObjectWithTag ("OpenNI");
     openNI = openNIObject.GetComponent (typeof(OpenNI)) as OpenNI;
     depthGenerator = openNI.depth;
 }
Пример #15
0
        /// <summary>
        /// Demo entry point: initializes OpenNI 2, enumerates devices,
        /// opens the default device, describes its sensors, configures a
        /// 640x480 depth stream and reads 30 frames before shutting down.
        /// </summary>
        public static void Main(string[] args)
        {
            // Default OpenNI2 redistributable location on Windows.
            var OpenNIPathOnWindows = "%ProgramFiles%\\OpenNI2\\Redist";

            InteropHelper.RegisterLibrariesSearchPath(OpenNIPathOnWindows);

            // BUG FIX: corrected the "Runnung" typo in the console output.
            Console.WriteLine("Running in {0}-bit mode.", Environment.Is64BitProcess ? "64" : "32");

            OpenNI.Initialize();

            var version = OpenNI.GetVersion();

            Console.WriteLine("OpenNI version: {0}.{1}.{2}.{3}.", version.Major, version.Minor, version.Maintenance, version.Build);
            Console.WriteLine();

            //OpenNI.SetLogMinSeverity(0);
            //OpenNI.SetLogConsoleOutput(true);
            //OpenNI.SetLogFileOutput(true);
            //Console.WriteLine("Log file path: {0}", OpenNI.GetLogFileName());

            var devices = OpenNI.GetDevices();

            Console.WriteLine("Found {0} device(s):", devices.Length);
            devices.ToList().ForEach(x => Console.WriteLine("{0} from {1} @ {2} with usb id {3}:{4}.", x.Name, x.Vendor, x.Uri, x.UsbVendorId, x.UsbProductId));
            Console.WriteLine();

            if (devices.Length == 0)
            {
                // Nothing to demo without hardware.
                return;
            }

            // open default device
            using (var device = Device.Open())
            {
                var deviceInfo = device.GetDeviceInfo();
                Console.WriteLine("Device {0} @ {1} was successfully opened.", deviceInfo.Name, deviceInfo.Uri);
                Console.WriteLine();

                if (device.IsDriverVersionPropertySupported)
                {
                    var driverVersion = device.DriverVersion;
                    Console.WriteLine("Driver version: {0}.{1}.{2}.{3}.", driverVersion.Major, driverVersion.Minor, driverVersion.Maintenance, driverVersion.Build);
                    Console.WriteLine("Hardware version: {0}.", device.HardwareVersion);
                    Console.WriteLine("Serial number: {0}.", device.SerialNumber);
                    Console.WriteLine();
                }

                // Describe each sensor the device exposes.
                var infraredSensorInfo = device.GetSensorInfo(SensorType.Infrared);
                DescribeSensor(infraredSensorInfo);
                Console.WriteLine();

                var colorSensorInfo = device.GetSensorInfo(SensorType.Color);
                DescribeSensor(colorSensorInfo);
                Console.WriteLine();

                var depthSensorInfo = device.GetSensorInfo(SensorType.Depth);
                DescribeSensor(depthSensorInfo);
                Console.WriteLine();

                using (var stream = device.CreateStream(SensorType.Depth))
                {
                    var streamSensorInfo = stream.GetSensorInfo();
                    DescribeVideoModes(streamSensorInfo);
                    Console.WriteLine();

                    // 100-micrometer depth units at VGA resolution, 30 fps.
                    stream.VideoMode = new VideoMode {
                        Fps = 30, PixelFormat = PixelFormat.Depth100UM, ResolutionX = 640, ResolutionY = 480
                    };

                    var videoMode = stream.VideoMode;

                    // Properties may be unsupported; fall back to sentinels.
                    var hFov     = stream.IsHorizontalFovPropertySupported ? stream.HorizontalFov : float.NaN;
                    var vFov     = stream.IsVerticalFovPropertySupported ? stream.VerticalFov : float.NaN;
                    var minValue = stream.IsMinValuePropertySupported ? stream.MinValue : -1;
                    var maxValue = stream.IsMaxValuePropertySupported ? stream.MaxValue : -1;

                    Console.WriteLine("Stream properties:");
                    // Radians-to-degrees conversion factor.
                    var rtdK = (float)(180.0 / Math.PI);
                    Console.WriteLine("Horizontal {0:0.0} (deg) and vertical {1:0.0} (deg) FOV.", hFov * rtdK, vFov * rtdK);
                    Console.WriteLine("Min {0} and max {1} values.", minValue, maxValue);

                    stream.Start();

                    // Read a fixed number of frames for the demo.
                    for (int i = 0; i < 30; i++)
                    {
                        ReadFreame(stream); // NOTE(review): helper name is misspelled at its definition site; renaming it is out of scope here
                    }


                    stream.Stop();
                }
            }
            OpenNI.Shutdown();
        }
Пример #16
0
    /// <summary>
    /// Converts an OpenNI skeleton joint orientation into a Unity
    /// quaternion: builds a look rotation from the orientation's Y (up) and
    /// Z (forward) rows, negates the x/y quaternion components to flip
    /// handedness, then applies the yaw offset and floor rotation.
    /// Returns identity when either basis vector is zero.
    /// </summary>
    public Quaternion ConvertKinectRotation(OpenNI.SkeletonJointOrientation rotation)
    {
        Vector3 upAxis = new Vector3(rotation.Y1, rotation.Y2, rotation.Y3);
        Vector3 forwardAxis = new Vector3(rotation.Z1, rotation.Z2, rotation.Z3);

        // A zero basis vector means the orientation is unusable.
        if (upAxis == Vector3.zero || forwardAxis == Vector3.zero)
        {
            return Quaternion.identity;
        }

        Quaternion result = Quaternion.LookRotation(forwardAxis, upAxis);
        result.x = -result.x;
        result.y = -result.y;

        return Quaternion.Euler(0, yawOffset, 0) * kinectFloorRotator * result;
    }
Пример #17
0
 // Device-connected callback: refresh the cached device list.
 static void OpenNI_onDeviceConnected(DeviceInfo Device)
     => ConnectedDevices = OpenNI.EnumerateDevices();
Пример #18
0
 // Device-state-changed callback: refresh the cached device list.
 static void OpenNI_onDeviceStateChanged(DeviceInfo Device, OpenNI.DeviceState state)
     => ConnectedDevices = OpenNI.EnumerateDevices();
Пример #19
0
 // Hand-tracking callback: only emits a diagnostic trace; the hand
 // position itself is not consumed here.
 void _sensor_updated(object sender, OpenNI.Point3D handPoint)
     => Console.WriteLine("Hand point updated");
 	public Quaternion ConvertRawKinectRotation(OpenNI.SkeletonJointOrientation rotation)
    {
        Vector3 up = new Vector3(rotation.Y1, rotation.Y2, rotation.Y3);
        Vector3 forward = new Vector3(rotation.Z1, rotation.Z2, rotation.Z3);

        if (up == Vector3.zero || forward == Vector3.zero) return Quaternion.identity;

        Quaternion newRotation = Quaternion.LookRotation(forward, up);

        newRotation.x = -newRotation.x;
        newRotation.y = -newRotation.y;

        return newRotation;
    }
Пример #21
0
 /// <summary>
 /// Form-closing handler: shuts down the NiTE middleware first, then the
 /// underlying OpenNI driver it runs on top of.
 /// </summary>
 private void FrmMainFormClosing(object sender, FormClosingEventArgs e)
 {
     NiTE.Shutdown();
     OpenNI.Shutdown();
 }
Пример #22
0
        /// <summary>
        /// Measures the 2D distance between a hand point and a screen point.
        /// Returns the Euclidean distance when the points lie within 50
        /// pixels of each other, or Double.MaxValue when they do not.
        /// (The previous comment claiming a 100-pixel boolean check was stale.)
        /// </summary>
        public double pointsOverlap(OpenNI.Point3D pointOne, Point pointTwo)
        {
            double dx = (double)(pointTwo.X - pointOne.X);
            double dy = (double)(pointTwo.Y - pointOne.Y);
            double distance = Math.Sqrt(dx * dx + dy * dy);

            // At or beyond the 50-pixel threshold counts as "no overlap".
            return distance < 50 ? distance : Double.MaxValue;
        }
Пример #23
0
        /// <summary>
        /// Demo entry point: initializes OpenNI, opens the default device
        /// with depth and color sensors, starts both streams, and counts
        /// frames from each until a key is pressed.
        /// </summary>
        private static void Main()
        {
            Console.WriteLine(OpenNI.Version.ToString());
            OpenNI.Status status = OpenNI.Initialize();
            if (!HandleError(status))
            {
                // NOTE(review): exits with code 0 even on init failure — confirm intended.
                Environment.Exit(0);
            }

            // Keep the device list events wired before enumerating.
            OpenNI.OnDeviceConnected    += OpenNiOnDeviceConnected;
            OpenNI.OnDeviceDisconnected += OpenNiOnDeviceDisconnected;
            DeviceInfo[] devices = OpenNI.EnumerateDevices();
            if (devices.Length == 0)
            {
                return;
            }

            Device device;

            // lean init and no reset flags
            using (device = Device.Open(null, "lr"))
            {
                if (device.HasSensor(Device.SensorType.Depth) && device.HasSensor(Device.SensorType.Color))
                {
                    VideoStream depthStream = device.CreateVideoStream(Device.SensorType.Depth);
                    VideoStream colorStream = device.CreateVideoStream(Device.SensorType.Color);
                    if (depthStream.IsValid && colorStream.IsValid)
                    {
                        // Start both streams; bail out (shutting OpenNI down) on failure.
                        if (!HandleError(depthStream.Start()))
                        {
                            OpenNI.Shutdown();
                            return;
                        }

                        if (!HandleError(colorStream.Start()))
                        {
                            OpenNI.Shutdown();
                            return;
                        }

                        // Background thread prints frame counters while we pump frames here.
                        new Thread(DisplayInfo).Start();
                        depthStream.OnNewFrame += DepthStreamOnNewFrame;
                        colorStream.OnNewFrame += ColorStreamOnNewFrame;
                        VideoStream[] array = { depthStream, colorStream };
                        while (!Console.KeyAvailable)
                        {
                            VideoStream aS;
                            // Blocks until either stream has a frame ready.
                            if (OpenNI.WaitForAnyStream(array, out aS) == OpenNI.Status.Ok)
                            {
                                if (aS.Equals(colorStream))
                                {
                                    inlineColor++;
                                }
                                else
                                {
                                    inlineDepth++;
                                }

                                // Consume and release the frame so the stream can reuse the buffer.
                                aS.ReadFrame().Release();
                            }
                        }
                    }
                }

                Console.ReadLine();
            }

            OpenNI.Shutdown();
            Environment.Exit(0);
        }
 /// <summary>
 /// True when p0's Y coordinate is strictly greater than p1's
 /// (presumably "above" in sensor space — Y up; verify against callers).
 /// </summary>
 private bool FirstAboveSecond(OpenNI.Point3D p0, OpenNI.Point3D p1)
 {
     return p0.Y > p1.Y;
 }
Пример #25
0
        /// <summary>
        /// Creates (on first sight) and refreshes the hand session for the
        /// given id with the latest raw, world, shoulder and projective
        /// positions, then notifies listeners. All access to the session map
        /// is serialized on the HandSessions lock.
        /// </summary>
        private void UpdateHandSession(int id, OpenNI.Point3D position, OpenNI.Point3D shoulderPosition)
        {
            lock (HandSessions)
            {
                if (!HandSessions.ContainsKey(id))
                {
                    // First update for this hand: register a fresh session.
                    var newSession = new HandSession();
                    newSession.Id = id;
                    HandSessions.Add(newSession.Id, newSession);
                }

                HandSession current = HandSessions[id];
                current.xnPosition = position;
                current.Position = MotionHelper.XnPoint3DToPoint3D(position);
                current.ShoulderPosition = MotionHelper.XnPoint3DToPoint3D(shoulderPosition);

                // Also cache the screen-space (projective) position.
                OpenNI.Point3D projective = depthGenerator.ConvertRealWorldToProjective(position);
                current.PositionProjective = MotionHelper.XnPoint3DToPoint3D(projective);

                OnPointUpdated(id, current);
            }
        }
Пример #26
0
 /// <summary>
 /// Opens the image gallery when the hand point overlaps the gallery
 /// button region (fixed at 200,50).
 /// </summary>
 public void checkAndDisplayGallery(OpenNI.Point3D handPoint)
 {
     Point galleryButton = new Point(200, 50);
     if (!pointsOverlap(handPoint, galleryButton))
     {
         return;
     }

     drawGallery();
     gallery.isSelected = true;
 }
Пример #27
0
 /// <summary>
 /// Sensor gesture callback: a "push" toggles image selection (first push
 /// selects, second deselects); any other event while an image is selected
 /// drags that image to the current hand point.
 /// </summary>
 void _sensor_updated(object sender, OpenNI.Point3D handPoint, string evtName)
 {
     if (evtName.Equals("push"))
     {
         pushCount++;
         switch (pushCount)
         {
             case 1:
                 checkAndSelectImage(handPoint);
                 break;
             case 2:
                 deselectImageAndReset();
                 break;
         }
     }
     else if (imageSelectedIndex > -1)
     {
         // Drag the currently selected image.
         DrawPixels(handPoint.X, handPoint.Y);
         collection.updateImageAtIndex(imageSelectedIndex, handPoint, imageSelectedId);
     }
 }
Пример #28
0
 /// <summary>
 /// Ends the current image interaction: removes the image when it was
 /// dropped on the trash region, otherwise just deselects it; then resets
 /// the selection/push state and clears the overlay canvas on the UI thread.
 /// </summary>
 public void deselectImageAndReset(OpenNI.Point3D handPoint)
 {
     bool droppedOnTrash = pointsOverlap(handPoint, new Point(TRASH_RIGHT, 25));
     if (droppedOnTrash)
     {
         collection.removeImageAtIndex(imageSelectedIndex, imageSelectedId);
     }
     else
     {
         collection.deselectImageAtIndex(imageSelectedIndex);
     }

     // Reset interaction state for the next gesture.
     pushCount = 0;
     imageSelectedId = -1;
     imageSelectedIndex = -1;

     Dispatcher.BeginInvoke((Action)delegate
     {
         canvas1.Children.Clear();
     });
 }
Пример #29
0
 /// <summary>
 /// Close the OpenNI (Kinect) environment.
 /// </summary>
 public static void Shutdown() => OpenNI.Shutdown();
Пример #30
0
        /// <summary>
        /// Sensor gesture callback driving the gallery/collection UI:
        /// "push" either interacts with an open gallery (page, or pick an
        /// image into the collection) or toggles selection of a collection
        /// image; "circle" closes the gallery; any other event drags the
        /// currently selected image.
        /// </summary>
        void _sensor_updated(object sender, OpenNI.Point3D handPoint, string evtName)
        {
            if(evtName.Equals("push"))
            {
                pushCount++;

                if (gallery.isSelected)
                {
                    int galleryVal = gallery.findImageAt(handPoint);
                    if (galleryVal == ImageGallery.MOVE_LEFT || galleryVal == ImageGallery.MOVE_RIGHT)
                    {
                        // Paging button hit: redraw the gallery window.
                        drawGallery();
                    }
                    else if(galleryVal >= 0)
                    {
                        collection.addImage(gallery.getImageAtIndex(galleryVal));
                    }
                }
                else
                {
                    if (pushCount == 1)
                    {
                        //TODO: Need to ensure user cannot drop image
                        //on top of gallery button
                        checkAndDisplayGallery(handPoint);
                        checkAndSelectImage(handPoint);
                    }
                    if(pushCount == 2)
                    {
                        deselectImageAndReset(handPoint);
                    }
                }
            }
            else if (evtName.Contains("circle") && gallery.isSelected)
            {
                // Close the gallery and restore the active image view.
                clearGalleryCanvas();
                gallery.isSelected = false;
                pushCount = 0;
                drawActiveImages();
            }
            else if (imageSelectedIndex > -1)
            {
                DrawPixels(handPoint.X, handPoint.Y);
                // BUG FIX: Console.WriteLine uses .NET composite formatting, not
                // C-style "%f" — the old call printed the literal "%f,%f" and
                // ignored the coordinates.
                Console.WriteLine("Hand point: {0},{1}", handPoint.X, handPoint.Y);
                collection.updateImageAtIndex(imageSelectedIndex, handPoint, imageSelectedId);
            }
        }
Пример #31
0
 /// <summary>
 /// Converts an OpenNI point into a WPF Media3D point, component-wise.
 /// </summary>
 internal static System.Windows.Media.Media3D.Point3D XnPoint3DToPoint3D(OpenNI.Point3D point)
     => new System.Windows.Media.Media3D.Point3D(point.X, point.Y, point.Z);
Пример #32
0
 /// <summary>
 /// Open the OpenNI (Kinect) environment.
 /// </summary>
 /// <returns>True if the process was successful; false otherwise.</returns>
 public static bool Initialize()
 {
     OpenNI.Status status = OpenNI.Initialize();
     return status == OpenNI.Status.Ok;
 }
Пример #33
0
        /// <summary>
        /// Detects fingertip candidates from a depth frame in three passes:
        /// (1) threshold the depth map against the calibration map into a
        /// binary mask, (2) morphologically open the mask, (3) extract up to
        /// MAX_FINGERS points from the result into the Fingers list.
        /// </summary>
        private void recognizeFingers(OpenNI.DepthMetaData depthMetaData)
        {
            //filter bitmap and save it to bufferSrc
            lock (bufferSrc)
            {
                unsafe
                {
                    ushort* pDepth = (ushort*)depthMetaData.DepthMapPtr.ToPointer();

                    fixed (double* calibrationMapPtr = calibrationMap)
                    {
                        double* pCal = calibrationMapPtr;
                        // Walk depth map and calibration map in lockstep; a pixel is
                        // "on" when its height above the calibrated surface falls in
                        // [NoiseThreshold, FingerThreshold).
                        for (int i = 0; i < calibrationMap.Length; ++i, ++pDepth, ++pCal)
                        {
                            double dist = *pCal - (double)*pDepth;
                            bufferSrc[i] = (byte)(dist >= NoiseThreshold && dist < FingerThreshold ? 0xFF : 0);
                        }
                    }
                }
            }

            //do open operation
            // Morphological open (erode + dilate) removes speckle noise;
            // bufferSwitch is scratch space for the native routine.
            lock (bufferDst)
            {
                unsafe
                {
                    fixed (byte* bufferSrcPtr = bufferSrc, bufferDstPtr = bufferDst, bufferSwitchPtr = bufferSwitch)
                    {
                        ImageProcessorLib.open(bufferSrcPtr, bufferDstPtr,width, height, width, bufferSwitchPtr);
                    }
                }
            }

            //extract point!
            // Native blob/point extraction writes (x, y) pairs into fingersRaw
            // and returns how many points it found.
            int fingersNum;
            unsafe
            {
                fixed (byte* bufferSrcPtr = bufferSrc, bufferDstPtr = bufferDst, bufferSwitchPtr = bufferSwitch)
                {
                    fixed (int* fingersRawPtr = fingersRaw)
                    {
                        fingersNum = ImageProcessorLib.extractPoints(bufferDstPtr, null, width, height, width,
                            bufferSwitchPtr, MAX_FINGERS, 20, fingersRawPtr);
                    }
                }
            }

            // Publish results; fingersRaw holds interleaved x,y pairs.
            Fingers.Clear();
            for (int i = 0; i < fingersNum; i++)
            {
                Fingers.Add(new Point3D(fingersRaw[2 * i], fingersRaw[2 * i + 1], 0));  //TODO: depth
            }
        }
 /// <summary>
 /// True when the two points' Y coordinates differ by less than
 /// MAX_DIMMING_HAND_OFFSET_Y.
 /// </summary>
 private bool VerticallyClose(OpenNI.Point3D p0, OpenNI.Point3D p1)
 {
     var verticalGap = Math.Abs(p0.Y - p1.Y);
     return verticalGap < MAX_DIMMING_HAND_OFFSET_Y;
 }
Пример #35
0
 /// <summary>
 /// Selects the image under the given hand point (if any), remembering
 /// its index and database id.
 /// </summary>
 public void checkAndSelectImage(OpenNI.Point3D handPoint)
 {
     int hitIndex = collection.findImageAt(handPoint);
     if (hitIndex <= -1)
     {
         // No image under the hand point.
         return;
     }

     collection.selectImageAtIndex(hitIndex);
     imageSelectedIndex = hitIndex;
     imageSelectedId = collection.getIdImageAtIndex(imageSelectedIndex);
 }
Пример #36
0
 /// <summary>
 /// Releases the OpenNI runtime when this instance is disposed.
 /// </summary>
 public void Dispose() => OpenNI.Shutdown();
Пример #37
0
        /// <summary>
        /// Check if two points lie within 100 pixels of each other:
        /// returns true if they do, false otherwise.
        /// </summary>
        public bool pointsOverlap(OpenNI.Point3D pointOne, Point pointTwo)
        {
            double dx = (double)(pointTwo.X - pointOne.X);
            double dy = (double)(pointTwo.Y - pointOne.Y);

            // Euclidean distance against the 100-pixel overlap threshold.
            return Math.Sqrt(dx * dx + dy * dy) < 100;
        }
Пример #38
0
        /// <summary>
        /// Hand-created callback: on the UI thread, builds a tracking-data
        /// control for the new user (keyed by user id), labels and colors
        /// it, and adds it to the hand-tracking panel.
        /// </summary>
        void HandTracker_HandCreate(object sender, OpenNI.HandCreateEventArgs e)
        {
            Dispatcher.BeginInvoke((Action)delegate
            {
                var control = new TrackingDataControl();
                control.Tag = e.UserID;
                handTrackingControlMap.Add(e.UserID, control);

                // Label and color the control by user id.
                control.idLabel.Content = e.UserID;
                control.colorRect.Fill = new SolidColorBrush(IntColorConverter.ToColor(e.UserID));

                handTrackingStackPanel.Children.Add(control);
            });
        }
Пример #39
0
    /// <summary>
    /// Reads one skeleton joint for the given player and, when available,
    /// writes its converted position/rotation plus confidence values into
    /// jointData. Leaves jointData untouched when the joint cannot be read.
    /// </summary>
    private void UpdateKinectJointData(OpenNI.SkeletonJoint joint, int player, ref JointData jointData)
    {
        OpenNI.SkeletonJointTransformation transform;
        bool gotJoint = playerManager.GetPlayer(player).GetSkeletonJoint(joint, out transform);
        if (!gotJoint)
        {
            return;
        }

        // Convert raw Kinect data into the RUIS coordinate system.
        jointData.position = coordinateSystem.ConvertLocation (coordinateSystem.ConvertRawKinectLocation(transform.Position.Position), RUISDevice.Kinect_1);
        jointData.positionConfidence = transform.Position.Confidence;
        jointData.rotation = coordinateSystem.ConvertRotation (coordinateSystem.ConvertRawKinectRotation(transform.Orientation), RUISDevice.Kinect_1);
        jointData.rotationConfidence = transform.Orientation.Confidence;
    }
Пример #40
0
        /// <summary>
        /// Hand-destroyed callback: on the UI thread, removes the user's
        /// tracking control from the panel and drops it from the map.
        /// </summary>
        void HandTracker_HandDestroy(object sender, OpenNI.HandDestroyEventArgs e)
        {
            Dispatcher.BeginInvoke((Action)delegate
            {
                // Single dictionary lookup instead of ContainsKey + indexer.
                TrackingDataControl control;
                if (!handTrackingControlMap.TryGetValue(e.UserID, out control))
                {
                    return;
                }

                handTrackingStackPanel.Children.Remove(control);
                handTrackingControlMap.Remove(e.UserID);
            });
        }