public SimpleSkeletonHandTracker(int width, int height, CoordinateMapper coordMapper,
                                 int bufferSize = 1)
{
    mapper = new CoordinateConverter(coordMapper, HandInputParams.ColorImageFormat,
                                     HandInputParams.DepthImageFormat);
    Init(width, height);
}
Example #2
        /// <summary>
        /// Initializes a new instance of the FaceTracker class from a reference to the Kinect device.
        /// </summary>
        /// <param name="sensor">Reference to the Kinect sensor instance.</param>
        public FaceTracker(KinectSensor sensor)
        {
            if (sensor == null)
            {
                throw new ArgumentNullException("sensor");
            }

            if (!sensor.ColorStream.IsEnabled)
            {
                throw new InvalidOperationException("Color stream is not enabled yet.");
            }

            if (!sensor.DepthStream.IsEnabled)
            {
                throw new InvalidOperationException("Depth stream is not enabled yet.");
            }

            this.operationMode = OperationMode.Kinect;
            this.coordinateMapper = sensor.CoordinateMapper;
            this.initializationColorImageFormat = sensor.ColorStream.Format;
            this.initializationDepthImageFormat = sensor.DepthStream.Format;

            var newColorCameraConfig = new CameraConfig(
                (uint)sensor.ColorStream.FrameWidth,
                (uint)sensor.ColorStream.FrameHeight,
                sensor.ColorStream.NominalFocalLengthInPixels,
                FaceTrackingImageFormat.FTIMAGEFORMAT_UINT8_B8G8R8X8);
            var newDepthCameraConfig = new CameraConfig(
                (uint)sensor.DepthStream.FrameWidth,
                (uint)sensor.DepthStream.FrameHeight,
                sensor.DepthStream.NominalFocalLengthInPixels,
                FaceTrackingImageFormat.FTIMAGEFORMAT_UINT16_D13P3);
            this.Initialize(newColorCameraConfig, newDepthCameraConfig, IntPtr.Zero, IntPtr.Zero, this.DepthToColorCallback);
        }
Example #3
 public HandJointTracker(CoordinateMapper coordMapper)
 {
     this.coordMapper = coordMapper;
     imageStorage = new byte[height, width, 3];
     gray = new Image<Gray, byte>(width, height);
     scaled = new Image<Gray, byte>(width / 4, height / 4);
 }
Example #4
        // Constructor
        public KinectRecorder(KinectRecordOptions options, CoordinateMapper mapper,
                              float colorFocalLength, float depthFocalLength, Stream stream)
        {
            Options = options;

            recordStream = stream;
            writer = new BinaryWriter(recordStream);

            // Persist the coordinate-mapper parameters so playback can rebuild the mapper.
            var coordParams = mapper.ColorToDepthRelationalParameters;
            int count = coordParams.Count;
            byte[] array = new byte[count];
            coordParams.CopyTo(array, 0);
            writer.Write(count);
            writer.Write(array);
            writer.Write(colorFocalLength);
            writer.Write(depthFocalLength);

            writer.Write((int)Options);

            if ((Options & KinectRecordOptions.Color) != 0)
            {
                colorRecoder = new ColorRecorder(writer);
            }
            if ((Options & KinectRecordOptions.Depth) != 0)
            {
                depthRecorder = new DepthRecorder(writer);
            }
            if ((Options & KinectRecordOptions.Skeletons) != 0)
            {
                skeletonRecorder = new SkeletonRecorder(writer);
            }

            previousFlushDate = DateTime.Now;
        }
Example #5
        private void ProcessJoints(Skeleton skeleton)
        {
            // Create the mapper once rather than once per joint.
            var coordinateMapper = new CoordinateMapper(_kinectSensor);

            foreach (var name in Enum.GetNames(typeof(JointType)))
            {
                var jointType = (JointType)Enum.Parse(typeof(JointType), name);
                var joint = skeleton.Joints[jointType];

                if (joint.TrackingState == JointTrackingState.NotTracked)
                    continue;

                if (jointType == JointType.HandRight)
                    _gestureDetector.Add(joint.Position, _kinectSensor);

                var colorPoint = coordinateMapper.MapSkeletonPointToColorPoint(joint.Position, ColorImageFormat.RgbResolution640x480Fps30);
                if (!_ellipses.ContainsKey(jointType))
                {
                    _ellipses[jointType] = new Ellipse { Width = 20, Height = 20, Fill = Brushes.SandyBrown };
                    SkeletonCanvas.Children.Add(_ellipses[jointType]);
                }

                Canvas.SetLeft(_ellipses[jointType], colorPoint.X - _ellipses[jointType].Width / 2);
                Canvas.SetTop(_ellipses[jointType], colorPoint.Y - _ellipses[jointType].Height / 2);
            }
        }
Example #6
		//Run the application async
		static async Task RunAsync()
		{
			//Get the default Kinect sensor
			_kinectSensor = KinectSensor.GetDefault();

			//Open the reader for the body frames
			_bodyFrameReader = _kinectSensor.BodyFrameSource.OpenReader();

			//Set the coordinate mapper
			_coordinateMapper = _kinectSensor.CoordinateMapper;

			//Open the sensor
			_kinectSensor.Open();

			//Check if the sensor is available
			Console.WriteLine("Kinect sensor is " + (_kinectSensor.IsAvailable ? "available" : "missing. Waiting for sensor: press ctrl + c to abort"));
			while (!_kinectSensor.IsAvailable)
			{
				//Wait for the sensor without spinning a CPU core
				await Task.Delay(100);
			}
			Console.WriteLine("Kinect sensor is " + (_kinectSensor.IsAvailable ? "available" : "missing. Waiting for sensor: press ctrl + c to abort"));

			//Init the gesture detector
			_handOverHeadDetector = new HandOverHeadDetector(HandDetectionType.BothHands, HandState.Open);
			//Subscribe to the completed event
			_handOverHeadDetector.GestureCompleteEvent += HandOverHeadDetectorOnGestureCompleteEvent;

			//Start receiving Kinect frames
			if (_bodyFrameReader != null)
			{
				_bodyFrameReader.FrameArrived += Reader_FrameArrived;
			}
		}
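For context, a minimal sketch of the Reader_FrameArrived handler this snippet wires up. The body-frame plumbing follows the Kinect v2 SDK; the _handOverHeadDetector.Update(body) call is an assumption about the gesture library's API, not the original project's code.

		// Minimal sketch of the frame handler (not the original implementation).
		private static Body[] _bodies;

		private static void Reader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
		{
			using (BodyFrame frame = e.FrameReference.AcquireFrame())
			{
				if (frame == null)
				{
					return;
				}

				if (_bodies == null)
				{
					_bodies = new Body[frame.BodyCount];
				}

				// Refresh the body data in place.
				frame.GetAndRefreshBodyData(_bodies);

				foreach (Body body in _bodies)
				{
					if (body == null || !body.IsTracked)
					{
						continue;
					}

					// Assumed gesture-library API: feed each tracked body to the detector.
					_handOverHeadDetector.Update(body);
				}
			}
		}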
Example #7
        public void Initialize(KinectSensor sensor)
        {
            this.sensor = sensor;
            coordinateMapper = new CoordinateMapper(sensor);

            // Prepare to receive RGB image data
            sensor.ColorStream.Enable(ColorImageFormat.RgbResolution1280x960Fps12);
            imageSize = new Int32Rect(0, 0, sensor.ColorStream.FrameWidth, sensor.ColorStream.FrameHeight);
            stride = imageSize.Width * 4; // 4 bytes per pixel: blue, green, red, unused
            colorData = new byte[sensor.ColorStream.FramePixelDataLength];
            ColorBitmap = new WriteableBitmap(imageSize.Width, imageSize.Height, 96, 96, PixelFormats.Bgr32, null);

            TransformSmoothParameters smooth = new TransformSmoothParameters()
            {
                Smoothing = Parameters.Kinect.Smoothing,
                Correction = Parameters.Kinect.Correction,
                Prediction = Parameters.Kinect.Prediction,
                JitterRadius = Parameters.Kinect.JitterRadius,
                MaxDeviationRadius = Parameters.Kinect.MaxDeviationRadius
            };
            sensor.SkeletonStream.TrackingMode = Parameters.Kinect.TrackingMode;
            sensor.SkeletonStream.Enable(smooth);

            sensor.SkeletonFrameReady += new EventHandler<SkeletonFrameReadyEventArgs>(sensor_SkeletonFrameReady);
            sensor.ColorFrameReady += new EventHandler<ColorImageFrameReadyEventArgs>(sensor_ColorFrameReady);
            sensor.Start();
            Initialized = true;
        }
Example #8
 public CoordinateConverter(IEnumerable<byte> kinectParams, ColorImageFormat cif,
                            DepthImageFormat dif)
 {
     mapper = new CoordinateMapper(kinectParams);
     this.cif = cif;
     this.dif = dif;
 }
Example #9
        public static void DrawSkeleton(this Canvas canvas, Body body, CoordinateMapper mapper)
        {
            if (body == null) return;

            foreach (Joint joint in body.Joints.Values)
            {
                canvas.DrawPoint(joint, mapper);
            }

            canvas.DrawLine(body.Joints[JointType.Head], body.Joints[JointType.Neck], mapper);
            canvas.DrawLine(body.Joints[JointType.Neck], body.Joints[JointType.SpineShoulder], mapper);
            canvas.DrawLine(body.Joints[JointType.SpineShoulder], body.Joints[JointType.ShoulderLeft], mapper);
            canvas.DrawLine(body.Joints[JointType.SpineShoulder], body.Joints[JointType.ShoulderRight], mapper);
            canvas.DrawLine(body.Joints[JointType.SpineShoulder], body.Joints[JointType.SpineMid], mapper);
            canvas.DrawLine(body.Joints[JointType.ShoulderLeft], body.Joints[JointType.ElbowLeft], mapper);
            canvas.DrawLine(body.Joints[JointType.ShoulderRight], body.Joints[JointType.ElbowRight], mapper);
            canvas.DrawLine(body.Joints[JointType.ElbowLeft], body.Joints[JointType.WristLeft], mapper);
            canvas.DrawLine(body.Joints[JointType.ElbowRight], body.Joints[JointType.WristRight], mapper);
            canvas.DrawLine(body.Joints[JointType.WristLeft], body.Joints[JointType.HandLeft], mapper);
            canvas.DrawLine(body.Joints[JointType.WristRight], body.Joints[JointType.HandRight], mapper);
            canvas.DrawLine(body.Joints[JointType.HandLeft], body.Joints[JointType.HandTipLeft], mapper);
            canvas.DrawLine(body.Joints[JointType.HandRight], body.Joints[JointType.HandTipRight], mapper);
            canvas.DrawLine(body.Joints[JointType.HandTipLeft], body.Joints[JointType.ThumbLeft], mapper);
            canvas.DrawLine(body.Joints[JointType.HandTipRight], body.Joints[JointType.ThumbRight], mapper);
            canvas.DrawLine(body.Joints[JointType.SpineMid], body.Joints[JointType.SpineBase], mapper);
            canvas.DrawLine(body.Joints[JointType.SpineBase], body.Joints[JointType.HipLeft], mapper);
            canvas.DrawLine(body.Joints[JointType.SpineBase], body.Joints[JointType.HipRight], mapper);
            canvas.DrawLine(body.Joints[JointType.HipLeft], body.Joints[JointType.KneeLeft], mapper);
            canvas.DrawLine(body.Joints[JointType.HipRight], body.Joints[JointType.KneeRight], mapper);
            canvas.DrawLine(body.Joints[JointType.KneeLeft], body.Joints[JointType.AnkleLeft], mapper);
            canvas.DrawLine(body.Joints[JointType.KneeRight], body.Joints[JointType.AnkleRight], mapper);
            canvas.DrawLine(body.Joints[JointType.AnkleLeft], body.Joints[JointType.FootLeft], mapper);
            canvas.DrawLine(body.Joints[JointType.AnkleRight], body.Joints[JointType.FootRight], mapper);
        }
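A brief usage sketch for this extension method, assuming a WPF Canvas field named canvas, an open KinectSensor named sensor, and the DrawPoint/DrawLine extensions from the same helper; the frame acquisition follows the Kinect v2 SDK.

        // Sketch: redraw the skeleton whenever a body frame arrives.
        private Body[] _bodies;

        private void BodyReader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
        {
            using (BodyFrame frame = e.FrameReference.AcquireFrame())
            {
                if (frame == null) return;

                if (_bodies == null)
                {
                    _bodies = new Body[frame.BodyCount];
                }

                frame.GetAndRefreshBodyData(_bodies);

                canvas.Children.Clear();

                foreach (Body body in _bodies)
                {
                    if (body != null && body.IsTracked)
                    {
                        canvas.DrawSkeleton(body, sensor.CoordinateMapper);
                    }
                }
            }
        }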
Example #10
        /// <summary>
        /// Starts a new Task and listens to KinectSensors StatusChanged event.
        /// </summary>
        /// <returns>Eventually returns a kinect sensor when one is connected.</returns>
        public static Task<KinectSensor> GetKinect()
        {
            return Task.Factory.StartNew<KinectSensor>(() =>
            {
                if (kinectSensor != null) return kinectSensor;

                var kinect = KinectSensor.KinectSensors.FirstOrDefault(_ => _.Status == KinectStatus.Connected);
                if (kinect != null)
                {
                    kinectSensor = kinect;
                    return kinectSensor;
                }

                using (var signal = new ManualResetEventSlim())
                {
                    EventHandler<StatusChangedEventArgs> handler = (s, e) =>
                    {
                        if (e.Status == KinectStatus.Connected)
                        {
                            kinectSensor = e.Sensor;
                            coordinateMapper = new CoordinateMapper(kinectSensor);
                            signal.Set();
                        }
                    };

                    KinectSensor.KinectSensors.StatusChanged += handler;

                    try
                    {
                        signal.Wait();
                    }
                    finally
                    {
                        // Unsubscribe so the handler cannot touch the disposed signal later.
                        KinectSensor.KinectSensors.StatusChanged -= handler;
                    }
                }

                return kinectSensor;
            });
        }
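A usage sketch for GetKinect, assuming the method lives on a static helper class (KinectHelper below is a hypothetical name) and Kinect v1 streams are enabled once the sensor arrives.

            // Hypothetical caller: await the connected sensor, then start its streams.
            KinectSensor sensor = await KinectHelper.GetKinect();
            sensor.ColorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30);
            sensor.SkeletonStream.Enable();
            sensor.Start();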
Example #11
 public void Init(
   CoordinateMapper mapper,
   Int32Rect colourFrameSize)
 {
     canvasCoordMapper = new CanvasCoordMapper(canvas, mapper, colourFrameSize);
 }
Example #12
        /// <summary>
        /// Initializes a new instance of the KinectBodyView class
        /// </summary>
        /// <param name="kinectSensor">Active instance of the KinectSensor</param>
        public KinectBodyView(KinectSensor kinectSensor)
        {
            if (kinectSensor == null)
            {
                throw new ArgumentNullException("kinectSensor");
            }

            // get the coordinate mapper
            _coordinateMapper = kinectSensor.CoordinateMapper;

            // get the depth (display) extents
            FrameDescription frameDescription = kinectSensor.DepthFrameSource.FrameDescription;

            // get size of joint space
            _displayWidth = frameDescription.Width;
            _displayHeight = frameDescription.Height;

            // Create the drawing group we'll use for drawing
            _drawingGroup = new DrawingGroup();

            // Create an image source that we can use in our image control
            _imageSource = new DrawingImage(_drawingGroup);
        }
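A minimal sketch of how the drawing group and image source are typically consumed, assuming an ImageSource property bound from XAML and a hypothetical DrawBody helper that maps joints through _coordinateMapper.

        /// <summary>
        /// Gets the bitmap to display; bind an Image control's Source to this property.
        /// </summary>
        public ImageSource ImageSource
        {
            get { return _imageSource; }
        }

        // Hypothetical render step: draw one body into the depth-sized extents.
        public void UpdateBodyFrame(Body body)
        {
            using (DrawingContext dc = _drawingGroup.Open())
            {
                // Paint the background, draw the body, then clip to the display size.
                dc.DrawRectangle(Brushes.Black, null, new Rect(0.0, 0.0, _displayWidth, _displayHeight));

                if (body != null && body.IsTracked)
                {
                    DrawBody(body, dc);   // hypothetical helper
                }

                _drawingGroup.ClipGeometry = new RectangleGeometry(new Rect(0.0, 0.0, _displayWidth, _displayHeight));
            }
        }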
Example #13
 /// <summary>
 /// Creates a new instance of BackgroundRemovalTool.
 /// </summary>
 public BackgroundRemovalTool()
 {
     if (CoordinateMapper == null)
     {
         CoordinateMapper = KinectSensor.GetDefault().CoordinateMapper;
     }
 }
 public SkeletalTracker(Rectangle fullscreen, CoordinateMapper coordinateMapper, ColorImageFormat colorFormat)
 {
     _coordinateMapper = coordinateMapper;
     _colorFormat = colorFormat;
     _fullscreen = fullscreen;
     _aspectRatio = _fullscreen.Width / (double)_fullscreen.Height;
 }
Example #15
        /// <summary>
        /// Update the map from a depth frame
        /// </summary>
        /// <param name="coordinateMapper">The coordinate mapper.</param>
        /// <param name="depthFrame">The source depth frame data.</param>
        public void Update(CoordinateMapper coordinateMapper, DepthFrameData depthFrame)
        {
            if (this.dataPointer == IntPtr.Zero)
                throw new ObjectDisposedException("ColorToDepthFrameData");

            coordinateMapper.MapColorFrameToDepthSpaceUsingIntPtr(depthFrame.DataPointer, (uint)depthFrame.SizeInBytes, this.dataPointer, (uint)this.sizeInBytes);
        }
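The call above is a thin wrapper over CoordinateMapper.MapColorFrameToDepthSpaceUsingIntPtr from the Kinect v2 SDK. For reference, a sketch of the equivalent mapping using the SDK's managed array overload, without the DepthFrameData wrapper:

        // Sketch: map every color pixel into depth space with the array-based overload.
        private void MapColorToDepth(CoordinateMapper mapper, DepthFrame depthFrame, int colorWidth, int colorHeight)
        {
            ushort[] depthData = new ushort[depthFrame.FrameDescription.Width * depthFrame.FrameDescription.Height];
            depthFrame.CopyFrameDataToArray(depthData);

            // One DepthSpacePoint per color pixel; unmappable pixels come back as negative infinity.
            DepthSpacePoint[] colorToDepth = new DepthSpacePoint[colorWidth * colorHeight];
            mapper.MapColorFrameToDepthSpace(depthData, colorToDepth);
        }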
Example #16
 public BodyDrawer(CoordinateMapper coordinateMapper, FrameDescription frameDescription, DrawingGroup drawingGroup)
 {
     this.bonesToDraw = (new BodyInitializer()).GetBones();
     this.coordinateMapper = coordinateMapper;
     this.displayHeight = frameDescription.Height;
     this.displayWidth = frameDescription.Width;
     this.drawingGroup = drawingGroup;
 }
        /// <summary>
        /// Constructor
        /// </summary>
        /// <param name="kinect">The Kinect sensor.</param>
        /// <param name="canvas">The canvas to draw on.</param>
        public KinectSkeletonDrawer(KinectSensor kinect, Canvas canvas)
        {
            this.kinect = kinect;
            this.mapper = kinect.CoordinateMapper;
            this.canvas = canvas;

            SkeletonConvert = Skeleton2DPoint.Depth;
        }
Example #18
 public HandSliderControl(HandDetectionType handDetectionType, HandState requiredHandState,
     Directions direction, CoordinateMapper mapper)
     : base(handDetectionType, requiredHandState)
 {
     SmoothLevel = 0;
     Direction = direction;
     _mapper = mapper;
 }
Example #19
        /// <summary>
        /// Maps a 3D skeleton point to a 2D vector.
        /// </summary>
        /// <param name="sensor">The Kinect sensor.</param>
        /// <param name="position">The skeleton point to map.</param>
        /// <param name="coordinateMapper">The coordinate mapper.</param>
        /// <returns>The 2D mapped position.</returns>
        public static Vector2 Convert(KinectSensor sensor, SkeletonPoint position, CoordinateMapper coordinateMapper)
        {
            float width = 0;
            float height = 0;
            float x = 0;
            float y = 0;

            if (sensor.ColorStream.IsEnabled)
            {
                var colorPoint = coordinateMapper.MapSkeletonPointToColorPoint(position, sensor.ColorStream.Format);
                x = colorPoint.X;
                y = colorPoint.Y;

                switch (sensor.ColorStream.Format)
                {
                    case ColorImageFormat.RawYuvResolution640x480Fps15:
                    case ColorImageFormat.RgbResolution640x480Fps30:
                    case ColorImageFormat.YuvResolution640x480Fps15:
                        width = 640;
                        height = 480;
                        break;
                    case ColorImageFormat.RgbResolution1280x960Fps12:
                        width = 1280;
                        height = 960;
                        break;
                    default:
                        // Fall back to the stream's reported size for any other color format.
                        width = sensor.ColorStream.FrameWidth;
                        height = sensor.ColorStream.FrameHeight;
                        break;
                }
            }
            else if (sensor.DepthStream.IsEnabled)
            {
                var depthPoint = coordinateMapper.MapSkeletonPointToDepthPoint(position, sensor.DepthStream.Format);
                x = depthPoint.X;
                y = depthPoint.Y;

                switch (sensor.DepthStream.Format)
                {
                    case DepthImageFormat.Resolution80x60Fps30:
                        width = 80;
                        height = 60;
                        break;
                    case DepthImageFormat.Resolution320x240Fps30:
                        width = 320;
                        height = 240;
                        break;
                    case DepthImageFormat.Resolution640x480Fps30:
                        width = 640;
                        height = 480;
                        break;
                    default:
                        // Fall back to the stream's reported size for any other depth format.
                        width = sensor.DepthStream.FrameWidth;
                        height = sensor.DepthStream.FrameHeight;
                        break;
                }
            }
            else
            {
                width = 1;
                height = 1;
            }

            return new Vector2(x / width, y / height);
        }
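A short usage sketch, assuming a tracked Kinect v1 Skeleton, a viewport size, and that Convert is called from within the same class; Vector2 matches the XNA-style return type above.

            // Sketch: normalize the right hand to [0, 1], then scale to the viewport.
            Joint handRight = skeleton.Joints[JointType.HandRight];
            Vector2 normalized = Convert(sensor, handRight.Position, sensor.CoordinateMapper);

            float screenX = normalized.X * viewportWidth;
            float screenY = normalized.Y * viewportHeight;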
Example #20
 public CanvasCoordMapper(
   Canvas canvas,
   CoordinateMapper mapper,
   Int32Rect colourFrameSize)
 {
     this.canvas = canvas;
     this.mapper = mapper;
     this.colourFrameSize = colourFrameSize;
 }
Example #21
        public GlobalCoordinateSystem(KinectSensor sensor)
        {
            this.kinectSensor = sensor;

            // get the coordinate mapper
            this.coordinateMapper = this.kinectSensor.CoordinateMapper;

            this.hipMeasurements = new List<Vector3D>();
            this.screenEdgeMeasurements = new List<Vector3D>();
        }
Example #22
        public static Vector2 Convert(CoordinateMapper mapper, SkeletonPoint position,
                                      Object format = null)
        {
            // Default to 1 so an unknown or missing format cannot cause a division by zero.
            float width = 1;
            float height = 1;
            float x = 0;
            float y = 0;

            if (format is ColorImageFormat)
            {
                var colorFormat = (ColorImageFormat)format;
                var colorPoint = mapper.MapSkeletonPointToColorPoint(position, colorFormat);
                x = colorPoint.X;
                y = colorPoint.Y;

                switch (colorFormat)
                {
                    case ColorImageFormat.RawYuvResolution640x480Fps15:
                    case ColorImageFormat.RgbResolution640x480Fps30:
                    case ColorImageFormat.YuvResolution640x480Fps15:
                        width = 640;
                        height = 480;
                        break;
                    case ColorImageFormat.RgbResolution1280x960Fps12:
                        width = 1280;
                        height = 960;
                        break;
                }
            }
            else if (format is DepthImageFormat)
            {
                var depthFormat = (DepthImageFormat)format;
                var depthPoint = mapper.MapSkeletonPointToDepthPoint(position, depthFormat);
                x = depthPoint.X;
                y = depthPoint.Y;

                switch (depthFormat)
                {
                    case DepthImageFormat.Resolution80x60Fps30:
                        width = 80;
                        height = 60;
                        break;
                    case DepthImageFormat.Resolution320x240Fps30:
                        width = 320;
                        height = 240;
                        break;
                    case DepthImageFormat.Resolution640x480Fps30:
                        width = 640;
                        height = 480;
                        break;
                }
            }

            return new Vector2(x / width, y / height);
        }
		private OSCTransmitter osc;				// OSC transmitter

		public MainWindow()
		{
			this.sensor = KinectSensor.GetDefault();
			this.bodyCount = this.sensor.BodyFrameSource.BodyCount;
			this.coordinateMapper = this.sensor.CoordinateMapper;					// The coordinate mapper
			FrameDescription frameDescription = this.sensor.DepthFrameSource.FrameDescription;	// The depth (display) extents
			this.displayWidth = frameDescription.Width;						// Size of joint space
			this.displayHeight = frameDescription.Height;
			this.InitializeComponent();
			this.DataContext = this;
		}
Example #24
        public MainWindow()
        {
            this.kinectSensor = KinectSensor.GetDefault();

            this.multiFrameSourceReader = this.kinectSensor.OpenMultiSourceFrameReader(FrameSourceTypes.Depth | FrameSourceTypes.Color | FrameSourceTypes.BodyIndex);

            this.coordinateMapper = this.kinectSensor.CoordinateMapper;

            FrameDescription depthFrameDescription = this.kinectSensor.DepthFrameSource.FrameDescription;

            InitializeComponent();
        }
Example #25
        public Kinect(MainWindow m)
        {
            mainWindow = m;

            if (KinectSensor.KinectSensors.Count == 0)
            {
                throw new Exception("Kinectを接続してください");
            }

            kinect = KinectSensor.KinectSensors[0];
            coodinateMapper = kinect.CoordinateMapper;
        }
Example #26
        internal Finger(DepthPointEx point, CoordinateMapper coordinateMapper)
        {
            ushort depth = (ushort)point.Z;

            DepthPoint = new DepthSpacePoint
            {
                X = point.X,
                Y = point.Y
            };

            ColorPoint = coordinateMapper.MapDepthPointToColorSpace(DepthPoint, depth);

            CameraPoint = coordinateMapper.MapDepthPointToCameraSpace(DepthPoint, depth);
        }
Example #27
        /// <summary>
        /// Constructor
        /// </summary>
        /// <param name="body">Kinect body</param>
        /// <param name="coordinateMapper">Coordinate mapper</param>
        public ColorSpaceKinectJoints(KinectBody body, CoordinateMapper coordinateMapper)
        {
            if (body == null)
                throw new ArgumentNullException("body");
            if (coordinateMapper == null)
                throw new ArgumentNullException("coordinateMapper");

            this.jointPositions = new Dictionary<JointType,ColorSpacePoint>();

            foreach (Joint joint in body.Joints.Values)
            {
                this.jointPositions.Add(joint.JointType, coordinateMapper.MapCameraPointToColorSpace(joint.Position));
            }
        }
Example #28
        public KioskInteractionService(ISensorService<KinectSensor> sensorService,
            IDemographicsService demographicsService,
            IItemInteractionService itemInteractionService,
            IBodyTrackingService bodyTrackingService,
            IConfigurationProvider configurationProvider)
        {
            _currentZone = "NoTrack";
            _demographicService = demographicsService;

            _eventHub = new EventHubMessageSender(ConfigurationManager.AppSettings["Azure.Hub.Kiosk"]);

            _sensorService = sensorService;
              //  _telemetryService = telemetryService;

            _itemInteractionService = itemInteractionService;
            _itemInteractionService.ItemInteraction += _itemInteractionService_ItemInteraction;
            _coordinateMapper = _sensorService.Sensor.CoordinateMapper;

            _configurationProvider = configurationProvider;
            _configurationProvider.ConfigurationSettingsChanged += _configurationProvider_ConfigurationSettingsChanged;
            GetConfig();

            _sensorService.StatusChanged += _sensorService_StatusChanged;
            _bodyFrameReader = _sensorService.Sensor.BodyFrameSource.OpenReader();
            if (_bodyFrameReader != null)
                _bodyFrameReader.FrameArrived += _bodyFrameReader_FrameArrived;

            _sensorService.Open();

            _interactionProcessingQueue = new BlockingCollection<KioskStateEventArgs>();
            {
                IObservable<KioskStateEventArgs> ob = _interactionProcessingQueue.
                  GetConsumingEnumerable().
                  ToObservable(TaskPoolScheduler.Default);

                ob.Subscribe(p =>
                {
                    //var temp = Thread.CurrentThread.ManagedThreadId;
                    // This handler is called whenever an item appears
                    // on the interaction-processing queue.
                    this.SendIteraction(p);
                    //Debug.Write("Consuming: {0}\n", p);
                });
            }

            _bodyTrackingService = bodyTrackingService;

            CurrentState = KioskStates.NoTrack;
        }
        public ConfigurationTool(SessionManager sessionManager, KinectProcessor kinectProcessor)
        {
            InitializeComponent();

            _sessionManager = sessionManager;

            _coordinateMapper = kinectProcessor.CoordinateMapper;
            kinectProcessor.Reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;
            _localSessions = new List<TabData>();
            _tabList = new TabList();

            // Start timer for flag updating thread
            _timer = new Timer(TimerTick, null, 0, 100);
            _thread = new Thread(updateFlags);
        }
        public RecordWindow()
        {
            // Initialize basic settings

            // Kinect-related initialization
            this.kinectSensor = KinectSensor.GetDefault();
            this.multiFrameSourceReader = this.kinectSensor.OpenMultiSourceFrameReader(FrameSourceTypes.Depth | FrameSourceTypes.Color | FrameSourceTypes.Body | FrameSourceTypes.BodyIndex);
            this.coordinateMapper = this.kinectSensor.CoordinateMapper;

            FrameDescription depthFrameDescription = this.kinectSensor.DepthFrameSource.FrameDescription;
            FrameDescription colorFrameDescription = this.kinectSensor.ColorFrameSource.FrameDescription;

            this.depthWidth = depthFrameDescription.Width;
            this.depthHeight = depthFrameDescription.Height;
            this.colorWidth = colorFrameDescription.Width;
            this.colorHeight = colorFrameDescription.Height;

            this.motionDataHandler = new MotionDataHandler(this.recordPath , this.colorWidth, this.colorHeight, this.depthWidth, this.depthHeight);            

            // Drawing-related setup
            this.drawingGroup = new DrawingGroup();
            this.imageSource = new DrawingImage(this.drawingGroup);
            this.colorBitmap = new WriteableBitmap(this.colorWidth, this.colorHeight, 96.0, 96.0, PixelFormats.Bgra32, null);

            // allocate space to put the pixels being received
            this.colorPixels = new byte[this.colorWidth * this.colorHeight * this.bytesPerPixel];
            this.depthBuffer = new ushort[this.depthWidth * this.depthHeight];
            this.bodyIndexBuffer = new byte[this.depthWidth * this.depthHeight];

            // a bone defined as a line between two joints
            this.bones = Utility.GetBones();

            // populate body colors, one for each BodyIndex
            this.bodyColors = new List<Pen>();
            this.bodyColors.Add(new Pen(Brushes.Red, 6));
            this.bodyColors.Add(new Pen(Brushes.Orange, 6));
            this.bodyColors.Add(new Pen(Brushes.Green, 6));
            this.bodyColors.Add(new Pen(Brushes.Blue, 6));
            this.bodyColors.Add(new Pen(Brushes.Indigo, 6));
            this.bodyColors.Add(new Pen(Brushes.Violet, 6));

            this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;
            this.kinectSensor.Open();

            this.DataContext = this;

            this.InitializeComponent();
        }
Example #31
    public override void InitializeSensor()
    {
        kinectInitialized = false;

        try
        {
            sensor = KinectSensor.GetDefault();
            if (sensor != null)
            {
                coordinateMapper = sensor.CoordinateMapper;
                if (!sensor.IsOpen)
                {
                    sensor.Open();
                }
            }
            else
            {
                throw new Exception("Kinect SDK V2.0 initialization Failed");
            }

            colorFrameReader = sensor.ColorFrameSource.OpenReader();
            if (colorFrameReader == null)
            {
                throw new Exception("Cannot open color stream");
            }

            infraredFrameReader = sensor.InfraredFrameSource.OpenReader();
            if (infraredFrameReader == null)
            {
                throw new Exception("Cannot open long-exposure infrared stream");
            }

            depthFrameReader = sensor.DepthFrameSource.OpenReader();
            if (depthFrameReader == null)
            {
                throw new Exception("Cannot open depth stream");
            }

            DontDestroyOnLoad(gameObject);
        }
        catch (DllNotFoundException e)
        {
            string message = "Please import KinectUnityAddin.dll.";
            Debug.LogError(message);
            Debug.LogError(e.ToString());

            return;
        }
        catch (Exception e)
        {
            string message = e.Message;
            Debug.LogError(message);
            Debug.LogError(e.ToString());
            return;
        }

        InitializeStorage();

        Instance = this;

        Debug.Log("Kinect V2 is initialized");
        kinectInitialized = true;
    }
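A sketch of how these readers are typically polled from Unity's Update loop rather than via events; depthData is an assumed buffer allocated in InitializeStorage().

    // Sketch: poll the depth reader once per Unity frame.
    void Update()
    {
        if (!kinectInitialized || depthFrameReader == null)
        {
            return;
        }

        using (DepthFrame frame = depthFrameReader.AcquireLatestFrame())
        {
            if (frame == null)
            {
                return;   // no new frame since the last poll
            }

            frame.CopyFrameDataToArray(depthData);   // depthData: preallocated ushort[]
        }
    }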
Example #32
 public KinectWall(Canvas canvas, CoordinateMapper coMapper)
 {
     wCanvas    = canvas;
     wallMapper = coMapper;
 }
        /// <summary>
        /// Returns only the bounding points for the face (in order so you can draw a loop)
        /// </summary>
        private List <System.Drawing.Point> FaceBoundaryPoints(IReadOnlyList <CameraSpacePoint> vertices, CoordinateMapper mapper)
        {
            /*if (BOUNDING_HIGH_DETAIL_FACE_POINTS == null)
             *  BOUNDING_HIGH_DETAIL_FACE_POINTS = CalculateBoundingHighDefinitionFacePoints(vertices);*/

            return BOUNDING_HIGH_DETAIL_FACE_POINTS.Select(x => this.TranslatePoint(vertices[(int)x], mapper)).ToList();
        }
Example #34
        /// <summary>
        /// Initializes a new instance of the MainWindow class.
        /// </summary>
        public MainWindow()
        {
            this.faceSignalsSubscription = this.faceSignals
                                           .Buffer(TimeSpan.FromSeconds(5))
                                           .Subscribe(
                list =>
            {
                var totalCountOfSignals = list.Count(
                    x => x.Key == FaceProperty.LeftEyeClosed ||
                    x.Key == FaceProperty.RightEyeClosed ||
                    x.Key == FaceProperty.LookingAway);

                if (totalCountOfSignals == 0)
                {
                    return;
                }

                var totalCountOfLookAways  = list.Count(x => x.Key == FaceProperty.LookingAway && x.Value == DetectionResult.Yes);
                var totalCountOfClosedEyes = list.Count(
                    x => (x.Key == FaceProperty.LeftEyeClosed || x.Key == FaceProperty.RightEyeClosed) &&
                    x.Value == DetectionResult.Yes);

                var eyesClosedFiveSecs = totalCountOfClosedEyes > 300;
                var lookedAwayFiveSecs = totalCountOfLookAways > 190;

                var eyesOpenFiveSecs = totalCountOfClosedEyes < 300;
                var lookedFiveSecs   = totalCountOfLookAways < 50;

                //Debug.WriteLine("totalCountOfSignals => " + totalCountOfSignals +
                //    " [LookAways => " + eyesOpenFiveSecs + " / " +
                //    "ClosedEyes => " + totalCountOfClosedEyes + "]");

                if (!this.sleeping && eyesClosedFiveSecs)             // && lookedAwayFiveSecs)
                {
                    this.PushEventToUserInterface("collapsed");
                    this.sleeping = true;
                }
                else if (this.sleeping && (eyesOpenFiveSecs || lookedFiveSecs))
                {
                    this.PushEventToUserInterface("awake");
                    this.sleeping = false;
                }
            });

            // one sensor is currently supported
            this.kinectSensor = KinectSensor.GetDefault();

            // get the coordinate mapper
            this.coordinateMapper = this.kinectSensor.CoordinateMapper;

            // get the color frame details
            FrameDescription frameDescription = this.kinectSensor.ColorFrameSource.FrameDescription;

            // set the display specifics
            this.displayWidth  = frameDescription.Width;
            this.displayHeight = frameDescription.Height;
            this.displayRect   = new Rect(0.0, 0.0, this.displayWidth, this.displayHeight);

            // open the reader for the body frames
            this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

            // wire handler for body frame arrival
            this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

            // set the maximum number of bodies that would be tracked by Kinect
            this.bodyCount = this.kinectSensor.BodyFrameSource.BodyCount;

            // allocate storage to store body objects
            this.bodies = new Body[this.bodyCount];

            // specify the required face frame results
            FaceFrameFeatures faceFrameFeatures =
                FaceFrameFeatures.BoundingBoxInColorSpace
                | FaceFrameFeatures.PointsInColorSpace
                | FaceFrameFeatures.RotationOrientation
                | FaceFrameFeatures.FaceEngagement
                | FaceFrameFeatures.Glasses
                | FaceFrameFeatures.Happy
                | FaceFrameFeatures.LeftEyeClosed
                | FaceFrameFeatures.RightEyeClosed
                | FaceFrameFeatures.LookingAway
                | FaceFrameFeatures.MouthMoved
                | FaceFrameFeatures.MouthOpen;

            // create a face frame source + reader to track each face in the FOV
            this.faceFrameSources = new FaceFrameSource[this.bodyCount];
            this.faceFrameReaders = new FaceFrameReader[this.bodyCount];
            for (int i = 0; i < this.bodyCount; i++)
            {
                // create the face frame source with the required face frame features and an initial tracking Id of 0
                this.faceFrameSources[i] = new FaceFrameSource(this.kinectSensor, 0, faceFrameFeatures);

                // open the corresponding reader
                this.faceFrameReaders[i] = this.faceFrameSources[i].OpenReader();
            }

            // allocate storage to store face frame results for each face in the FOV
            this.faceFrameResults = new FaceFrameResult[this.bodyCount];

            // populate face result colors - one for each face index
            this.faceBrush = new List <Brush>()
            {
                Brushes.White,
                Brushes.Orange,
                Brushes.Green,
                Brushes.Red,
                Brushes.LightBlue,
                Brushes.Yellow
            };

            // set IsAvailableChanged event notifier
            this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            // open the sensor
            this.kinectSensor.Open();

            // set the status text
            this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                            : Properties.Resources.NoSensorStatusText;

            // Create the drawing group we'll use for drawing
            this.drawingGroup = new DrawingGroup();

            // Create an image source that we can use in our image control
            this.imageSource = new DrawingImage(this.drawingGroup);

            // use the window object as the view model in this simple example
            this.DataContext = this;

            // initialize the components (controls) of the window
            this.InitializeComponent();
        }
        /// <summary>
        /// Initializes a new instance of the KinectFaceTrackingResult class from a set of Kinect face points
        /// </summary>
        public KinectFaceTrackingResult(FaceModel faceModel, FaceModel constructedFaceModel, FaceModelBuilderCollectionStatus builderStatus, FaceAlignment faceAlignment, CoordinateMapper mapper)
        {
            this.FaceModel            = faceModel;
            this.ConstructedFaceModel = constructedFaceModel;
            this.BuilderStatus        = builderStatus;
            this.FaceAlignment        = faceAlignment;

            var vertices = faceModel.CalculateVerticesForAlignment(faceAlignment);

            this.ColorSpaceFacePoints = this.FaceBoundaryPoints(vertices, mapper);

            // Calculate facerect manually from facepoints
            var rectX      = this.ColorSpaceFacePoints.Min(x => x.X);
            var rectWidth  = this.ColorSpaceFacePoints.Max(x => x.X) - rectX;
            var rectY      = this.ColorSpaceFacePoints.Min(x => x.Y);
            var rectHeight = this.ColorSpaceFacePoints.Max(x => x.Y) - rectY;

            this.FaceRect = new System.Drawing.Rectangle(rectX, rectY, rectWidth, rectHeight);
        }
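For context, a sketch of where these constructor arguments usually come from, assuming a Microsoft.Kinect.Face high-definition face pipeline is already running; the fields used below are assumptions, not the original project's members.

        // Sketch: build a KinectFaceTrackingResult when an HD face frame arrives.
        private void HdFaceReader_FrameArrived(object sender, HighDefinitionFaceFrameArrivedEventArgs e)
        {
            using (HighDefinitionFaceFrame frame = e.FrameReference.AcquireFrame())
            {
                if (frame == null || !frame.IsFaceTracked)
                {
                    return;
                }

                // Refresh the alignment, then package the result for drawing or serialization.
                frame.GetAndRefreshFaceAlignmentResult(this.faceAlignment);

                var result = new KinectFaceTrackingResult(
                    frame.FaceModel,
                    this.constructedFaceModel,      // assumed: produced earlier by a FaceModelBuilder, may be null
                    this.builderCollectionStatus,   // assumed: last reported FaceModelBuilderCollectionStatus
                    this.faceAlignment,
                    this.sensor.CoordinateMapper);

                this.latestResult = result;         // assumed field consumed elsewhere
            }
        }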
        public MainWindow()
        {
            // Truncate file.txt so each run starts with an empty file.
            using (StreamWriter sw = new StreamWriter("file.txt", false, System.Text.Encoding.Default))
            {
            }

            IPEndPoint ipPoint = new IPEndPoint(IPAddress.Parse("192.168.0.18"), 8080);

            Socket listenSocket = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp);

            try {
                listenSocket.Bind(ipPoint);

                listenSocket.Listen(10);

                handler = listenSocket.Accept();

                /*byte[] data = new byte[256];
                 *
                 * string message = "message";
                 * data = Encoding.UTF8.GetBytes(message);
                 * handler.Send(data);*/
                //handler.Shutdown(SocketShutdown.Both);
                //handler.Close();
            }
            catch (Exception ex) {
                Console.WriteLine(ex.Message);
            }


            // one sensor is currently supported
            this.kinectSensor = KinectSensor.GetDefault();

            // get the coordinate mapper
            this.coordinateMapper = this.kinectSensor.CoordinateMapper;

            // get the depth (display) extents
            FrameDescription frameDescription = this.kinectSensor.DepthFrameSource.FrameDescription;

            // get size of joint space
            this.displayWidth  = frameDescription.Width;
            this.displayHeight = frameDescription.Height;

            // open the reader for the body frames
            this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

            // a bone defined as a line between two joints
            this.bones = new List <Tuple <JointType, JointType> >();

            // Torso
            this.bones.Add(new Tuple <JointType, JointType>(JointType.Head, JointType.Neck));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.Neck, JointType.SpineShoulder));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.SpineMid));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineMid, JointType.SpineBase));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineBase, JointType.HipRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineBase, JointType.HipLeft));

            // Right Arm
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ShoulderRight, JointType.ElbowRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ElbowRight, JointType.WristRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristRight, JointType.HandRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HandRight, JointType.HandTipRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristRight, JointType.ThumbRight));

            // Left Arm
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ShoulderLeft, JointType.ElbowLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ElbowLeft, JointType.WristLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristLeft, JointType.HandLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HandLeft, JointType.HandTipLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristLeft, JointType.ThumbLeft));

            // Right Leg
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HipRight, JointType.KneeRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.KneeRight, JointType.AnkleRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.AnkleRight, JointType.FootRight));

            // Left Leg
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HipLeft, JointType.KneeLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.KneeLeft, JointType.AnkleLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.AnkleLeft, JointType.FootLeft));

            // populate body colors, one for each BodyIndex
            this.bodyColors = new List <Pen>();

            this.bodyColors.Add(new Pen(Brushes.Red, 6));
            this.bodyColors.Add(new Pen(Brushes.Orange, 6));
            this.bodyColors.Add(new Pen(Brushes.Green, 6));
            this.bodyColors.Add(new Pen(Brushes.Blue, 6));
            this.bodyColors.Add(new Pen(Brushes.Indigo, 6));
            this.bodyColors.Add(new Pen(Brushes.Violet, 6));

            // set IsAvailableChanged event notifier
            this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            // open the sensor
            this.kinectSensor.Open();

            // set the status text
            this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                            : Properties.Resources.NoSensorStatusText;

            // Create the drawing group we'll use for drawing
            this.drawingGroup = new DrawingGroup();

            // Create an image source that we can use in our image control
            this.imageSource = new DrawingImage(this.drawingGroup);

            // use the window object as the view model in this simple example
            this.DataContext = this;

            // initialize the components (controls) of the window
            this.InitializeComponent();
        }
Example #37
        /// <summary>
        /// Initializes a new instance of the MainWindow class.
        /// </summary>
        public MainWindow()
        {
            // one sensor is currently supported
            this.kinectSensor = KinectSensor.GetDefault();

            // get the coordinate mapper
            this.coordinateMapper = this.kinectSensor.CoordinateMapper;

            // get the depth (display) extents
            FrameDescription frameDescription = this.kinectSensor.DepthFrameSource.FrameDescription;

            // get size of joint space
            this.displayWidth  = frameDescription.Width;
            this.displayHeight = frameDescription.Height;

            // open the reader for the body frames
            this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

            // a bone defined as a line between two joints
            this.bones = new List <Tuple <JointType, JointType> >();

            // Torso
            this.bones.Add(new Tuple <JointType, JointType>(JointType.Head, JointType.Neck));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.Neck, JointType.SpineShoulder));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.SpineMid));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineMid, JointType.SpineBase));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineBase, JointType.HipRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineBase, JointType.HipLeft));

            // Right Arm
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ShoulderRight, JointType.ElbowRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ElbowRight, JointType.WristRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristRight, JointType.HandRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HandRight, JointType.HandTipRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristRight, JointType.ThumbRight));

            // Left Arm
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ShoulderLeft, JointType.ElbowLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ElbowLeft, JointType.WristLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristLeft, JointType.HandLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HandLeft, JointType.HandTipLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristLeft, JointType.ThumbLeft));

            // Right Leg
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HipRight, JointType.KneeRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.KneeRight, JointType.AnkleRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.AnkleRight, JointType.FootRight));

            // Left Leg
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HipLeft, JointType.KneeLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.KneeLeft, JointType.AnkleLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.AnkleLeft, JointType.FootLeft));

            // populate body colors, one for each BodyIndex
            this.bodyColors = new List <Pen>();

            this.bodyColors.Add(new Pen(Brushes.Red, 6));
            this.bodyColors.Add(new Pen(Brushes.Orange, 6));
            this.bodyColors.Add(new Pen(Brushes.Green, 6));
            this.bodyColors.Add(new Pen(Brushes.Blue, 6));
            this.bodyColors.Add(new Pen(Brushes.Indigo, 6));
            this.bodyColors.Add(new Pen(Brushes.Violet, 6));

            // set IsAvailableChanged event notifier
            this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            // open the sensor
            this.kinectSensor.Open();

            // set the status text
            this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                            : Properties.Resources.NoSensorStatusText;

            // Create the drawing group we'll use for drawing
            this.drawingGroup = new DrawingGroup();

            // Create an image source that we can use in our image control
            this.imageSource = new DrawingImage(this.drawingGroup);

            // use the window object as the view model in this simple example
            this.DataContext = this;

            // initialize the components (controls) of the window
            this.InitializeComponent();
        }
        /// <summary>
        /// Serializes an array of Kinect skeletons into an array of JSON skeletons.
        /// </summary>
        /// <param name="bodies">The Kinect bodies.</param>
        /// <param name="mapper">The coordinate mapper.</param>
        /// <param name="faceFrameResults">The kinect faces.</param>
        /// <returns>A JSON representation of the skeletons.</returns>
        public static string Serialize(this List <Body> bodies, CoordinateMapper mapper, FaceFrameResult[] faceFrameResults)
        {
            JSONBodyCollection jsonBodies = new JSONBodyCollection {
                Bodies = new List <JSONBody>()
            };

            foreach (Body body in bodies)
            {
                JSONBody jsonBody = new JSONBody
                {
                    ID     = body.TrackingId.ToString(),
                    Joints = new List <JSONJoint>()
                };

                foreach (KeyValuePair <JointType, Joint> jointpair in body.Joints)
                {
                    Joint joint = jointpair.Value;

                    DepthSpacePoint depthPoint = mapper.MapCameraPointToDepthSpace(joint.Position);

                    jsonBody.Joints.Add(new JSONJoint
                    {
                        Name = joint.JointType.ToString().ToLower(),
                        MapX = depthPoint.X,
                        MapY = depthPoint.Y,
                        MapZ = joint.Position.Z,
                        X    = body.Joints[joint.JointType].Position.X,
                        Y    = body.Joints[joint.JointType].Position.Y,
                        Z    = body.Joints[joint.JointType].Position.Z,

                        // absolute
                        Quaternion_W = body.JointOrientations[joint.JointType].Orientation.W,
                        Quaternion_X = body.JointOrientations[joint.JointType].Orientation.X,
                        Quaternion_Y = body.JointOrientations[joint.JointType].Orientation.Y,
                        Quaternion_Z = body.JointOrientations[joint.JointType].Orientation.Z,

                        IsTracked = (body.Joints[joint.JointType].TrackingState == TrackingState.Tracked)
                    });
                }

                // Associate the face result with this body by tracking ID
                FaceFrameResult associatedFace = null;
                foreach (var f in faceFrameResults)
                {
                    if (f == null)
                    {
                        continue;
                    }
                    if (f.TrackingId == body.TrackingId)
                    {
                        associatedFace = f;
                        break;
                    }
                }
                if (associatedFace != null)
                {
                    jsonBody.Face = new JSONFace
                    {
                        Quaternion_W = associatedFace.FaceRotationQuaternion.W,
                        Quaternion_X = associatedFace.FaceRotationQuaternion.X,
                        Quaternion_Y = associatedFace.FaceRotationQuaternion.Y,
                        Quaternion_Z = associatedFace.FaceRotationQuaternion.Z,

                        MouthOpened    = (associatedFace.FaceProperties[FaceProperty.MouthOpen] == DetectionResult.Maybe || associatedFace.FaceProperties[FaceProperty.MouthOpen] == DetectionResult.Yes),
                        MouthMoved     = (associatedFace.FaceProperties[FaceProperty.MouthMoved] == DetectionResult.Maybe || associatedFace.FaceProperties[FaceProperty.MouthMoved] == DetectionResult.Yes),
                        LeftEyeClosed  = (associatedFace.FaceProperties[FaceProperty.LeftEyeClosed] == DetectionResult.Maybe || associatedFace.FaceProperties[FaceProperty.LeftEyeClosed] == DetectionResult.Yes),
                        RightEyeClosed = (associatedFace.FaceProperties[FaceProperty.RightEyeClosed] == DetectionResult.Maybe || associatedFace.FaceProperties[FaceProperty.RightEyeClosed] == DetectionResult.Yes)
                    };
                }

                // Determine the posture: standing, sitting, or lying down
                int posture = PostureDetector.Detect(body);
                jsonBody.Posture = posture;

                jsonBodies.Bodies.Add(jsonBody);
            }

            return Serialize(jsonBodies);
        }
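A short usage sketch of the Serialize extension, assuming tracked bodies and face frame results are already maintained by a frame handler; SendToClients is an assumed transport method, not part of the snippet above.

            // Sketch: serialize the tracked bodies to JSON and push them to listeners.
            List<Body> trackedBodies = this.bodies.Where(b => b != null && b.IsTracked).ToList();
            string json = trackedBodies.Serialize(this.kinectSensor.CoordinateMapper, this.faceFrameResults);
            SendToClients(json);   // assumed transport (WebSocket, OSC, etc.)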
        /// <summary>
        /// Updates the finger-detection engine with the new data.
        /// </summary>
        /// <param name="data">A pointer to an array of depth data.</param>
        /// <param name="body">The body to search for hands and fingers.</param>
        public unsafe void Update(ushort *data, Body body)
        {
            if (data == null || body == null)
            {
                return;
            }

            if (DepthWidth == 0)
            {
                DepthWidth = DEFAULT_DEPTH_WIDTH;
            }

            if (DepthHeight == 0)
            {
                DepthHeight = DEFAULT_DEPTH_HEIGHT;
            }

            if (_handPixelsLeft == null)
            {
                _handPixelsLeft = new byte[DepthWidth * DepthHeight];
            }

            if (_handPixelsRight == null)
            {
                _handPixelsRight = new byte[DepthWidth * DepthHeight];
            }

            //Hand handLeft = null;
            Hand handRight = null;

            //Joint jointHandLeft = body.Joints[JointType.HandLeft];
            Joint jointHandRight = body.Joints[JointType.HandRight];
            //Joint jointWristLeft = body.Joints[JointType.WristLeft];
            Joint jointWristRight = body.Joints[JointType.WristRight];
            //Joint jointTipLeft = body.Joints[JointType.HandTipLeft];
            Joint jointTipRight = body.Joints[JointType.HandTipRight];
            //Joint jointThumbLeft = body.Joints[JointType.ThumbLeft];
            Joint jointThumbRight = body.Joints[JointType.ThumbRight];

            //DepthSpacePoint depthPointHandLeft = CoordinateMapper.MapCameraPointToDepthSpace(jointHandLeft.Position);
            //DepthSpacePoint depthPointWristLeft = CoordinateMapper.MapCameraPointToDepthSpace(jointWristLeft.Position);
            //DepthSpacePoint depthPointTipLeft = CoordinateMapper.MapCameraPointToDepthSpace(jointTipLeft.Position);
            //DepthSpacePoint depthPointThumbLeft = CoordinateMapper.MapCameraPointToDepthSpace(jointThumbLeft.Position);

            DepthSpacePoint depthPointHandRight  = CoordinateMapper.MapCameraPointToDepthSpace(jointHandRight.Position);
            DepthSpacePoint depthPointWristRight = CoordinateMapper.MapCameraPointToDepthSpace(jointWristRight.Position);
            DepthSpacePoint depthPointTipRight   = CoordinateMapper.MapCameraPointToDepthSpace(jointTipRight.Position);
            DepthSpacePoint depthPointThumbRight = CoordinateMapper.MapCameraPointToDepthSpace(jointThumbRight.Position);

            //float handLeftX = depthPointHandLeft.X;
            //float handLeftY = depthPointHandLeft.Y;
            //float wristLeftX = depthPointWristLeft.X;
            //float wristLeftY = depthPointWristLeft.Y;
            //float tipLeftX = depthPointTipLeft.X;
            //float tipLeftY = depthPointTipLeft.Y;
            //float thumbLeftX = depthPointThumbLeft.X;
            //float thumbLeftY = depthPointThumbLeft.Y;

            float handRightX  = depthPointHandRight.X;
            float handRightY  = depthPointHandRight.Y;
            float wristRightX = depthPointWristRight.X;
            float wristRightY = depthPointWristRight.Y;
            float tipRightX   = depthPointTipRight.X;
            float tipRightY   = depthPointTipRight.Y;
            float thumbRightX = depthPointThumbRight.X;
            float thumbRightY = depthPointThumbRight.Y;

            //bool searchForLeftHand = DetectLeftHand && !float.IsInfinity(handLeftX) && !float.IsInfinity(handLeftY) && !float.IsInfinity(wristLeftX) && !float.IsInfinity(wristLeftY) && !float.IsInfinity(tipLeftX) && !float.IsInfinity(tipLeftY) && !float.IsInfinity(thumbLeftX) && !float.IsInfinity(thumbLeftY);
            bool searchForRightHand = DetectRightHand && !float.IsInfinity(handRightX) && !float.IsInfinity(handRightY) && !float.IsInfinity(wristRightX) && !float.IsInfinity(wristRightY) && !float.IsInfinity(tipRightX) && !float.IsInfinity(tipRightY) && !float.IsInfinity(thumbRightX) && !float.IsInfinity(thumbRightY);

            if (/*searchForLeftHand ||*/ searchForRightHand)
            {
                //double distanceLeft = searchForLeftHand ? CalculateDistance(handLeftX, handLeftY, tipLeftX, tipLeftY, thumbLeftX, thumbLeftY) : 0.0;
                double distanceRight = searchForRightHand ? CalculateDistance(handRightX, handRightY, tipRightX, tipRightY, thumbRightX, thumbRightY) : 0.0;

                //double angleLeft = searchForLeftHand ? DepthPointEx.Angle(wristLeftX, wristLeftY, wristLeftX, 0, handLeftX, handLeftY) : 0.0;
                double angleRight = searchForRightHand ? DepthPointEx.Angle(wristRightX, wristRightY, wristRightX, 0, handRightX, handRightY) : 0.0;

                //int minLeftX = searchForLeftHand ? (int)(handLeftX - distanceLeft) : 0;
                //int minLeftY = searchForLeftHand ? (int)(handLeftY - distanceLeft) : 0;
                //int maxLeftX = searchForLeftHand ? (int)(handLeftX + distanceLeft) : 0;
                //int maxLeftY = searchForLeftHand ? (int)(handLeftY + distanceLeft) : 0;

                int minRightX = searchForRightHand ? (int)(handRightX - distanceRight) : 0;
                int minRightY = searchForRightHand ? (int)(handRightY - distanceRight) : 0;
                int maxRightX = searchForRightHand ? (int)(handRightX + distanceRight) : 0;
                int maxRightY = searchForRightHand ? (int)(handRightY + distanceRight) : 0;

                //float depthLeft = jointHandLeft.Position.Z * 1000; // m to mm
                float depthRight = jointHandRight.Position.Z * 1000;

                for (int i = 0; i < DepthWidth * DepthHeight; ++i)
                {
                    ushort depth = data[i];

                    int depthX = i % DepthWidth;
                    int depthY = i / DepthWidth;

                    bool isInBounds = depth >= MIN_DEPTH && depth <= MAX_DEPTH;

                    //bool conditionLeft = depth >= depthLeft - DEPTH_THRESHOLD &&
                    //                     depth <= depthLeft + DEPTH_THRESHOLD &&
                    //                     depthX >= minLeftX && depthX <= maxLeftX &&
                    //                     depthY >= minLeftY && depthY <= maxLeftY;

                    bool conditionRight = depth >= depthRight - DEPTH_THRESHOLD &&
                                          depth <= depthRight + DEPTH_THRESHOLD &&
                                          depthX >= minRightX && depthX <= maxRightX &&
                                          depthY >= minRightY && depthY <= maxRightY;

                    //_handPixelsLeft[i] = (byte)(isInBounds && searchForLeftHand && conditionLeft ? 255 : 0);
                    _handPixelsRight[i] = (byte)(isInBounds && searchForRightHand && conditionRight ? 255 : 0);
                }

                List <DepthPointEx> contourLeft  = new List <DepthPointEx>();
                List <DepthPointEx> contourRight = new List <DepthPointEx>();

                for (int i = 0; i < DepthWidth * DepthHeight; ++i)
                {
                    ushort depth = data[i];

                    int depthX = i % DepthWidth;
                    int depthY = i / DepthWidth;

                    //if (searchForLeftHand)
                    //{
                    //    if (_handPixelsLeft[i] != 0)
                    //    {
                    //        byte top = i - DepthWidth >= 0 ? _handPixelsLeft[i - DepthWidth] : (byte)0;
                    //        byte bottom = i + DepthWidth < _handPixelsLeft.Length ? _handPixelsLeft[i + DepthWidth] : (byte)0;
                    //        byte left = i - 1 >= 0 ? _handPixelsLeft[i - 1] : (byte)0;
                    //        byte right = i + 1 < _handPixelsLeft.Length ? _handPixelsLeft[i + 1] : (byte)0;

                    //        bool isInContour = top == 0 || bottom == 0 || left == 0 || right == 0;

                    //        if (isInContour)
                    //        {
                    //            contourLeft.Add(new DepthPointEx { X = depthX, Y = depthY, Z = depth });
                    //        }
                    //    }
                    //}

                    if (searchForRightHand)
                    {
                        if (_handPixelsRight[i] != 0)
                        {
                            byte top    = i - DepthWidth >= 0 ? _handPixelsRight[i - DepthWidth] : (byte)0;
                            byte bottom = i + DepthWidth < _handPixelsRight.Length ? _handPixelsRight[i + DepthWidth] : (byte)0;
                            byte left   = i - 1 >= 0 ? _handPixelsRight[i - 1] : (byte)0;
                            byte right  = i + 1 < _handPixelsRight.Length ? _handPixelsRight[i + 1] : (byte)0;

                            bool isInContour = top == 0 || bottom == 0 || left == 0 || right == 0;

                            if (isInContour)
                            {
                                contourRight.Add(new DepthPointEx {
                                    X = depthX, Y = depthY, Z = depth
                                });
                            }
                        }
                    }
                }

                //if (searchForLeftHand)
                //{
                //    handLeft = GetHand(body.TrackingId, body.HandLeftState, contourLeft, angleLeft, wristLeftX, wristLeftY);
                //}

                if (searchForRightHand)
                {
                    handRight = GetHand(body.TrackingId, body.HandRightState, contourRight, angleRight, wristRightX, wristRightY);
                }
            }

            if (/*handLeft != null ||*/ handRight != null)
            {
                HandCollection hands = new HandCollection
                {
                    TrackingId = body.TrackingId,
                    //HandLeft = handLeft,
                    HandRight = handRight
                };

                if (HandsDetected != null)
                {
                    HandsDetected(this, hands);
                }
            }
        }
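A minimal usage sketch for the hand detector above. It assumes the method belongs to the HandsController class whose CoordinateMapper constructor appears later in this listing; how depth data and Body objects are fed into the detector is not shown here, so only the HandsDetected event and the HandCollection payload are taken from the code above.

        // Hedged usage sketch: subscribing to the HandsDetected event raised above.
        // Assumes the detector is the HandsController type constructed later in this
        // listing; feeding it depth frames and bodies happens elsewhere.
        KinectSensor sensor = KinectSensor.GetDefault();
        var handsController = new HandsController(sensor.CoordinateMapper);

        handsController.HandsDetected += (sender, hands) =>
        {
            // HandCollection carries the body's TrackingId and the detected right hand.
            if (hands.HandRight != null)
            {
                Console.WriteLine("Right hand detected for body " + hands.TrackingId);
            }
        };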
Exemplo n.º 40
        public KinectApp()
        {
            InitializeComponent();
            kinectSensor = KinectSensor.GetDefault();

            this.KeyPreview = true;
            this.KeyDown   += KinectApp_KeyDown;

            var frameDesc = kinectSensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Rgba);

            colorPixels  = new byte[frameDesc.BytesPerPixel * frameDesc.LengthInPixels];
            binaryPixels = new byte[binHeight * binWidth];
            uDepthPixels = new ushort[depthHeight * depthWidth];
            mappedDepth  = new DepthSpacePoint[colorHeight * colorWidth];
            outputImage  = new byte[1920 * 1080 / 2];

            colorFrameReader = kinectSensor.ColorFrameSource.OpenReader();
            depthFrameReader = kinectSensor.DepthFrameSource.OpenReader();

            binImage           = new Image <Gray, byte>(binWidth, binHeight);
            crImage            = new Image <Gray, byte>(binWidth, binHeight);
            colorImage         = new Image <Rgb, byte>(binWidth, binHeight);
            imageToDisplay     = new Image <Gray, byte>(binWidth, binHeight);
            imageToDisplay.Ptr = crImage.Ptr;

            this.colorFrameReader.FrameArrived += this.Reader_ColorFrameArrived;
            this.depthFrameReader.FrameArrived += this.Reader_FrameArrived;
            coordinateMapper = kinectSensor.CoordinateMapper;

            coordFont   = new MCvFont(FONT.CV_FONT_HERSHEY_DUPLEX, 1, 1);
            textFont    = new MCvFont(FONT.CV_FONT_HERSHEY_SIMPLEX, 0.5, 0.5);
            label1.Text = "Threshold: " + threshold.ToString();

            try
            {
                if (!kinectSensor.IsOpen)
                {
                    kinectSensor.Open();
                }
            }
            catch (IOException)
            {
                kinectSensor = null;
            }
            if (kinectSensor == null || !kinectSensor.IsAvailable)
            {
                StatusLabel.ForeColor = Color.Red;
                StatusLabel.Text      = "Kinect not connected";
                Bitmap            bitmap = new Bitmap(Image.FromFile("nosignal.jpg"));
                Image <Rgb, byte> image  = new Image <Rgb, byte>(bitmap);
                imageBox1.Image = image;
            }

            else
            {
                StatusLabel.ForeColor = Color.Green;
                StatusLabel.Text      = "Kinect connected";
            }
        }
        static void DepthFrameReady(Object sender, DepthImageFrame imageFrame, ColorImageFrame cimgframe, SkeletonPoint sLoc)
        {
            //Console.WriteLine("Depth");
            if (imageFrame != null && cimgframe != null)
            {
                //form.GetPictureBox1().Image = DepthToBitmap(imageFrame);

                if (takeSnapshot)
                {
                    Point[,] pointarr = new Point[(int)HEAD_SIZE_PIXELS * 2 + 1, (int)HEAD_SIZE_PIXELS * 2 + 1];
                    block             = true;
                    List <Point>  plist = new List <Point>();
                    Bitmap        bmap  = ImageToBitmap(cimgframe, false);
                    DateTime      d     = DateTime.Now;
                    Color         col;
                    List <String> slist = new List <String>();
                    String        dtm   = d.ToString();
                    String        dtmr  = "data" + dtm + "Reverse.txt";
                    dtm = "data" + dtm + ".txt";
                    Console.WriteLine(dtm);
                    CoordinateMapper cm = new CoordinateMapper(sensor);
                    snapshotDepthData = GetDepthArray(imageFrame);
                    double faceZ = (double)snapshotDepthData[dp.Y, dp.X] / 1000D;
                    using (System.IO.StreamWriter file = new System.IO.StreamWriter(@"data.txt"))
                        using (System.IO.StreamWriter colorfile = new System.IO.StreamWriter(@"colordata.txt"))
                            using (System.IO.StreamWriter fileReverse = new System.IO.StreamWriter(@"dataReverse.txt"))
                            {
                                for (int x = 0; x < snapshotDepthData.GetLength(1); x++)
                                {
                                    for (int y = 0; y < snapshotDepthData.GetLength(0); y++)
                                    {
                                        if (Math.Abs(x - dp.X) <= HEAD_SIZE_PIXELS && Math.Abs(y - dp.Y) <= HEAD_SIZE_PIXELS)
                                        {
                                            /*dp.X = x;
                                             * dp.Y = y;
                                             * ColorImagePoint c = cm.MapDepthPointToColorPoint(DepthImageFormat.Resolution640x480Fps30, dp, ColorImageFormat.RgbResolution640x480Fps30);
                                             * c = imageFrame.MapToColorImagePoint(x, y, ColorImageFormat.RgbResolution640x480Fps30);
                                             * Console.WriteLine("dp.X: " + dp.X +"dp.Y: " + dp.Y +"c.X: " + c.X +"c.Y: " + c.Y);
                                             * if(c.X < 640 && c.X >= 0 && c.Y < 480 && c.Y>=0)
                                             * {
                                             *  col = bmap.GetPixel(c.X, c.Y);
                                             * }
                                             * else
                                             * {
                                             *  col = bmap.GetPixel(nearX, nearY);
                                             * }*/

                                            /* if (Math.Abs(x - dp.X) <= 10 && Math.Abs(y - dp.Y) <= 10)
                                             * {
                                             *   col = Color.LightYellow;
                                             * }*/

                                            col = bmap.GetPixel(x + (cp.X - dp.X), y + (cp.Y - dp.Y));
                                            float r, g, b;
                                            r = (float)col.R;
                                            r = r / 255;
                                            g = (float)col.G;
                                            g = g / 255;
                                            b = (float)col.B;
                                            b = b / 255;
                                            double newX = -((double)((x - imageFrame.Width / 2) * HORIZONTAL_TAN * snapshotDepthData[y, x])) / (1000 * (double)(imageFrame.Width / 2));
                                            double newY = ((double)((y - imageFrame.Height / 2) * VERTICAL_TAN * snapshotDepthData[y, x]) / (1000 * (double)(imageFrame.Height / 2)));
                                            double newZ = (double)snapshotDepthData[y, x] / 1000D;
                                            if (Math.Abs(newZ - faceZ) <= HEAD_SIZE / 2)
                                            {
                                                r = 0;
                                                g = 0;
                                                b = 0;
                                                file.WriteLine(newX + " " + newY + " " + newZ);
                                                colorfile.WriteLine(newX + " " + newY + " " + newZ + " " + r.ToString() + " " + g.ToString() + " " + b.ToString() + " " + "1.0");
                                                String s = new String(new char[] { });
                                                s = newX + " " + newY + " " + newZ + " " + r.ToString() + " " + g.ToString() + " " + b.ToString() + " " + "1.0";
                                                slist.Add(s);
                                                pointarr[x + (int)HEAD_SIZE_PIXELS - dp.X, y + (int)HEAD_SIZE_PIXELS - dp.Y] = new Point(newX, newY, newZ, col);
                                                pointarr[x + (int)HEAD_SIZE_PIXELS - dp.X, y + (int)HEAD_SIZE_PIXELS - dp.Y].setArrX(x + (int)HEAD_SIZE_PIXELS - dp.X);
                                                pointarr[x + (int)HEAD_SIZE_PIXELS - dp.X, y + (int)HEAD_SIZE_PIXELS - dp.Y].setArrY(y + (int)HEAD_SIZE_PIXELS - dp.Y);
                                                pointarr[x + (int)HEAD_SIZE_PIXELS - dp.X, y + (int)HEAD_SIZE_PIXELS - dp.Y].setListInd(slist.Count - 1);
                                                plist.Add(pointarr[x + (int)HEAD_SIZE_PIXELS - dp.X, y + (int)HEAD_SIZE_PIXELS - dp.Y]);
                                            }
                                            else
                                            {
                                                pointarr[x + (int)HEAD_SIZE_PIXELS - dp.X, y + (int)HEAD_SIZE_PIXELS - dp.Y] = null;
                                            }
                                        }
                                    }
                                }
                            }
                    int     vert = slist.Count;
                    OffData dat  = new OffData(plist, pointarr);
                    dat.getFaces();
                    dat.writeToFile();

                    using (System.IO.StreamWriter fileoff = new System.IO.StreamWriter(@"fulldatacolor.off"))
                    {
                        fileoff.WriteLine("COFF");
                        fileoff.WriteLine(vert.ToString() + "\t0\t0");
                        int i = 0;
                        for (i = 0; i < vert; ++i)
                        {
                            fileoff.WriteLine(slist.ElementAt(i));
                        }
                    }
                    ProcessStartInfo startInfo = new ProcessStartInfo();
                    startInfo.FileName    = @"rcocone-win.exe";
                    startInfo.Arguments   = @"data.txt output";
                    startInfo.WindowStyle = ProcessWindowStyle.Hidden;
                    using (Process proc = Process.Start(startInfo))
                    {
                        proc.WaitForExit();

                        // Retrieve the app's exit code
                        //exitCode = proc.ExitCode;
                    }
                    //Process.Start(startInfo);
                    takeSnapshot = false;
                    block        = false;
                }

                imageFrame.Dispose();
            }
        }
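The newX/newY/newZ lines above back-project each depth pixel into metric camera space with a pinhole model, where HORIZONTAL_TAN and VERTICAL_TAN are taken to be the tangents of half the horizontal and vertical fields of view. A hedged standalone version of that computation, under those assumptions, is sketched below.

        // Hedged sketch of the back-projection used above: converts a depth pixel
        // (x, y, depthMm) into metric camera-space coordinates. horizontalTan and
        // verticalTan are assumed to be tan(hFov / 2) and tan(vFov / 2).
        static void BackProject(int x, int y, ushort depthMm, int frameWidth, int frameHeight,
                                double horizontalTan, double verticalTan,
                                out double newX, out double newY, out double newZ)
        {
            double halfW = frameWidth / 2.0;
            double halfH = frameHeight / 2.0;

            // X is mirrored, matching the sign flip in the snapshot code above.
            newX = -((x - halfW) * horizontalTan * depthMm) / (1000.0 * halfW);
            newY = ((y - halfH) * verticalTan * depthMm) / (1000.0 * halfH);
            newZ = depthMm / 1000.0;
        }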
Exemplo n.º 42
 public Skeleton(int displayWidth, int displayHeight, CoordinateMapper coordinateMapper)
 {
     this.DisplayWidth     = displayWidth;
     this.DisplayHeight    = displayHeight;
     this.CoordinateMapper = coordinateMapper;
 }
        /// <summary>
        /// Initializes a new instance of the MainWindow class.
        /// </summary>
        public MainWindow()
        {
            // use the default Kinect as the sensor instance
            this.kinectSensor = KinectSensor.GetDefault();

            // get the coordinate mapper
            this.coordinateMapper = this.kinectSensor.CoordinateMapper;

            // get the depth (display) extents
            FrameDescription frameDescription = this.kinectSensor.DepthFrameSource.FrameDescription;

            // get the size of joint space
            this.displayWidth  = frameDescription.Width;
            this.displayHeight = frameDescription.Height;

            // open the body frame reader for the current Kinect
            this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

            // a bone is defined as a line between two joints
            this.bones = new List <Tuple <JointType, JointType> >();

            // torso
            this.bones.Add(new Tuple <JointType, JointType>(JointType.Head, JointType.Neck));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.Neck, JointType.SpineShoulder));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.SpineMid));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineMid, JointType.SpineBase));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineBase, JointType.HipRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineBase, JointType.HipLeft));

            // right arm
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ShoulderRight, JointType.ElbowRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ElbowRight, JointType.WristRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristRight, JointType.HandRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HandRight, JointType.HandTipRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristRight, JointType.ThumbRight));

            // left arm
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ShoulderLeft, JointType.ElbowLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ElbowLeft, JointType.WristLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristLeft, JointType.HandLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HandLeft, JointType.HandTipLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristLeft, JointType.ThumbLeft));

            // populate body colors, one for each body
            this.bodyColors = new List <Pen>();

            this.bodyColors.Add(new Pen(Brushes.Red, 6));
            this.bodyColors.Add(new Pen(Brushes.Orange, 6));
            this.bodyColors.Add(new Pen(Brushes.Green, 6));
            this.bodyColors.Add(new Pen(Brushes.Blue, 6));
            this.bodyColors.Add(new Pen(Brushes.Indigo, 6));
            this.bodyColors.Add(new Pen(Brushes.Violet, 6));

            // when the Kinect availability changes, the IsAvailableChanged event fires and Sensor_IsAvailableChanged checks the current sensor state
            this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            // open the Kinect sensor
            this.kinectSensor.Open();

            // set the Kinect status text
            this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                            : Properties.Resources.NoSensorStatusText;

            // create the drawing group we'll use for drawing
            this.drawingGroup = new DrawingGroup();

            // create an image source that can be used in the image control
            this.imageSource = new DrawingImage(this.drawingGroup);

            // use the window object as the view model in this simple example
            this.DataContext = this;

            // initialize the window components (controls)
            this.InitializeComponent();
        }
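The constructor above opens the body frame reader but the frame handler itself is not part of this excerpt; a minimal hedged sketch of how that reader is typically drained might look like this (the bodies field and the inline handler are assumptions, only bodyFrameReader comes from the code above).

            // Hedged sketch: draining the body frame reader opened in the constructor
            // above. The bodies field is an assumption; drawing of this.bones would
            // happen after the refresh.
            this.bodyFrameReader.FrameArrived += (s, e) =>
            {
                using (BodyFrame frame = e.FrameReference.AcquireFrame())
                {
                    if (frame == null)
                    {
                        return;
                    }

                    if (this.bodies == null)
                    {
                        this.bodies = new Body[frame.BodyCount];
                    }

                    // Refreshes the Body objects in place; bodies not seen this frame stay untracked.
                    frame.GetAndRefreshBodyData(this.bodies);
                }
            };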
    public KinectInterop.SensorData OpenDefaultSensor(KinectInterop.FrameSource dwFlags, float sensorAngle, bool bUseMultiSource)
    {
        KinectInterop.SensorData sensorData = new KinectInterop.SensorData();
        //sensorFlags = dwFlags;

        kinectSensor = KinectSensor.GetDefault();
        if (kinectSensor == null)
        {
            return(null);
        }

        coordMapper = kinectSensor.CoordinateMapper;

        sensorData.bodyCount  = kinectSensor.BodyFrameSource.BodyCount;
        sensorData.jointCount = 25;

        if ((dwFlags & KinectInterop.FrameSource.TypeBody) != 0)
        {
            if (!bUseMultiSource)
            {
                bodyFrameReader = kinectSensor.BodyFrameSource.OpenReader();
            }

            bodyData = new Body[sensorData.bodyCount];
        }

        var frameDesc = kinectSensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Rgba);

        sensorData.colorImageWidth  = frameDesc.Width;
        sensorData.colorImageHeight = frameDesc.Height;

        if ((dwFlags & KinectInterop.FrameSource.TypeColor) != 0)
        {
            if (!bUseMultiSource)
            {
                colorFrameReader = kinectSensor.ColorFrameSource.OpenReader();
            }

            sensorData.colorImage = new byte[frameDesc.BytesPerPixel * frameDesc.LengthInPixels];
        }

        sensorData.depthImageWidth  = kinectSensor.DepthFrameSource.FrameDescription.Width;
        sensorData.depthImageHeight = kinectSensor.DepthFrameSource.FrameDescription.Height;

        if ((dwFlags & KinectInterop.FrameSource.TypeDepth) != 0)
        {
            if (!bUseMultiSource)
            {
                depthFrameReader = kinectSensor.DepthFrameSource.OpenReader();
            }

            sensorData.depthImage = new ushort[kinectSensor.DepthFrameSource.FrameDescription.LengthInPixels];
        }

        if ((dwFlags & KinectInterop.FrameSource.TypeBodyIndex) != 0)
        {
            if (!bUseMultiSource)
            {
                bodyIndexFrameReader = kinectSensor.BodyIndexFrameSource.OpenReader();
            }

            sensorData.bodyIndexImage = new byte[kinectSensor.BodyIndexFrameSource.FrameDescription.LengthInPixels];
        }

        if ((dwFlags & KinectInterop.FrameSource.TypeInfrared) != 0)
        {
            if (!bUseMultiSource)
            {
                infraredFrameReader = kinectSensor.InfraredFrameSource.OpenReader();
            }

            sensorData.infraredImage = new ushort[kinectSensor.InfraredFrameSource.FrameDescription.LengthInPixels];
        }

        if (!kinectSensor.IsOpen)
        {
            kinectSensor.Open();
        }

        if (bUseMultiSource && dwFlags != KinectInterop.FrameSource.TypeNone && kinectSensor.IsOpen)
        {
            multiSourceFrameReader = kinectSensor.OpenMultiSourceFrameReader((FrameSourceTypes)dwFlags);
        }

        return(sensorData);
    }
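A hedged usage sketch of OpenDefaultSensor above, requesting the body, color and depth streams without the multi-source reader; the variable receiving the result and the log line are assumptions.

    // Hedged usage sketch of OpenDefaultSensor above.
    KinectInterop.FrameSource flags = KinectInterop.FrameSource.TypeBody |
                                      KinectInterop.FrameSource.TypeColor |
                                      KinectInterop.FrameSource.TypeDepth;

    KinectInterop.SensorData sensorData = OpenDefaultSensor(flags, 0f, false);

    if (sensorData != null)
    {
        Debug.Log("Depth frame: " + sensorData.depthImageWidth + "x" + sensorData.depthImageHeight);
    }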
Exemplo n.º 45
 public SkeletonDisplayManager(CoordinateMapper mapper, Canvas root)
 {
     rootCanvas  = root;
     this.mapper = mapper;
 }
        static void InitializeSensor()
        {
            var sensor = Sensor;

            if (sensor != null)
            {
                return;
            }

            try
            {
                sensor = KinectSensor.GetDefault();
                if (sensor == null)
                {
                    return;
                }

                var reader = sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Depth | FrameSourceTypes.Color | FrameSourceTypes.BodyIndex);
                reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;

                coordinateMapper = sensor.CoordinateMapper;

                FrameDescription depthFrameDescription = sensor.DepthFrameSource.FrameDescription;

                int depthWidth  = depthFrameDescription.Width;
                int depthHeight = depthFrameDescription.Height;

                /*FrameDescription colorFrameDescription = sensor.ColorFrameSource.FrameDescription;
                 *
                 * int colorWidth = colorFrameDescription.Width;
                 * int colorHeight = colorFrameDescription.Height;
                 *
                 * colorMappedToDepthPoints = new DepthSpacePoint[colorWidth * colorHeight];
                 *
                 * bitmap = new WriteableBitmap(colorWidth, colorHeight, 96.0, 96.0, PixelFormats.Bgra32, null);
                 *
                 * // Calculate the WriteableBitmap back buffer size
                 * bitmapBackBufferSize = (uint)((bitmap.BackBufferStride * (bitmap.PixelHeight - 1)) + (bitmap.PixelWidth * bytesPerPixel));*/


                sensor.Open();

                Sensor = sensor;

                if (context == null)
                {
                    contexThread = new Thread(() =>
                    {
                        context = new KinectCamApplicationContext();
                        Application.Run(context);
                    });
                    refreshThread = new Thread(() =>
                    {
                        while (true)
                        {
                            Thread.Sleep(250);
                            Application.DoEvents();
                        }
                    });
                    contexThread.IsBackground  = true;
                    refreshThread.IsBackground = true;
                    contexThread.SetApartmentState(ApartmentState.STA);
                    refreshThread.SetApartmentState(ApartmentState.STA);
                    contexThread.Start();
                    refreshThread.Start();
                }
            }
            catch
            {
                Trace.WriteLine("Error enabling the Kinect sensor!");
            }
        }
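The Reader_MultiSourceFrameArrived handler registered above is not included in this excerpt; a hedged sketch of its typical shape, acquiring the depth, color and body-index frames requested when the reader was opened, is shown below. The processing itself is left as a placeholder.

        // Hedged sketch of the multi-source handler wired above; the actual
        // frame processing is a placeholder.
        static void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();
            if (multiSourceFrame == null)
            {
                return;
            }

            using (DepthFrame depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame())
            using (ColorFrame colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame())
            using (BodyIndexFrame bodyIndexFrame = multiSourceFrame.BodyIndexFrameReference.AcquireFrame())
            {
                if (depthFrame == null || colorFrame == null || bodyIndexFrame == null)
                {
                    return;
                }

                // Processing that combines the frames via coordinateMapper would go here.
            }
        }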
Exemplo n.º 47
        /// <summary>
        /// Initializes a new instance of the MainWindow class.
        /// </summary>
        public MainWindow()
        {
            // one sensor is currently supported
            this.kinectSensor = KinectSensor.GetDefault();

            // get the coordinate mapper
            this.coordinateMapper = this.kinectSensor.CoordinateMapper;

            // get the depth (display) extents
            FrameDescription frameDescription = this.kinectSensor.DepthFrameSource.FrameDescription;

            // get the size of joint space
            this.displayWidth  = frameDescription.Width;
            this.displayHeight = frameDescription.Height;

            // open the reader for the body frames
            this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();
            // open the color frame reader
            //if (COLORFRAME)
            this.colorFrameReader = this.kinectSensor.ColorFrameSource.OpenReader();

            // a bone is defined as a line between two joints
            this.bones = new List <Tuple <JointType, JointType> >();

            // wire handler for frame arrival
            this.colorFrameReader.FrameArrived += this.Reader_ColorFrameArrived;
            // create the colorFrameDescription from the ColorFrameSource using Bgra format
            FrameDescription colorFrameDescription = this.kinectSensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);

            // create the bitmap to display
            this.colorBitmap = new WriteableBitmap(colorFrameDescription.Width, colorFrameDescription.Height, 96.0, 96.0, PixelFormats.Bgr32, null);

            // set IsAvailableChanged event notifier
            this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            // torso
            this.bones.Add(new Tuple <JointType, JointType>(JointType.Head, JointType.Neck));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.Neck, JointType.SpineShoulder));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.SpineMid));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineMid, JointType.SpineBase));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineBase, JointType.HipRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineBase, JointType.HipLeft));

            // right arm
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ShoulderRight, JointType.ElbowRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ElbowRight, JointType.WristRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristRight, JointType.HandRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HandRight, JointType.HandTipRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristRight, JointType.ThumbRight));

            // left arm
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ShoulderLeft, JointType.ElbowLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ElbowLeft, JointType.WristLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristLeft, JointType.HandLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HandLeft, JointType.HandTipLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristLeft, JointType.ThumbLeft));

            // right leg
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HipRight, JointType.KneeRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.KneeRight, JointType.AnkleRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.AnkleRight, JointType.FootRight));

            // left leg
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HipLeft, JointType.KneeLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.KneeLeft, JointType.AnkleLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.AnkleLeft, JointType.FootLeft));

            // populate body colors, one for each BodyIndex
            this.bodyColors = new List <Pen>();

            this.bodyColors.Add(new Pen(Brushes.Red, 6));
            this.bodyColors.Add(new Pen(Brushes.Orange, 6));
            this.bodyColors.Add(new Pen(Brushes.Green, 6));
            this.bodyColors.Add(new Pen(Brushes.Blue, 6));
            this.bodyColors.Add(new Pen(Brushes.Indigo, 6));
            this.bodyColors.Add(new Pen(Brushes.Violet, 6));

            // open the Kinect sensor
            this.kinectSensor.Open();

            // set the status text
            this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                            : Properties.Resources.NoSensorStatusText;

            // use the window object as the view model in this simple example
            this.DataContext = this;

            // initialize the window components (controls)
            InitializeComponent();
        }
Exemplo n.º 48
        /// <summary>
        /// Initializes a new instance of the DrawSkeleton class
        /// </summary>
        /// <param name="kinectSensor">Active instance of the KinectSensor</param>
        public DrawSkeleton(KinectSensor kinectSensor, int width, int height)
        {
            if (kinectSensor == null)
            {
                throw new ArgumentNullException("kinectSensor");
            }

            // get the coordinate mapper
            this.coordinateMapper = kinectSensor.CoordinateMapper;

            // get the depth (display) extents
            FrameDescription frameDescription = kinectSensor.DepthFrameSource.FrameDescription;

            // get size of joint space
            this.displayWidth  = frameDescription.Width - 20;
            this.displayHeight = frameDescription.Height - 20;
            //this.displayWidth = width;
            //this.displayHeight = height;

            // a bone defined as a line between two joints
            this.bones = new List <Tuple <JointType, JointType> >();

            // Torso
            this.bones.Add(new Tuple <JointType, JointType>(JointType.Head, JointType.Neck));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.Neck, JointType.SpineShoulder));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.SpineMid));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineMid, JointType.SpineBase));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineBase, JointType.HipRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineBase, JointType.HipLeft));

            // Right Arm
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ShoulderRight, JointType.ElbowRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ElbowRight, JointType.WristRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristRight, JointType.HandRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HandRight, JointType.HandTipRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristRight, JointType.ThumbRight));

            // Left Arm
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ShoulderLeft, JointType.ElbowLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ElbowLeft, JointType.WristLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristLeft, JointType.HandLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HandLeft, JointType.HandTipLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristLeft, JointType.ThumbLeft));

            // Right Leg
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HipRight, JointType.KneeRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.KneeRight, JointType.AnkleRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.AnkleRight, JointType.FootRight));

            // Left Leg
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HipLeft, JointType.KneeLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.KneeLeft, JointType.AnkleLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.AnkleLeft, JointType.FootLeft));

            // populate body colors, one for each BodyIndex
            this.bodyColors = new List <Pen>();

            //this.bodyColors.Add(new Pen(Brushes.Red, 6));
            //this.bodyColors.Add(new Pen(Brushes.Orange, 6));
            //this.bodyColors.Add(new Pen(Brushes.Green, 6));
            //this.bodyColors.Add(new Pen(Brushes.Blue, 6));
            //this.bodyColors.Add(new Pen(Brushes.Indigo, 6));
            //this.bodyColors.Add(new Pen(Brushes.Violet, 6));

            this.bodyColors.Add(new Pen(Brushes.Yellow, 4));
            this.bodyColors.Add(new Pen(Brushes.Yellow, 4));
            this.bodyColors.Add(new Pen(Brushes.Yellow, 4));
            this.bodyColors.Add(new Pen(Brushes.Yellow, 4));
            this.bodyColors.Add(new Pen(Brushes.Yellow, 4));
            this.bodyColors.Add(new Pen(Brushes.Yellow, 4));

            // Create the drawing group we'll use for drawing
            this.drawingGroup = new DrawingGroup();

            // Create an image source that we can use in our image control
            this.imageSource = new DrawingImage(this.drawingGroup);
        }
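The DrawSkeleton constructor above only prepares drawing resources; a hedged sketch of the per-frame drawing pass those resources imply is shown below. Only coordinateMapper, bones, bodyColors, displayWidth/displayHeight and drawingGroup come from the code above; the method name, background fill and clipping are assumptions.

        // Hedged sketch of a per-frame update for the DrawSkeleton class above.
        public void UpdateBodies(Body[] bodies)
        {
            using (DrawingContext dc = this.drawingGroup.Open())
            {
                // Black background sized to the joint-space display area.
                dc.DrawRectangle(Brushes.Black, null, new Rect(0.0, 0.0, this.displayWidth, this.displayHeight));

                int penIndex = 0;
                foreach (Body body in bodies)
                {
                    Pen drawPen = this.bodyColors[penIndex++ % this.bodyColors.Count];

                    if (!body.IsTracked)
                    {
                        continue;
                    }

                    // Project each joint once into depth space.
                    var points = new Dictionary<JointType, Point>();
                    foreach (var joint in body.Joints)
                    {
                        DepthSpacePoint p = this.coordinateMapper.MapCameraPointToDepthSpace(joint.Value.Position);
                        points[joint.Key] = new Point(p.X, p.Y);
                    }

                    // Draw each bone as a line between its two joints.
                    foreach (var bone in this.bones)
                    {
                        dc.DrawLine(drawPen, points[bone.Item1], points[bone.Item2]);
                    }
                }

                // Keep drawing inside the display area.
                this.drawingGroup.ClipGeometry = new RectangleGeometry(new Rect(0.0, 0.0, this.displayWidth, this.displayHeight));
            }
        }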
Exemplo n.º 49
        /// <summary>
        /// Process the color and depth inputs, converting the color into the depth space
        /// </summary>
        public unsafe FusionColorImageFrame MapColorToDepth(ushort[] depthImagePixels, byte[] colorImagePixels, CoordinateMapper cm, bool mirrorDepth = true)
        {
            cm.MapDepthFrameToColorSpace(depthImagePixels, this.colorCoordinates);

            this.mirrorDepth = mirrorDepth;

            lock (this.rawDataLock)
            {
                // Fill in the visibility depth map.
                Array.Clear(this.depthVisibilityTestMap, 0, this.depthVisibilityTestMap.Length);
                fixed(ushort *ptrDepthVisibilityPixels = this.depthVisibilityTestMap, ptrDepthPixels = depthImagePixels)
                {
                    for (int index = 0; index < depthImagePixels.Length; ++index)
                    {
                        if (!float.IsInfinity(this.colorCoordinates[index].X) && !float.IsInfinity(this.colorCoordinates[index].Y))
                        {
                            int x = (int)(System.Math.Floor(this.colorCoordinates[index].X + 0.5f) / ColorDownsampleFactor);
                            int y = (int)(System.Math.Floor(this.colorCoordinates[index].Y + 0.5f) / ColorDownsampleFactor);

                            if ((x >= 0) && (x < this.depthVisibilityTestMapWidth) &&
                                (y >= 0) && (y < this.depthVisibilityTestMapHeight))
                            {
                                int depthVisibilityTestIndex = (y * this.depthVisibilityTestMapWidth) + x;
                                if ((ptrDepthVisibilityPixels[depthVisibilityTestIndex] == 0) ||
                                    (ptrDepthVisibilityPixels[depthVisibilityTestIndex] > ptrDepthPixels[index]))
                                {
                                    ptrDepthVisibilityPixels[depthVisibilityTestIndex] = ptrDepthPixels[index];
                                }
                            }
                        }
                    }
                }

                if (this.mirrorDepth)
                {
                    // Here we make use of unsafe code to just copy the whole pixel as an int for performance reasons, as we do
                    // not need access to the individual rgba components.
                    fixed(byte *ptrColorPixels = colorImagePixels)
                    {
                        int *rawColorPixels = (int *)ptrColorPixels;

                        Parallel.For(
                            0,
                            KinectSettings.DEPTH_HEIGHT,
                            y =>
                        {
                            int destIndex = y * KinectSettings.DEPTH_WIDTH;

                            for (int x = 0; x < KinectSettings.DEPTH_WIDTH; ++x, ++destIndex)
                            {
                                // calculate index into depth array
                                int colorInDepthX            = (int)System.Math.Floor(colorCoordinates[destIndex].X + 0.5);
                                int colorInDepthY            = (int)System.Math.Floor(colorCoordinates[destIndex].Y + 0.5);
                                int depthVisibilityTestX     = (int)(colorInDepthX / ColorDownsampleFactor);
                                int depthVisibilityTestY     = (int)(colorInDepthY / ColorDownsampleFactor);
                                int depthVisibilityTestIndex = (depthVisibilityTestY * this.depthVisibilityTestMapWidth) + depthVisibilityTestX;

                                // make sure the depth pixel maps to a valid point in color space
                                if (colorInDepthX >= 0 && colorInDepthX < KinectSettings.COLOR_WIDTH && colorInDepthY >= 0 &&
                                    colorInDepthY < KinectSettings.COLOR_HEIGHT && depthImagePixels[destIndex] != 0)
                                {
                                    ushort depthTestValue = this.depthVisibilityTestMap[depthVisibilityTestIndex];

                                    if ((depthImagePixels[destIndex] - depthTestValue) < DepthVisibilityTestThreshold)
                                    {
                                        // Calculate index into color array
                                        int sourceColorIndex = colorInDepthX + (colorInDepthY * KinectSettings.COLOR_WIDTH);

                                        // Copy color pixel
                                        this.resampledColorImagePixelsAlignedToDepth[destIndex] = rawColorPixels[sourceColorIndex];
                                    }
                                    else
                                    {
                                        this.resampledColorImagePixelsAlignedToDepth[destIndex] = 0;
                                    }
                                }
                                else
                                {
                                    this.resampledColorImagePixelsAlignedToDepth[destIndex] = 0;
                                }
                            }
                        });
                    }
                }
                else
                {
                    // Here we make use of unsafe code to just copy the whole pixel as an int for performance reasons, as we do
                    // not need access to the individual rgba components.
                    fixed(byte *ptrColorPixels = colorImagePixels)
                    {
                        int *rawColorPixels = (int *)ptrColorPixels;

                        // Horizontal flip the color image as the standard depth image is flipped internally in Kinect Fusion
                        // to give a viewpoint as though from behind the Kinect looking forward by default.
                        Parallel.For(
                            0,
                            KinectSettings.DEPTH_HEIGHT,
                            y =>
                        {
                            int destIndex        = y * KinectSettings.DEPTH_WIDTH;
                            int flippedDestIndex = destIndex + (KinectSettings.DEPTH_WIDTH - 1);     // horizontally mirrored

                            for (int x = 0; x < KinectSettings.DEPTH_WIDTH; ++x, ++destIndex, --flippedDestIndex)
                            {
                                // calculate index into depth array
                                int colorInDepthX            = (int)System.Math.Floor(colorCoordinates[destIndex].X + 0.5);
                                int colorInDepthY            = (int)System.Math.Floor(colorCoordinates[destIndex].Y + 0.5);
                                int depthVisibilityTestX     = (int)(colorInDepthX / ColorDownsampleFactor);
                                int depthVisibilityTestY     = (int)(colorInDepthY / ColorDownsampleFactor);
                                int depthVisibilityTestIndex = (depthVisibilityTestY * this.depthVisibilityTestMapWidth) + depthVisibilityTestX;

                                // make sure the depth pixel maps to a valid point in color space
                                if (colorInDepthX >= 0 && colorInDepthX < KinectSettings.COLOR_WIDTH && colorInDepthY >= 0 &&
                                    colorInDepthY < KinectSettings.COLOR_HEIGHT && depthImagePixels[destIndex] != 0)
                                {
                                    ushort depthTestValue = this.depthVisibilityTestMap[depthVisibilityTestIndex];

                                    if ((depthImagePixels[destIndex] - depthTestValue) < DepthVisibilityTestThreshold)
                                    {
                                        // Calculate index into color array- this will perform a horizontal flip as well
                                        int sourceColorIndex = colorInDepthX + (colorInDepthY * KinectSettings.COLOR_WIDTH);

                                        // Copy color pixel
                                        this.resampledColorImagePixelsAlignedToDepth[flippedDestIndex] = rawColorPixels[sourceColorIndex];
                                    }
                                    else
                                    {
                                        this.resampledColorImagePixelsAlignedToDepth[flippedDestIndex] = 0;
                                    }
                                }
                                else
                                {
                                    this.resampledColorImagePixelsAlignedToDepth[flippedDestIndex] = 0;
                                }
                            }
                        });
                    }
                }
            }
            ResampledColorFrameDepthAligned.CopyPixelDataFrom(this.resampledColorImagePixelsAlignedToDepth);
            return(ResampledColorFrameDepthAligned);
        }
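A hedged usage sketch for MapColorToDepth above; the depthPixels and colorPixels buffers are assumed to have been filled elsewhere from the corresponding depth and color frames, and the mapper is the sensor's CoordinateMapper.

        // Hedged usage sketch of MapColorToDepth above. depthPixels is assumed to be a
        // ushort[] of DEPTH_WIDTH * DEPTH_HEIGHT raw depth values and colorPixels a
        // byte[] of BGRA pixels at COLOR_WIDTH x COLOR_HEIGHT, filled elsewhere.
        CoordinateMapper mapper = KinectSensor.GetDefault().CoordinateMapper;

        FusionColorImageFrame alignedColor = this.MapColorToDepth(depthPixels, colorPixels, mapper, mirrorDepth: true);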
Exemplo n.º 50
 public RockViewModel(float xP, float yP, float zP,
                      double widthOnCanvas, double heightOnCanvas, Canvas canvas, CoordinateMapper coorMap) :
     this(new CameraSpacePoint {
     X = xP, Y = yP, Z = zP
 },
          new Size(widthOnCanvas, heightOnCanvas), canvas, coorMap)
 {
 }
        private void InitializeBodyFrameDisplayVariables()
        {
            // get the coordinate mapper
            coordinateMapper = sensor.CoordinateMapper;

            // get the depth (display) extents
            FrameDescription frameDescription = sensor.DepthFrameSource.FrameDescription;

            // get size of joint space
            displayWidth  = frameDescription.Width;
            displayHeight = frameDescription.Height;

            // open the reader for the body frames
            bodyFrameReader = sensor.BodyFrameSource.OpenReader();

            // a bone defined as a line between two joints
            bones = new List <Tuple <JointType, JointType> >(
                new Tuple <JointType, JointType>[] {
                // torso
                new Tuple <JointType, JointType>(JointType.Head, JointType.Neck),
                new Tuple <JointType, JointType>(JointType.Neck, JointType.SpineShoulder),
                new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.SpineMid),
                new Tuple <JointType, JointType>(JointType.SpineMid, JointType.SpineBase),
                new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderRight),
                new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderLeft),
                new Tuple <JointType, JointType>(JointType.SpineBase, JointType.HipRight),
                new Tuple <JointType, JointType>(JointType.SpineBase, JointType.HipLeft),

                // right arm
                new Tuple <JointType, JointType>(JointType.ShoulderRight, JointType.ElbowRight),
                new Tuple <JointType, JointType>(JointType.ElbowRight, JointType.WristRight),
                new Tuple <JointType, JointType>(JointType.WristRight, JointType.HandRight),
                new Tuple <JointType, JointType>(JointType.HandRight, JointType.HandTipRight),
                new Tuple <JointType, JointType>(JointType.WristRight, JointType.ThumbRight),

                // Left Arm
                new Tuple <JointType, JointType>(JointType.ShoulderLeft, JointType.ElbowLeft),
                new Tuple <JointType, JointType>(JointType.ElbowLeft, JointType.WristLeft),
                new Tuple <JointType, JointType>(JointType.WristLeft, JointType.HandLeft),
                new Tuple <JointType, JointType>(JointType.HandLeft, JointType.HandTipLeft),
                new Tuple <JointType, JointType>(JointType.WristLeft, JointType.ThumbLeft),

                // Right Leg
                new Tuple <JointType, JointType>(JointType.HipRight, JointType.KneeRight),
                new Tuple <JointType, JointType>(JointType.KneeRight, JointType.AnkleRight),
                new Tuple <JointType, JointType>(JointType.AnkleRight, JointType.FootRight),

                // Left Leg
                new Tuple <JointType, JointType>(JointType.HipLeft, JointType.KneeLeft),
                new Tuple <JointType, JointType>(JointType.KneeLeft, JointType.AnkleLeft),
                new Tuple <JointType, JointType>(JointType.AnkleLeft, JointType.FootLeft)
            }
                );

            // populate body colors, one for each BodyIndex
            bodyColors = new List <Pen>();

            bodyColors.Add(new Pen(Brushes.Red, 6));
            bodyColors.Add(new Pen(Brushes.Orange, 6));
            bodyColors.Add(new Pen(Brushes.Green, 6));
            bodyColors.Add(new Pen(Brushes.Blue, 6));
            bodyColors.Add(new Pen(Brushes.Indigo, 6));
            bodyColors.Add(new Pen(Brushes.Violet, 6));

            // Create the drawing group we'll use for drawing
            drawingGroup = new DrawingGroup();

            // Create an image source that we can use in our image control
            imageSource = new DrawingImage(drawingGroup);

            // use the window object as the view model in this simple example
            DataContext = this;
        }
        /// <summary>
        ///     Gets a dictionary mapping each of the body's joints to a tuple of its 2D point, tracked state and depth.
        /// </summary>
        /// <param name="body"></param>
        /// <param name="coordinateMapper"></param>
        /// <returns>Dictionary of JointType to a tuple of the form (2D point, is tracked, depth)</returns>
        public static Dictionary <JointType, (Point joint, bool tracked, float depth)> GetPointDictFromJoints(this Body body, CoordinateMapper coordinateMapper)
        {
            Dictionary <JointType, (Point joint, bool tracked, float depth)> dict = new Dictionary <JointType, (Point joint, bool tracked, float depth)>();

            foreach (KeyValuePair <JointType, Joint> pair in body.Joints)
            {
                dict.Add(pair.Key, pair.Value.GetPointTupleFromJoint(coordinateMapper));
            }

            return(dict);
        }
 private static (Point point, bool tracked, float depth) GetPointTupleFromJoint(this Joint joint, CoordinateMapper coordinateMapper)
 {
     (Point point, float depth) coordinate = joint.ToCoordinatePoint(coordinateMapper);

     return (point: coordinate.point, tracked: joint.TrackingState != TrackingState.NotTracked, depth: coordinate.depth);
     //return new Tuple<Point, bool>(joint.ToCoordinatePoint(coordinateMapper), joint.TrackingState != TrackingState.NotTracked);
 }
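GetPointTupleFromJoint above depends on a ToCoordinatePoint extension that is not part of this excerpt; a plausible hedged sketch of such an extension, returning a depth-space point plus the joint's depth, is given below. The exact target space and the Z clamp are assumptions.

 // Hedged sketch of the ToCoordinatePoint extension used above; it is not part of
 // this excerpt, so the depth-space mapping and the Z clamp are assumptions.
 public static (Point point, float depth) ToCoordinatePoint(this Joint joint, CoordinateMapper coordinateMapper)
 {
     CameraSpacePoint position = joint.Position;

     // Avoid the mapper returning (-Infinity, -Infinity) for points at or behind the camera.
     if (position.Z < 0)
     {
         position.Z = 0.1f;
     }

     DepthSpacePoint depthPoint = coordinateMapper.MapCameraPointToDepthSpace(position);

     return (new Point(depthPoint.X, depthPoint.Y), position.Z);
 }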
        /// <summary>
        /// Translates between kinect and drawing points
        /// </summary>
        private System.Drawing.Point TranslatePoint(CameraSpacePoint point, CoordinateMapper mapper)
        {
            var colorPoint = mapper.MapCameraPointToColorSpace(point);

            return(new System.Drawing.Point((int)colorPoint.X, (int)colorPoint.Y));
        }
 /// <summary>
 /// Creates a new instance of <see cref="HandsController"/> with the specified coordinate mapper.
 /// </summary>
 /// <param name="coordinateMapper">The coordinate mapper that will be used during the finger detection process.</param>
 public HandsController(CoordinateMapper coordinateMapper)
 {
     CoordinateMapper = coordinateMapper;
 }
Exemplo n.º 56
    void LateUpdate()
    {
        bodies = bodySourceManager.GetData();
        if (bodies == null)
        {
            Debug.Log("No Bodies");
            return;
        }

        if (_CoordinateMapper == null)
        {
            _CoordinateMapper = bodySourceManager.Sensor.CoordinateMapper;
        }

        // iterate through each body and update face source
        for (int i = 0; i < bodyCount; i++)
        {
            var body = bodies[i];
            if (body != null)
            {
                if (body.IsTracked)
                {
                    Windows.Kinect.Joint handRight     = body.Joints[JointType.HandRight];
                    Windows.Kinect.Joint elbowRight    = body.Joints[JointType.ElbowRight];
                    Windows.Kinect.Joint handLeft      = body.Joints[JointType.HandLeft];
                    Windows.Kinect.Joint elbowLeft     = body.Joints[JointType.ElbowLeft];
                    Windows.Kinect.Joint shoulderLeft  = body.Joints[JointType.ShoulderLeft];
                    Windows.Kinect.Joint shoulderRight = body.Joints[JointType.ShoulderRight];
                    Windows.Kinect.Joint spindShoulder = body.Joints[JointType.SpineShoulder];
                    Windows.Kinect.Joint spindMid      = body.Joints[JointType.SpineMid];
                    // get the points in screen space
                    Vector3 handR        = GetVector3FromJoint(handRight);
                    Vector3 handL        = GetVector3FromJoint(handLeft);
                    Vector3 reactorPoint = (GetVector3FromJoint(spindShoulder) + GetVector3FromJoint(spindMid)) / 2;
                    // direction of the right hand (elbow to hand)
                    Vector3 HandDirectionR = GetDirection(elbowRight, handRight);
                    // direction of the left hand (elbow to hand)
                    Vector3 HandDirectionL = GetDirection(elbowLeft, handLeft);
                    // get the reactor's facing direction
                    Vector3 MidToRight       = GetDirection(spindMid, shoulderRight);
                    Vector3 MidToLeft        = GetDirection(spindMid, shoulderLeft);
                    Vector3 ReacterDirection = Vector3.Cross(MidToRight, MidToLeft);

                    // get the elapsed time since the last frame
                    float ration = Time.deltaTime;
                    // Reactor
                    _reactorController.UpdatePosition(reactorPoint, ReacterDirection);
                    // _reactorController.UpdateScale(MidToLeft.magnitude);
                    if (body.HandRightState == HandState.Open || body.HandLeftState == HandState.Open)
                    {
                        _reactorController.Activate(ration);
                    }
                    else
                    {
                        if (noOpenReactor >= CounterTh)
                        {
                            _reactorController.DeActivate(ration);
                            noOpenReactor = 0;
                        }
                        else
                        {
                            noOpenReactor += 1;
                        }
                    }

                    // Right hand
                    if (body.HandRightState == HandState.Open)
                    {
                        luminousManageR.Attack(ration, handR, HandDirectionR);
                        noOpenCounterR = 0;
                    }
                    else
                    {
                        if (noOpenCounterR >= CounterTh)
                        {
                            luminousManageR.StopBeem();
                        }
                        else
                        {
                            noOpenCounterR += 1;
                        }
                    }

                    // Left hand
                    if (body.HandLeftState == HandState.Open)
                    {
                        luminousManageL.Attack(ration, handL, HandDirectionL);
                        noOpenCounterL = 0;
                    }
                    else
                    {
                        if (noOpenCounterL >= CounterTh)
                        {
                            luminousManageL.StopBeem();
                        }
                        else
                        {
                            noOpenCounterL += 1;
                        }
                    }
                }
            }
        }
    }
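GetVector3FromJoint and GetDirection are referenced in LateUpdate above but are not included in this excerpt; hedged sketches of what they might look like follow. The scale factor applied to the camera-space coordinates is an assumption.

    // Hedged sketches of the helpers referenced above; neither is shown in this
    // excerpt, so the scale factor and the normalization are assumptions.
    private static Vector3 GetVector3FromJoint(Windows.Kinect.Joint joint)
    {
        // Scale camera-space metres up so the skeleton is visible in the Unity scene.
        return new Vector3(joint.Position.X * 10f, joint.Position.Y * 10f, joint.Position.Z * 10f);
    }

    private static Vector3 GetDirection(Windows.Kinect.Joint from, Windows.Kinect.Joint to)
    {
        // Normalized direction pointing from one joint towards another.
        return (GetVector3FromJoint(to) - GetVector3FromJoint(from)).normalized;
    }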
Exemplo n.º 57
        /// <summary>
        /// Initializes a new instance of the MainWindow class.
        /// </summary>
        public MainWindow()
        {
            // one sensor is currently supported
            this.kinectSensor = KinectSensor.GetDefault();

            // get the coordinate mapper
            this.coordinateMapper = this.kinectSensor.CoordinateMapper;

            // get the color frame details
            FrameDescription frameDescription = this.kinectSensor.ColorFrameSource.FrameDescription;

            // set the display specifics
            this.displayWidth  = frameDescription.Width;
            this.displayHeight = frameDescription.Height;
            this.displayRect   = new Rect(0.0, 0.0, this.displayWidth, this.displayHeight);

            // open the reader for the body frames
            this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

            // wire handler for body frame arrival
            this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

            // set the maximum number of bodies that would be tracked by Kinect
            this.bodyCount = this.kinectSensor.BodyFrameSource.BodyCount;

            // allocate storage to store body objects
            this.bodies = new Body[this.bodyCount];

            // specify the required face frame results
            FaceFrameFeatures faceFrameFeatures =
                FaceFrameFeatures.BoundingBoxInColorSpace
                | FaceFrameFeatures.PointsInColorSpace
                | FaceFrameFeatures.RotationOrientation
                | FaceFrameFeatures.FaceEngagement
                | FaceFrameFeatures.Glasses
                | FaceFrameFeatures.Happy
                | FaceFrameFeatures.LeftEyeClosed
                | FaceFrameFeatures.RightEyeClosed
                | FaceFrameFeatures.LookingAway
                | FaceFrameFeatures.MouthMoved
                | FaceFrameFeatures.MouthOpen;

            // create a face frame source + reader to track each face in the FOV
            this.faceFrameSources = new FaceFrameSource[this.bodyCount];
            this.faceFrameReaders = new FaceFrameReader[this.bodyCount];
            for (int i = 0; i < this.bodyCount; i++)
            {
                // create the face frame source with the required face frame features and an initial tracking Id of 0
                this.faceFrameSources[i] = new FaceFrameSource(this.kinectSensor, 0, faceFrameFeatures);

                // open the corresponding reader
                this.faceFrameReaders[i] = this.faceFrameSources[i].OpenReader();
            }

            // allocate storage to store face frame results for each face in the FOV
            this.faceFrameResults = new FaceFrameResult[this.bodyCount];

            // populate face result colors - one for each face index
            this.faceBrush = new List<Brush>()
            {
                Brushes.White,
                Brushes.Orange,
                Brushes.Green,
                Brushes.Red,
                Brushes.LightBlue,
                Brushes.Yellow
            };

            // set IsAvailableChanged event notifier
            this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            // open the sensor
            this.kinectSensor.Open();

            // set the status text
            this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                            : Properties.Resources.NoSensorStatusText;

            // Create the drawing group we'll use for drawing
            this.drawingGroup = new DrawingGroup();

            // Create an image source that we can use in our image control
            this.imageSource = new DrawingImage(this.drawingGroup);

            // use the window object as the view model in this simple example
            this.DataContext = this;

            // initialize the components (controls) of the window
            this.InitializeComponent();
        }
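
The face frame sources above are created with an initial tracking Id of 0, so they produce no results until each one is bound to a tracked body. In FaceBasics-style samples this binding typically happens in the body frame handler; a minimal sketch using the fields declared in this constructor (the method name is illustrative):

        // Sketch: give each face source the TrackingId of a tracked body so its
        // reader starts delivering FaceFrameResults for that body.
        private void UpdateFaceTrackingIds()
        {
            for (int i = 0; i < this.bodyCount; i++)
            {
                if (this.bodies[i] != null && this.bodies[i].IsTracked &&
                    !this.faceFrameSources[i].IsTrackingIdValid)
                {
                    this.faceFrameSources[i].TrackingId = this.bodies[i].TrackingId;
                }
            }
        }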
Exemplo n.º 58
        /// <summary>
        /// Takes in the depth frame and converts it to camera space.
        /// </summary>
        public unsafe void SetupCanvasFrame()
        {
            ushort *frameData = (ushort *)_depthFrameData;

            Parallel.For(0, TouchWallApp.KinectHeight, depthArrayRowIndex =>
            {
                for (int depthArrayColumnIndex = 0; depthArrayColumnIndex < TouchWallApp.KinectWidth; depthArrayColumnIndex++)
                {
                    int depthIndex = depthArrayColumnIndex + (depthArrayRowIndex * TouchWallApp.KinectWidth);
                    ushort depth   = frameData[depthIndex];
                    if (depth < 500 || depth > 5000)
                    {
                        // The filtering code below is computationally heavy;
                        // re-enable it if you have enough CPU horsepower.

                        /*
                         * int x = depthIndex % TouchWallApp.KinectWidth;
                         * int y = (depthIndex - x) / TouchWallApp.KinectWidth;
                         *
                         * ushort[,] filterCollection = new ushort[24, 2];
                         *
                         * int innerBandCount = 0;
                         * int outerBandCount = 0;
                         *
                         * for (int yi = -2; yi < 3; yi++)
                         * {
                         *  for (int xi = -2; xi < 3; xi++)
                         *  {
                         *      if (xi != 0 || yi != 0)
                         *      {
                         *
                         *          var xSearch = x + xi;
                         *          var ySearch = y + yi;
                         *
                         *          if (xSearch >= 0 && xSearch < TouchWallApp.KinectWidth &&
                         *              ySearch >= 0 && ySearch < TouchWallApp.KinectHeight)
                         *          {
                         *              int index = xSearch + (ySearch * TouchWallApp.KinectWidth);
                         *              if (frameData[index] < 5000 && frameData[index] > 500)
                         *              {
                         *                  // We want to count the frequency of each depth
                         *                  for (int i = 0; i < 24; i++)
                         *                  {
                         *                      if (filterCollection[i, 0] == frameData[index])
                         *                      {
                         *                          // When the depth is already in the filter collection
                         *                          // we will just increment the frequency.
                         *                          filterCollection[i, 1]++;
                         *                          break;
                         *                      }
                         *                      else if (filterCollection[i, 0] == 0)
                         *                      {
                         *                          // When we encounter a 0 depth in the filter collection
                         *                          // this means we have reached the end of values already counted.
                         *                          // We will then add the new depth and start its frequency at 1.
                         *                          filterCollection[i, 0] = frameData[index];
                         *                          filterCollection[i, 1]++;
                         *                          break;
                         *                      }
                         *                  }
                         *                  if (yi != 2 && yi != -2 && xi != 2 && xi != -2)
                         *                  {
                         *                      innerBandCount++;
                         *                  }
                         *                  else
                         *                  {
                         *                      outerBandCount++;
                         *                  }
                         *              }
                         *
                         *
                         *
                         *          }
                         *
                         *      }
                         *  }
                         *
                         * }
                         *
                         * if (innerBandCount >= 2 || outerBandCount >= 7)
                         * {
                         *  ushort frequency = 0;
                         *  ushort newdepth = 0;
                         *  // This loop will determine the statistical mode
                         *  // of the surrounding pixels for assignment to
                         *  // the candidate.
                         *  for (int i = 0; i < 24; i++)
                         *  {
                         *      // This means we have reached the end of our
                         *      // frequency distribution and can break out of the
                         *      // loop to save time.
                         *      if (filterCollection[i, 0] == 0)
                         *          break;
                         *      if (filterCollection[i, 1] > frequency)
                         *      {
                         *          newdepth = filterCollection[i, 0];
                         *          frequency = filterCollection[i, 1];
                         *      }
                         *
                         *  }
                         *
                         *  DepthPixels[depthIndex] = (byte) ((newdepth*256)/5000);
                         * }*/
                        // Comment out the next line if you want to use the above code.
                        DepthPixels[depthIndex] = 0;
                    }
                    else
                    {
                        DepthPixels[depthIndex] = (byte)((depth * 256) / 5000);
                    }
                }
            });

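            // Mark the middle row of the depth frame at full intensity (a visible horizontal line).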
            for (int i = 0; i < TouchWallApp.KinectWidth; i++)
            {
                int depthIndex = i + ((TouchWallApp.KinectHeight * TouchWallApp.KinectWidth) / 2);
                DepthPixels[depthIndex] = (byte)255;
            }

            CoordinateMapper m = TouchWallApp.KinectSensor.CoordinateMapper;

            ushort[] frameUshorts = new ushort[_depthFrameDataSize / sizeof(ushort)];
            for (int i = 0; i < _depthFrameDataSize / sizeof(ushort); i++)
            {
                frameUshorts[i] = frameData[i];
            }

            m.MapDepthFrameToCameraSpace(frameUshorts, _spacePoints); // X,Y,Z in terms of the CAMERA, not the user
            // Now _spacePoints holds a 3D representation, in camera space, of where everything is.
        }
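
After MapDepthFrameToCameraSpace runs, each element of _spacePoints holds the metric X/Y/Z of the corresponding depth pixel. A minimal, self-contained sketch of how such a buffer might be consumed, for instance to find the valid point closest to the sensor (the method and the 0.5 m threshold are assumptions, not part of the original class):

// Sketch only: map a depth frame to camera space and return the valid point nearest the sensor.
private static CameraSpacePoint FindNearestPoint(CoordinateMapper mapper, ushort[] depthFrame)
{
    CameraSpacePoint[] spacePoints = new CameraSpacePoint[depthFrame.Length];
    mapper.MapDepthFrameToCameraSpace(depthFrame, spacePoints);

    CameraSpacePoint nearest = default(CameraSpacePoint);
    float nearestZ = float.MaxValue;
    foreach (CameraSpacePoint p in spacePoints)
    {
        // Pixels the mapper cannot resolve come back as infinity; also ignore anything closer than 0.5 m.
        if (!float.IsInfinity(p.Z) && p.Z > 0.5f && p.Z < nearestZ)
        {
            nearestZ = p.Z;
            nearest  = p;
        }
    }
    return nearest;
}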
Exemplo n.º 59
 public RockViewModel(DepthSpacePoint depPoint, ushort dep,
                      double widthOnCanvas, double heightOnCanvas, Canvas canvas, CoordinateMapper coorMap) :
     this(coorMap.MapDepthPointToCameraSpace(depPoint, dep), new Size(widthOnCanvas, heightOnCanvas), canvas, coorMap)
 {
 }
 /// <summary>
 /// Creates a new instance of <see cref="GreenScreenBitmapGenerator"/>.
 /// </summary>
 /// <param name="mapper">The coordinate mapper used for the background removal.</param>
 public GreenScreenBitmapGenerator(CoordinateMapper mapper)
 {
     CoordinateMapper = mapper;
 }
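
The constructor above only stores the mapper; the background-removal work happens elsewhere in the class and is not shown in this excerpt. As a sketch of one common Kinect v2 approach (the method name, parameters, and use of a body index frame are assumptions, not the original implementation): map every color pixel into depth space and keep only those that land on a player.

 // Sketch: keep color pixels that map onto a body-index pixel; clear the alpha of everything else.
 public void ApplyGreenScreen(ushort[] depthData, byte[] bodyIndexData,
                              byte[] colorPixelsBgra, int colorWidth, int colorHeight)
 {
     const int DepthWidth = 512, DepthHeight = 424; // Kinect v2 depth resolution

     DepthSpacePoint[] colorMappedToDepth = new DepthSpacePoint[colorWidth * colorHeight];
     CoordinateMapper.MapColorFrameToDepthSpace(depthData, colorMappedToDepth);

     for (int colorIndex = 0; colorIndex < colorMappedToDepth.Length; colorIndex++)
     {
         DepthSpacePoint p = colorMappedToDepth[colorIndex];
         bool onPlayer = false;
         if (!float.IsNegativeInfinity(p.X) && !float.IsNegativeInfinity(p.Y))
         {
             int depthX = (int)(p.X + 0.5f);
             int depthY = (int)(p.Y + 0.5f);
             if (depthX >= 0 && depthX < DepthWidth && depthY >= 0 && depthY < DepthHeight)
             {
                 // 0xff in the body index frame means "no player" at that depth pixel.
                 onPlayer = bodyIndexData[(depthY * DepthWidth) + depthX] != 0xff;
             }
         }
         if (!onPlayer)
         {
             colorPixelsBgra[(colorIndex * 4) + 3] = 0; // make background pixels transparent
         }
     }
 }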