Example #1
 public void RelativeTime()
 {
     bodyReader = GetBodyFrameReader();
     using ( var bodyFrame = bodyReader.AcquireLatestFrame() ) {
         Assert.AreNotEqual( 0, bodyFrame.RelativeTime() );
     }
 }
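A note on the pattern above: AcquireLatestFrame returns null when no new body frame has arrived since the last call, so callers normally guard the frame before using it. A minimal sketch (the bodyReader field is assumed, as in the test above), not taken from any of the examples:

 using (var bodyFrame = bodyReader.AcquireLatestFrame())
 {
     // AcquireLatestFrame may return null; only touch the frame when it exists.
     if (bodyFrame != null)
     {
         var bodies = new Body[bodyFrame.BodyCount];
         bodyFrame.GetAndRefreshBodyData(bodies);
     }
 }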
Example #2
        // Window Loaded event handler; runs once the window has loaded.
        private void Window_Loaded(object sender, RoutedEventArgs e)
        {
            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                _sensor.Open();

                _bodies = new Body[_sensor.BodyFrameSource.BodyCount];

                _reader = _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.Infrared | FrameSourceTypes.Body);
                _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;

                _bodyReader = _sensor.BodyFrameSource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                // 2) Initialize the face source with the desired features
                _faceSource = new FaceFrameSource(_sensor, 0, FaceFrameFeatures.BoundingBoxInColorSpace |
                                                              FaceFrameFeatures.FaceEngagement |
                                                              FaceFrameFeatures.Glasses |
                                                              FaceFrameFeatures.Happy |
                                                              FaceFrameFeatures.LeftEyeClosed |
                                                              FaceFrameFeatures.MouthOpen |
                                                              FaceFrameFeatures.PointsInColorSpace |
                                                              FaceFrameFeatures.RightEyeClosed);
                _faceReader = _faceSource.OpenReader();
                _faceReader.FrameArrived += FaceReader_FrameArrived;
            }
        }
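The sample wires up BodyReader_FrameArrived but does not show it. A FaceFrameSource created with a tracking id of 0 only starts producing face frames once it is given the id of a tracked body, which is typically done inside that handler. A hedged sketch, assuming the _bodies and _faceSource fields from a setup like the one above plus a System.Linq using:

        private void BodyReader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
        {
            using (var frame = e.FrameReference.AcquireFrame())
            {
                if (frame == null) return;

                frame.GetAndRefreshBodyData(_bodies);
                var body = _bodies.FirstOrDefault(b => b != null && b.IsTracked);

                // The face source stays silent until its TrackingId points at a tracked body.
                if (body != null && !_faceSource.IsTrackingIdValid)
                {
                    _faceSource.TrackingId = body.TrackingId;
                }
            }
        }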
Example #3
        public MainWindow()
        {
            InitializeComponent();
            var hubConnection = new HubConnection("http://divewakeweb.azurewebsites.net/");
            stockTickerHubProxy = hubConnection.CreateHubProxy("WakeHub");
            hubConnection.Start().Wait();
            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                _sensor.Open();

                _bodies = new Body[_sensor.BodyFrameSource.BodyCount];

                _colorReader = _sensor.ColorFrameSource.OpenReader();
                _colorReader.FrameArrived += ColorReader_FrameArrived;
                _bodyReader = _sensor.BodyFrameSource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                // 2) Initialize the face source with the desired features
                _faceSource = new FaceFrameSource(_sensor, 0, FaceFrameFeatures.BoundingBoxInColorSpace |
                                                              FaceFrameFeatures.FaceEngagement |
                                                              FaceFrameFeatures.Glasses |
                                                              FaceFrameFeatures.LeftEyeClosed |
                                                              FaceFrameFeatures.PointsInColorSpace |
                                                              FaceFrameFeatures.RightEyeClosed);
                _faceReader = _faceSource.OpenReader();
                _faceReader.FrameArrived += FaceReader_FrameArrived;
            }
        }
Example #4
        public Recorder(string filename, Dispatcher dispatcher)
        {
            if (!sensor.IsOpen)
            {
                sensor.Open();
            }

            if (sensor.IsOpen)
            {
                bodyReader = sensor.BodyFrameSource.OpenReader();
                colorReader = sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Body | FrameSourceTypes.Depth | FrameSourceTypes.Color);

                logger.Trace("Kinect sensor is open");
            }
            else
            {
                logger.Error("Kinect sensor is not open");
            }

            metadata = MetadataFactory.Create();

            fileStream = new FileStream(filename, FileMode.Create);
            AppendMessageToFileStream(metadata.Serialize());

            this.dispatcher = dispatcher;
        }
        public MainWindow()
        {
            // Get the sensor
            sensor = KinectSensor.GetDefault();
            sensor.Open();

            // Setup readers for each source of data we want to use
            colorFrameReader = sensor.ColorFrameSource.OpenReader();
            bodyFrameReader = sensor.BodyFrameSource.OpenReader();

            // Setup event handlers that use what we get from the readers
            colorFrameReader.FrameArrived += this.Reader_ColorFrameArrived;
            bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

            // Get ready to draw graphics
            drawingGroup = new DrawingGroup();

            // Initialize the components (controls) of the window
            InitializeComponent();

            // Initialize color components

            // create the bitmap to display
            colorBitmap = new WriteableBitmap(1920, 1080, 96.0, 96.0, PixelFormats.Bgr32, null);
            ColorImage.Source = colorBitmap;

            // Initialize the game components
            birdHeight = this.Height / 2; // put the bird in the middle of the screen
            prevRightHandHeight = 0;
            prevLeftHandHeight = 0;
            pipeX = -1;
            pipeGapY = 250;
            pipeGapLength = 170;
            randomGenerator = new Random();
        }
        /// <summary>
        /// The main window of the app.
        /// </summary>
        public MainWindow()
        {
            InitializeComponent();

            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                _depthReader = _sensor.DepthFrameSource.OpenReader();
                _depthReader.FrameArrived += DepthReader_FrameArrived;

                _infraredReader = _sensor.InfraredFrameSource.OpenReader();
                _infraredReader.FrameArrived += InfraredReader_FrameArrived;

                _bodyReader = _sensor.BodyFrameSource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;
                _bodies = new Body[_sensor.BodyFrameSource.BodyCount];

                // Initialize the HandsController and subscribe to the HandsDetected event.
                _handsController = new HandsController();
                _handsController.HandsDetected += HandsController_HandsDetected;

                _sensor.Open();
            }
        }
Example #7
		//Run the application async
		static async Task RunAsync()
		{
			//Get the default Kinect Sensor
			_kinectSensor = KinectSensor.GetDefault();

			// open the reader for the body frames
			_bodyFrameReader = _kinectSensor.BodyFrameSource.OpenReader();

			// Set the coordinate Mapper
			_coordinateMapper = _kinectSensor.CoordinateMapper;

			//open the sensor
			_kinectSensor.Open();

			//Check if the Sensor is available
			Console.WriteLine("Kinect sensor is " + (_kinectSensor.IsAvailable ? "available " : "missing. Waiting for sensor: press ctrl + c to abort"));
			while (!_kinectSensor.IsAvailable)
			{
				//wait for sensor
			}
			Console.WriteLine("Kinect sensor is " + (_kinectSensor.IsAvailable ? "available " : "missing. Waiting for sensor: press ctrl + c to abort"));

			//Init gesture
			_handOverHeadDetector = new HandOverHeadDetector(HandDetectionType.BothHands, HandState.Open);
			//Subscribe to completed event
			_handOverHeadDetector.GestureCompleteEvent += HandOverHeadDetectorOnGestureCompleteEvent;

			//Start receiving Kinect frames
			if (_bodyFrameReader != null)
			{
				_bodyFrameReader.FrameArrived += Reader_FrameArrived;
			}
		}
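Reader_FrameArrived is subscribed above but not shown. A hedged sketch of what such a handler usually looks like; the call that forwards the body to _handOverHeadDetector is only a placeholder, since the detector's actual method name depends on the gesture library used here:

        private static void Reader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
        {
            using (var frame = e.FrameReference.AcquireFrame())
            {
                if (frame == null) return;

                var bodies = new Body[frame.BodyCount];
                frame.GetAndRefreshBodyData(bodies);

                foreach (var body in bodies)
                {
                    if (body != null && body.IsTracked)
                    {
                        // e.g. _handOverHeadDetector.Update(body);  // hypothetical method name
                    }
                }
            }
        }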
        public MainWindow()
        {
            InitializeComponent();
            network.init();
             _sensor = KinectSensor.GetDefault();
            if(_sensor != null)
            {
                _sensor.Open();

                // Identify the bodies
                _bodies = new Body[_sensor.BodyFrameSource.BodyCount];

                _colorReader = _sensor.ColorFrameSource.OpenReader();
                _colorReader.FrameArrived += ColorReader_FrameArrived;
                _bodyReader = _sensor.BodyFrameSource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                // Initialize the face source with the desired features; some are commented out and can be included later.
                _faceSource = new FaceFrameSource(_sensor, 0, FaceFrameFeatures.BoundingBoxInColorSpace);// |
                /*
                                                                FaceFrameFeatures.FaceEngagement |
                                                                FaceFrameFeatures.Glasses |
                                                                FaceFrameFeatures.Happy |
                                                                FaceFrameFeatures.LeftEyeClosed |
                                                                FaceFrameFeatures.MouthOpen |
                                                                FaceFrameFeatures.PointsInColorSpace |
                                                                FaceFrameFeatures.RightEyeClosed);
                                                                */

                _faceReader = _faceSource.OpenReader();
                _faceReader.FrameArrived += FaceReader_FrameArrived;
            }
        }
Example #9
        /// <summary>
        /// Constructor
        /// </summary>
        public MainWindow()
        {
            InitializeComponent();

            // Init Kinect Sensors
            this.kinect = KinectSensor.GetDefault();

            if (kinect == null)
            {
                this.showCloseDialog("Kinectが接続されていないか、利用できません。アプリケーションを終了します。");
            }

            this.colorImageFormat = ColorImageFormat.Bgra;
            this.colorFrameDescription = this.kinect.ColorFrameSource.CreateFrameDescription(this.colorImageFormat);
            this.colorFrameReader = this.kinect.ColorFrameSource.OpenReader();
            this.colorFrameReader.FrameArrived += ColorFrameReader_FrameArrived;
            bodyFrameReader = kinect.BodyFrameSource.OpenReader();
            bodyFrameReader.FrameArrived += bodyFrameReader_FrameArrived;

            this.kinect.Open();
            this.bodies = new Body[kinect.BodyFrameSource.BodyCount];

            KinectRegion.SetKinectRegion(this, kinectRegion);
            this.kinectRegion.KinectSensor = KinectSensor.GetDefault();

            this.isTraining = false;
        }
        public MainWindow()
        {
            InitializeComponent();
            try
            {
                ///Make sure the Kinect unit is connected; as I recall this ends up null/false when nothing is connected
                this.kinect = KinectSensor.GetDefault();
                ///Specify the image format to read (RGB, etc.) and set up the reader that will read it
                this.colorImageFormat = ColorImageFormat.Bgra;
                this.colorFrameDescription = this.kinect.ColorFrameSource.CreateFrameDescription(this.colorImageFormat);
                this.colorFrameReader = this.kinect.ColorFrameSource.OpenReader();
                this.colorFrameReader.FrameArrived += colorFrameReader_FrameArrived;
                this.kinect.Open();//Start the Kinect!!
                if (!kinect.IsOpen)
                {
                    this.errorLog.Visibility = Visibility.Visible;
                    this.errorLog.Content = "キネクトが見つからないよ!残念!";
                    throw new Exception("キネクトが見つかりませんでした!!!");
                }
                ///Create the array to hold the Body data
                bodies = new Body[kinect.BodyFrameSource.BodyCount];

                ///Open the body reader
                bodyFrameReader = kinect.BodyFrameSource.OpenReader();
                bodyFrameReader.FrameArrived += bodyFrameReader_FrameArrived;
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
                Close();
            }
        }
Example #11
        public MainWindow()
        {
            InitializeComponent();

            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                _sensor.Open();

                _bodies = new Body[_sensor.BodyFrameSource.BodyCount];

                _colorReader = _sensor.ColorFrameSource.OpenReader();
                _colorReader.FrameArrived += ColorReader_FrameArrived;
                _bodyReader = _sensor.BodyFrameSource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                // 2) Initialize the face source with the desired features
                _faceSource = new FaceFrameSource(_sensor, 0, FaceFrameFeatures.BoundingBoxInColorSpace |
                                                              FaceFrameFeatures.FaceEngagement |
                                                              FaceFrameFeatures.Glasses |
                                                              FaceFrameFeatures.Happy |
                                                              FaceFrameFeatures.LeftEyeClosed |
                                                              FaceFrameFeatures.MouthOpen |
                                                              FaceFrameFeatures.PointsInColorSpace |
                                                              FaceFrameFeatures.RightEyeClosed |
                                                              FaceFrameFeatures.LookingAway);
                _faceReader = _faceSource.OpenReader();
                _faceReader.FrameArrived += FaceReader_FrameArrived;
            }
        }
Example #12
        public MainWindow()
        {
            InitializeComponent();
            _sensor = KinectSensor.GetDefault();

            if (_sensor != null)
            {
                // Listen for body data.
                _bodySource = _sensor.BodyFrameSource;
                _bodyReader = _bodySource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                _colorReader = _sensor.ColorFrameSource.OpenReader();
                _colorReader.FrameArrived += ColorReader_FrameArrived;

                // Listen for HD face data.
                _faceSource = new HighDefinitionFaceFrameSource(_sensor);
                _faceSourceSub = new HighDefinitionFaceFrameSource(_sensor);
               // _faceSource.TrackingIdLost += OnTrackingIdLost;
                _faceReader = _faceSource.OpenReader();
                _faceReaderSub = _faceSourceSub.OpenReader();

                _faceReader.FrameArrived += FaceReader_FrameArrived;
                _faceReaderSub.FrameArrived += FaceReaderSub_FrameArrived;

                _faceModel = new FaceModel();
                _faceAlignment = new FaceAlignment();
                _faceAlignmentSub = new FaceAlignment();
                // Start tracking!        
                _sensor.Open();
            }
        }
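As with the regular face source, a HighDefinitionFaceFrameSource only delivers frames once its TrackingId has been set to a tracked body's id, which this sample presumably does inside BodyReader_FrameArrived. A minimal sketch of that assignment (trackedBody is an assumed local):

                if (trackedBody != null && !_faceSource.IsTrackingIdValid)
                {
                    _faceSource.TrackingId = trackedBody.TrackingId;
                }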
        public KinectController()
        {
            kinectSensor = KinectSensor.GetDefault();

            // open the reader for the body frames
            bodyReader = kinectSensor.BodyFrameSource.OpenReader();
            kinectSensor.Open();

            Arm = ArmPointing.Nothing;
            hasPointed = false;
            lastAveragePositionLeft = 0f;
            lastAveragePositionRight = 0f;
            frameCounterLeft = 0;
            frameCounterRight = 0;

            if (!File.Exists(OPT_FILE))
            {
                offsetX = 0;
                offsetY = 0;
            } else
            {
                string data = File.ReadAllText(OPT_FILE);
                List <float> offset = JsonConvert.DeserializeObject<List<float>>(data);
                offsetX = offset[0];
                offsetY = offset[1];
            }
        }
Example #14
        void MainWindow_Loaded( object sender, RoutedEventArgs e )
        {
            kinect = Kinect2.KinectSensor.Default;
            kinect.Open();
            colorReader = kinect.ColorFrameSource.OpenReader();
            bodyReader = kinect.BodyFrameSource.OpenReader();

            CompositionTarget.Rendering += CompositionTarget_Rendering;
        }
        /// <summary>
        /// Constructor
        /// </summary>
        /// <param name="sensor">Kinect sensor</param>
        public KinectSensorBodyFrameProvider(KinectSensor sensor)
        {
            if (sensor == null)
                throw new ArgumentNullException("sensor");

            this.sensor = sensor;
            this.reader = this.sensor.BodyFrameSource.OpenReader();
            this.reader.FrameArrived += this.FrameArrived;
        }
Example #16
        public void AcquireLatestFrame()
        {
            bodyReader = GetBodyFrameReader();
            using ( var bodyFrame = bodyReader.AcquireLatestFrame() ) {
            }

            using ( var bodyFrame = bodyReader.AcquireLatestFrame() ) {
            }
        }
 public HandOverheadEngagementModel(int engagedPeopleAllowed)
 {
     this.EngagedPeopleAllowed = engagedPeopleAllowed;
     var sensor = KinectSensor.GetDefault();
     this.bodyReader = sensor.BodyFrameSource.OpenReader();
     this.bodyReader.FrameArrived += this.BodyReader_FrameArrived;
     sensor.Open();
     this.bodies = new Body[this.bodyReader.BodyFrameSource.BodyCount];
     this.handsToEngage = new List<BodyHandPair>();
 }
        public void Start(SlideShowWindow slideShowWindow)
        {
            _slideShowWindow = slideShowWindow;

            _bodyFrameReader = _kinectSensor.BodyFrameSource.OpenReader();
            _bodyFrameReader.FrameArrived += OnBodyFrameArrived;

            _bodies.Clear();
            _scans.Clear();
        }
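Start opens a new reader every time it is called. If it can be invoked more than once per session, a hedged variant disposes the previous reader first so the handler is not subscribed twice (this is an illustrative change, not part of the sample above):

            if (_bodyFrameReader != null)
            {
                _bodyFrameReader.FrameArrived -= OnBodyFrameArrived;
                _bodyFrameReader.Dispose();
            }

            _bodyFrameReader = _kinectSensor.BodyFrameSource.OpenReader();
            _bodyFrameReader.FrameArrived += OnBodyFrameArrived;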
Example #19
 public void GetAndRefreshBodyData()
 {
     bodyReader = GetBodyFrameReader();
     using ( var bodyFrame = bodyReader.AcquireLatestFrame() ) {
         Body[] bodies = new Body[6];
         bodyFrame.GetAndRefreshBodyData( bodies );
         foreach ( var body in bodies ) {
         }
     }
 }
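GetAndRefreshBodyData fills the supplied array in place and reuses the Body objects across frames, so the array is normally allocated once, sized from BodyFrameSource.BodyCount rather than a hard-coded 6, and only tracked bodies are processed. A minimal sketch assuming sensor and bodyReader fields:

 Body[] bodies = new Body[sensor.BodyFrameSource.BodyCount];

 using (var bodyFrame = bodyReader.AcquireLatestFrame())
 {
     if (bodyFrame != null)
     {
         bodyFrame.GetAndRefreshBodyData(bodies);
         foreach (var body in bodies)
         {
             if (body != null && body.IsTracked)
             {
                 // e.g. read body.Joints[JointType.Head].Position here
             }
         }
     }
 }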
Example #20
        public DebugWindow(Controller.GameController GameController)
        {
            InitializeComponent();
            db = new Helpers.DBHelper();
            attGameController = GameController;

            attks = KinectSensor.GetDefault();
            attks.Open();
            attBodyFrameReader = attks.BodyFrameSource.OpenReader();
            attBodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;
        }
        private void Window_Closing( object sender, System.ComponentModel.CancelEventArgs e )
        {
            if ( bodyFrameReader != null ) {
                bodyFrameReader.Dispose();
                bodyFrameReader = null;
            }

            if ( kinect != null ) {
                kinect.Close();
                kinect = null;
            }
        }
 public GameModeWindow()
 {
     InitializeComponent();
     KinectRegion.SetKinectRegion(this, kinectRegion);
     this.kinectRegion.KinectSensor = KinectSensor.GetDefault();
     this.bodyFrameReader = kinectRegion.KinectSensor.BodyFrameSource.OpenReader();
     box_items.ItemsSource = GameLogic.boxes;
     stall = false;
     game_finished = false;
     if (GameLogic.game_mode.popQuiz)
     {
         GameLogic.loadQuestions();
     }
 }
Example #23
        /// <summary>
        /// Initializes a new instance of the <see cref="MainPage"/> class.
        /// </summary>
        public MainPage()
        {
            this.InitializeComponent();

            RecordButton.Click += RecordButton_Click;

            ColorCompressionCombo.Items.Add("None (1920x1080)");
            ColorCompressionCombo.Items.Add("None (1280x720)");
            ColorCompressionCombo.Items.Add("None (640x360)");
            ColorCompressionCombo.Items.Add("JPEG (1920x1080)");
            ColorCompressionCombo.Items.Add("JPEG (1280x720)");
            ColorCompressionCombo.Items.Add("JPEG (640x360)");
            ColorCompressionCombo.SelectedIndex = 0;

            SmoothingCombo.Items.Add("None");
            SmoothingCombo.Items.Add("Kalman Filter");
            SmoothingCombo.Items.Add("Double Exponential");
            SmoothingCombo.SelectionChanged += SmoothingCombo_SelectionChanged;
            SmoothingCombo.SelectedIndex = 0;

            DisplayCombo.Items.Add("Body");
            DisplayCombo.Items.Add("Color");
            DisplayCombo.Items.Add("Depth");
            DisplayCombo.Items.Add("Infrared");
            DisplayCombo.SelectionChanged += DisplayCombo_SelectionChanged;
            DisplayCombo.SelectedIndex = 0;

            _sensor = KinectSensor.GetDefault();

            _bodyReader = _sensor.BodyFrameSource.OpenReader();
            _bodyReader.FrameArrived += _bodyReader_FrameArrived;

            _colorReader = _sensor.ColorFrameSource.OpenReader();
            _colorReader.FrameArrived += _colorReader_FrameArrived;
            var colorFrameDesc = _sensor.ColorFrameSource.FrameDescription;
            _colorData = new byte[colorFrameDesc.LengthInPixels * 4];

            _depthReader = _sensor.DepthFrameSource.OpenReader();
            _depthReader.FrameArrived += _depthReader_FrameArrived;
            var depthFrameDesc = _sensor.DepthFrameSource.FrameDescription;
            _depthData = new ushort[depthFrameDesc.LengthInPixels];

            _infraredReader = _sensor.InfraredFrameSource.OpenReader();
            _infraredReader.FrameArrived += _infraredReader_FrameArrived;
            var infraredFrameDesc = _sensor.InfraredFrameSource.FrameDescription;
            _infraredData = new ushort[infraredFrameDesc.LengthInPixels];

            _sensor.Open();
        }
Example #24
            public InterviewWindow()
            {
                question_index = 0;

                mplayer = new MediaPlayer();
                timer = new DispatcherTimer();
                timer.Interval = TimeSpan.FromSeconds(1);
                timer.Tick += timer_Tick;
                timer.Start();


                // only one sensor is currently supported
                this.kinectSensor = KinectSensor.GetDefault();

                // set IsAvailableChanged event notifier
                this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

                // open the sensor
                this.kinectSensor.Open();

                // set the status text
                this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                                : Properties.Resources.NoSensorStatusText;

                // open the reader for the body frames
                this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

                // set the BodyFrameArrived event notifier
                this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;
            
                // initialize the MainWindow
                this.InitializeComponent();

                // set our data context objects for display in UI
                //this.DataContext = this;

                // create a gesture detector for each body (6 bodies => 6 detectors) and create content controls to display results in the UI
                GestureResultView result = new GestureResultView(0, false, false, 0.0f);
                GestureDetector detector = new GestureDetector(this.kinectSensor, result);
                this.gestureDetector = detector;
                // split gesture results across the first two columns of the content grid
                //ContentControl contentControl = new ContentControl();
                //contentControl.Content = this.gestureDetector.GestureResultView;
                //Grid.SetColumn(contentControl, 0);
                //Grid.SetRow(contentControl, 0);
                //this.contentGrid.Children.Add(contentControl);

                play_audio(question_index);
            }
Example #25
        public KioskInteractionService(ISensorService<KinectSensor> sensorService,
            IDemographicsService demographicsService,
            IItemInteractionService itemInteractionService,
            IBodyTrackingService bodyTrackingService,
            IConfigurationProvider configurationProvider)
        {
            _currentZone = "NoTrack";
            _demographicService = demographicsService;

            _eventHub = new EventHubMessageSender(ConfigurationManager.AppSettings["Azure.Hub.Kiosk"]);

            _sensorService = sensorService;
              //  _telemetryService = telemetryService;

            _itemInteractionService = itemInteractionService;
            _itemInteractionService.ItemInteraction += _itemInteractionService_ItemInteraction;
            _coordinateMapper = _sensorService.Sensor.CoordinateMapper;

            _configurationProvider = configurationProvider;
            _configurationProvider.ConfigurationSettingsChanged += _configurationProvider_ConfigurationSettingsChanged;
            GetConfig();

            _sensorService.StatusChanged += _sensorService_StatusChanged;
            _bodyFrameReader = _sensorService.Sensor.BodyFrameSource.OpenReader();
            if (_bodyFrameReader != null)
                _bodyFrameReader.FrameArrived += _bodyFrameReader_FrameArrived;

            _sensorService.Open();

            _interactionProcessingQueue = new BlockingCollection<KioskStateEventArgs>();
            {
                IObservable<KioskStateEventArgs> ob = _interactionProcessingQueue.
                  GetConsumingEnumerable().
                  ToObservable(TaskPoolScheduler.Default);

                ob.Subscribe(p =>
                {
                    //var temp = Thread.CurrentThread.ManagedThreadId;
                    // This handler will get called whenever
                    // anything appears on myQueue in the future.
                    this.SendIteraction(p);
                    //Debug.Write("Consuming: {0}\n", p);
                });
            }

            _bodyTrackingService = bodyTrackingService;

            CurrentState = KioskStates.NoTrack;
        }
Example #26
        public void Dispose()
        {
            if (this.bodyFrameReader != null)
            {
                // BodyFrameReader is IDisposable
                this.bodyFrameReader.Dispose();
                this.bodyFrameReader = null;
            }

            if (this.kinectSensor != null)
            {
                this.kinectSensor.Close();
                this.kinectSensor = null;
            }
        }
Example #27
        public MainWindow()
        {
            this.myKinectSensor = KinectSensor.GetDefault();

            this.bodyFrameReader = this.myKinectSensor.BodyFrameSource.OpenReader();

            this.bones = new List<Tuple<JointType, JointType>>();
            this.bones.Add(new Tuple<JointType, JointType>(JointType.Head, JointType.Neck));
            this.bones.Add(new Tuple<JointType, JointType>(JointType.SpineMid, JointType.SpineBase));
            this.bones.Add(new Tuple<JointType, JointType>(JointType.KneeRight, JointType.AnkleRight));

            this.myKinectSensor.Open();

            this.InitializeComponent();
        }
Example #28
        public MainWindow()
        {
            this.depthFrameReader = this.kinectSensor.DepthFrameSource.OpenReader();
            this.depthFrameReader.FrameArrived += this.Reader_FrameArrived;
            this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

            this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

            depth = new DepthInterpreter(kinectSensor);

            InitializeComponent();

            kinectSensor.Open();

        }
        public MainWindow()
        {
            InitializeComponent();
            network.init();
            _sensor = KinectSensor.GetDefault();
            if(_sensor != null)
            {
                _sensor.Open();

                bodyCount = _sensor.BodyFrameSource.BodyCount;
                // Identify the bodies 
                _bodies = new Body[bodyCount];

                _colorReader = _sensor.ColorFrameSource.OpenReader();
                _colorReader.FrameArrived += ColorReader_FrameArrived;
                _bodyReader = _sensor.BodyFrameSource.OpenReader();
                _bodyReader.FrameArrived += BodyReader_FrameArrived;

                // Initialize the face source with the desired features.
                _faceSources = new FaceFrameSource[bodyCount];
                _faceReaders = new FaceFrameReader[bodyCount];

                for(int i = 0; i < bodyCount; i++)
                {
                    // Create the face frame source with the required features and initial tracking id of 0
                    _faceSources[i] = new FaceFrameSource(_sensor, 0, FaceFrameFeatures.BoundingBoxInColorSpace);

                    // open the corresponding reader
                    _faceReaders[i] = _faceSources[i].OpenReader();
                    _faceReaders[i].FrameArrived += FaceReader_FrameArrived;
                }

                _faceResults = new FaceFrameResult[bodyCount];

                // Set the arrays and values for person switches and timeouts
                personSize = 3;
                ims = new Image[3] {maskImage, maskImage2, maskImage3};
                trackedInd = new bool[3] { false, false, false };
                _persons = new Person[personSize];
                for(int i = 0; i < personSize; i++)
                {
                    _persons[i] = new Person(0, ims[i], -1);
                }
                paths = new String[3] { "pack://application:,,,/Images/tinfoil.png",
                                        "pack://application:,,,/Images/cowboy.png",
                                        "pack://application:,,,/Images/napolean.png"};
            }
        }
Example #30
 public void EnableSkeleton(bool enable, bool smooth)
 {
     if (enable && this.bodyreader == null && this.Runtime.IsAvailable)
     {
         this.bodyreader = this.Runtime.BodyFrameSource.OpenReader();
         this.bodyreader.FrameArrived += this.Runtime_SkeletonFrameReady;
     }
     else
     {
         if (this.bodyreader != null)
         {
             this.bodyreader.FrameArrived -= this.Runtime_SkeletonFrameReady;
             this.bodyreader.Dispose();
             this.bodyreader = null;
         }
     }
 }
Example #31
        /// <summary>
        /// Constructor
        /// </summary>
        public GaitParamWindow()
        {
            // Get the sensor
            this.kinectSensor = KinectSensor.GetDefault();

            // Get the coordinate mapper
            this.coordinateMapper = this.kinectSensor.CoordinateMapper;

            // Get the color frame description
            FrameDescription colorFrameDescription = this.kinectSensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);

            // Open the body frame reader
            this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

            // Open the color frame reader
            this.colorFrameReader = this.kinectSensor.ColorFrameSource.OpenReader();

            // Get the display extents used for skeleton (joint) space
            this.displayWidth  = colorFrameDescription.Width;
            this.displayHeight = colorFrameDescription.Height;

            // A list of tuples, one per bone
            this.bones = new List <Tuple <JointType, JointType> >();

            // Torso
            this.bones.Add(new Tuple <JointType, JointType>(JointType.Head, JointType.Neck));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.Neck, JointType.SpineShoulder));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.SpineMid));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineMid, JointType.SpineBase));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineBase, JointType.HipRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineBase, JointType.HipLeft));

            // Right arm
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ShoulderRight, JointType.ElbowRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ElbowRight, JointType.WristRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristRight, JointType.HandRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HandRight, JointType.HandTipRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristRight, JointType.ThumbRight));

            // Left arm
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ShoulderLeft, JointType.ElbowLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ElbowLeft, JointType.WristLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristLeft, JointType.HandLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HandLeft, JointType.HandTipLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristLeft, JointType.ThumbLeft));

            // Right leg
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HipRight, JointType.KneeRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.KneeRight, JointType.AnkleRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.AnkleRight, JointType.FootRight));

            // Left leg
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HipLeft, JointType.KneeLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.KneeLeft, JointType.AnkleLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.AnkleLeft, JointType.FootLeft));



            // One drawing color per tracked body
            this.bodyColors = new List <System.Windows.Media.Pen>();

            this.bodyColors.Add(new System.Windows.Media.Pen(System.Windows.Media.Brushes.Red, 15));
            this.bodyColors.Add(new System.Windows.Media.Pen(System.Windows.Media.Brushes.Orange, 15));
            this.bodyColors.Add(new System.Windows.Media.Pen(System.Windows.Media.Brushes.Green, 15));
            this.bodyColors.Add(new System.Windows.Media.Pen(System.Windows.Media.Brushes.Blue, 15));
            this.bodyColors.Add(new System.Windows.Media.Pen(System.Windows.Media.Brushes.Indigo, 15));
            this.bodyColors.Add(new System.Windows.Media.Pen(System.Windows.Media.Brushes.Violet, 15));

            // Open the sensor
            this.kinectSensor.Open();

            // Create the drawing objects and bind the window as the DataContext
            this.drawingGroup = new DrawingGroup();
            this.imageSource  = new DrawingImage(this.drawingGroup);
            this.DataContext  = this;

            // Create the bitmap to display
            this.colorBitmap = new WriteableBitmap(colorFrameDescription.Width, colorFrameDescription.Height, 96.0, 96.0, PixelFormats.Bgr32, null);
            InitializeComponent();
        }
Example #32
        /// <summary>
        /// Initializes a new instance of the MainWindow class.
        /// </summary>
        public MainWindow()
        {
            // one sensor is currently supported
            this.kinectSensor = KinectSensor.GetDefault();

            // get the coordinate mapper
            this.coordinateMapper = this.kinectSensor.CoordinateMapper;

            // get the depth (display) extents
            FrameDescription frameDescription = this.kinectSensor.DepthFrameSource.FrameDescription;

            // get size of joint space
            this.displayWidth  = frameDescription.Width;
            this.displayHeight = frameDescription.Height;



            //new vars

            // open the reader for the color frames
            this.colorFrameReader = this.kinectSensor.ColorFrameSource.OpenReader();

            // wire handler for frame arrival
            this.colorFrameReader.FrameArrived += this.Reader_ColorFrameArrived;


            // create the colorFrameDescription from the ColorFrameSource using Bgra format
            //added func to send image
            FrameDescription colorFrameDescription = this.kinectSensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);

            // create the bitmap to display
            this.colorBitmap = new WriteableBitmap(colorFrameDescription.Width, colorFrameDescription.Height, 96.0, 96.0, PixelFormats.Bgr32, null);

            //run python file on image


            ProcessStartInfo myProcessStartInfo = new ProcessStartInfo(@Directory.GetCurrentDirectory() + "/tensorflow_cpu/python.exe");

            myProcessStartInfo.Arguments = "evaluate.py";
            Process myProcess = new Process();

            myProcess.StartInfo = myProcessStartInfo;
            myProcess.Start();
            myProcess.WaitForExit();
            myProcess.Close();

            // read in file

            string[] lines = File.ReadAllLines("boxes.txt");

            foreach (string line in lines)
            {
                string[] sections = line.Split(':');

                string[] box    = sections[1].Split(',');
                double[] intBox = Array.ConvertAll(box, Double.Parse);
                double   x0     = intBox[0] * displayWidth;
                double   y0     = intBox[1] * displayHeight;
                double   w      = intBox[2] * displayWidth;
                double   h      = intBox[3] * displayHeight;
                double[] bbox   = { x0, y0, w, h };
                label    newBox = new label((sections[0]), bbox);
                this.Labels.Add(newBox);
            }



            // open the reader for the body frames
            this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

            // a bone defined as a line between two joints
            this.bones = new List <Tuple <JointType, JointType> >();

            // Torso
            this.bones.Add(new Tuple <JointType, JointType>(JointType.Head, JointType.Neck));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.Neck, JointType.SpineShoulder));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.SpineMid));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineMid, JointType.SpineBase));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineBase, JointType.HipRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineBase, JointType.HipLeft));

            // Right Arm
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ShoulderRight, JointType.ElbowRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ElbowRight, JointType.WristRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristRight, JointType.HandRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HandRight, JointType.HandTipRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristRight, JointType.ThumbRight));

            // Left Arm
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ShoulderLeft, JointType.ElbowLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ElbowLeft, JointType.WristLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristLeft, JointType.HandLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HandLeft, JointType.HandTipLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristLeft, JointType.ThumbLeft));

            // Right Leg
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HipRight, JointType.KneeRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.KneeRight, JointType.AnkleRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.AnkleRight, JointType.FootRight));

            // Left Leg
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HipLeft, JointType.KneeLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.KneeLeft, JointType.AnkleLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.AnkleLeft, JointType.FootLeft));

            // populate body colors, one for each BodyIndex
            this.bodyColors = new List <Pen>();

            this.bodyColors.Add(new Pen(Brushes.Red, 6));
            this.bodyColors.Add(new Pen(Brushes.Orange, 6));
            this.bodyColors.Add(new Pen(Brushes.Green, 6));
            this.bodyColors.Add(new Pen(Brushes.Blue, 6));
            this.bodyColors.Add(new Pen(Brushes.Indigo, 6));
            this.bodyColors.Add(new Pen(Brushes.Violet, 6));

            // set IsAvailableChanged event notifier
            this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            // open the sensor
            this.kinectSensor.Open();

            // set the status text
            this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                            : Properties.Resources.NoSensorStatusText;

            // Create the drawing group we'll use for drawing
            this.drawingGroup = new DrawingGroup();

            // Create an image source that we can use in our image control
            this.imageSource = new DrawingImage(this.drawingGroup);

            // use the window object as the view model in this simple example
            this.DataContext = this;

            // initialize the components (controls) of the window
            this.InitializeComponent();
        }
Example #33
        /// <summary>
        /// Initializes a new instance of the MainWindow class.
        /// </summary>
        public MainWindow()
        {
            // one sensor is currently supported
            this.kinectSensor = KinectSensor.GetDefault();

            // get the coordinate mapper
            this.coordinateMapper = this.kinectSensor.CoordinateMapper;

            // get the depth (display) extents
            FrameDescription frameDescription = this.kinectSensor.DepthFrameSource.FrameDescription;

            // get size of joint space
            this.displayWidth  = frameDescription.Width;
            this.displayHeight = frameDescription.Height;

            // open the reader for the body frames
            this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

            // a bone defined as a line between two joints
            this.bones = new List <Tuple <JointType, JointType> >();

            // Torso
            this.bones.Add(new Tuple <JointType, JointType>(JointType.Head, JointType.Neck));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.Neck, JointType.SpineShoulder));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.SpineMid));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineMid, JointType.SpineBase));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineBase, JointType.HipRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineBase, JointType.HipLeft));

            // Right Arm
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ShoulderRight, JointType.ElbowRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ElbowRight, JointType.WristRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristRight, JointType.HandRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HandRight, JointType.HandTipRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristRight, JointType.ThumbRight));

            // Left Arm
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ShoulderLeft, JointType.ElbowLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ElbowLeft, JointType.WristLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristLeft, JointType.HandLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HandLeft, JointType.HandTipLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristLeft, JointType.ThumbLeft));

            // Right Leg
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HipRight, JointType.KneeRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.KneeRight, JointType.AnkleRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.AnkleRight, JointType.FootRight));

            // Left Leg
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HipLeft, JointType.KneeLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.KneeLeft, JointType.AnkleLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.AnkleLeft, JointType.FootLeft));

            // populate body colors, one for each BodyIndex
            this.bodyColors = new List <Pen>();

            this.bodyColors.Add(new Pen(Brushes.Red, 6));
            this.bodyColors.Add(new Pen(Brushes.Orange, 6));
            this.bodyColors.Add(new Pen(Brushes.Green, 6));
            this.bodyColors.Add(new Pen(Brushes.Blue, 6));
            this.bodyColors.Add(new Pen(Brushes.Indigo, 6));
            this.bodyColors.Add(new Pen(Brushes.Violet, 6));

            // set IsAvailableChanged event notifier
            this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            // open the sensor
            this.kinectSensor.Open();

            // set the status text
            this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                            : Properties.Resources.NoSensorStatusText;

            // Create the drawing group we'll use for drawing
            this.drawingGroup = new DrawingGroup();

            // Create an image source that we can use in our image control
            this.imageSource = new DrawingImage(this.drawingGroup);

            // use the window object as the view model in this simple example
            this.DataContext = this;

            // initialize the components (controls) of the window
            this.InitializeComponent();
        }
Example #34
        /// <summary>
        /// Initializes a new instance of the MainWindow class
        /// </summary>
        public MainWindow()
        {
            // only one sensor is currently supported
            this.kinectSensor = KinectSensor.GetDefault();

            // set IsAvailableChanged event notifier
            this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            // open the sensor
            this.kinectSensor.Open();

            // set the status text
            this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                            : Properties.Resources.NoSensorStatusText;

            // open the reader for the body frames
            this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

            // set the BodyFrameArrived event notifier
            this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

            // initialize the BodyViewer object for displaying tracked bodies in the UI
            this.kinectBodyView = new KinectBodyView(this.kinectSensor);

            // initialize the MainWindow
            this.InitializeComponent();

            // set our data context objects for display in UI
            this.DataContext = this;
            this.kinectBodyViewbox.DataContext = this.kinectBodyView;

            // create a gesture detector for each body (6 bodies => 6 detectors) and create content controls to display results in the UI

            gestureResultView = new GestureResultView(false, false, 0.0f, "null");
            gestureDetector   = new GestureDetector(this.kinectSensor, gestureResultView);

            ContentControl contentControl = new ContentControl();

            contentControl.Content = this.gestureDetector.GestureResultView;

            Grid.SetColumn(contentControl, 0);
            Grid.SetRow(contentControl, 1);

            this.contentGrid.Children.Add(contentControl);

            MoveTo(0, 0);
            DispatcherTimer timer = new DispatcherTimer();

            timer.Interval = TimeSpan.FromSeconds(0.1);
            timer.Tick    += timer_Tick;
            timer.Start();

            this.Abcsissa = abcsissa;
            this.Ordinate = ordinate;
            if (serialAttached == true)
            {
                this.serialport     = new SerialPort();
                serialport.PortName = "COM3";
                serialport.Open();
                serialport.BaudRate = 57600;
            }
        }//main window
Example #35
        /// <summary>
        /// Releases unmanaged and - optionally - managed resources.
        /// </summary>
        /// <param name="disposing"><c>true</c> to release both managed and unmanaged resources; <c>false</c> to release only unmanaged resources.</param>
        protected virtual void Dispose(bool disposing)
        {
            if (disposing)
            {
                if (_bodyReader != null)
                {
                    _bodyReader.FrameArrived -= _bodyReader_FrameArrived;
                    _bodyReader.Dispose();
                    _bodyReader = null;
                }

                if (_colorReader != null)
                {
                    _colorReader.FrameArrived -= _colorReader_FrameArrived;
                    _colorReader.Dispose();
                    _colorReader = null;
                }

                if (_depthReader != null)
                {
                    _depthReader.FrameArrived -= _depthReader_FrameArrived;
                    _depthReader.Dispose();
                    _depthReader = null;
                }

                if (_infraredReader != null)
                {
                    _infraredReader.FrameArrived -= _infraredReader_FrameArrived;
                    _infraredReader.Dispose();
                    _infraredReader = null;
                }

                try
                {
                    _writerSemaphore.Wait();
                    if (_writer != null)
                    {
                        _writer.Flush();

                        if (_writer.BaseStream != null)
                        {
                            _writer.BaseStream.Flush();
                        }

                        _writer.Dispose();
                        _writer = null;
                    }
                }
                catch (Exception ex)
                {
                    // TODO: Change to log the error
                    System.Diagnostics.Debug.WriteLine(ex);
                }
                finally
                {
                    _writerSemaphore.Dispose();
                }

                if (_processFramesCancellationTokenSource != null)
                {
                    _processFramesCancellationTokenSource.Dispose();
                    _processFramesCancellationTokenSource = null;
                }
            }
        }
        /// <summary>
        /// Initializes a new instance of the MainWindow class.
        /// </summary>
        public MainWindow()
        {
            // Use the default Kinect as the sensor instance
            this.kinectSensor = KinectSensor.GetDefault();

            // Get the coordinate mapper
            this.coordinateMapper = this.kinectSensor.CoordinateMapper;

            // Get the depth (display) extents
            FrameDescription frameDescription = this.kinectSensor.DepthFrameSource.FrameDescription;

            // Get the size of the joint space
            this.displayWidth  = frameDescription.Width;
            this.displayHeight = frameDescription.Height;

            // Open the body frame reader for the current Kinect
            this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

            // A bone is defined as a line between two joints
            this.bones = new List <Tuple <JointType, JointType> >();

            // Torso
            this.bones.Add(new Tuple <JointType, JointType>(JointType.Head, JointType.Neck));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.Neck, JointType.SpineShoulder));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.SpineMid));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineMid, JointType.SpineBase));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineBase, JointType.HipRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineBase, JointType.HipLeft));

            // Right arm
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ShoulderRight, JointType.ElbowRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ElbowRight, JointType.WristRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristRight, JointType.HandRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HandRight, JointType.HandTipRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristRight, JointType.ThumbRight));

            // Left arm
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ShoulderLeft, JointType.ElbowLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ElbowLeft, JointType.WristLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristLeft, JointType.HandLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HandLeft, JointType.HandTipLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristLeft, JointType.ThumbLeft));

            // Populate body colors, one for each body index
            this.bodyColors = new List <Pen>();

            this.bodyColors.Add(new Pen(Brushes.Red, 6));
            this.bodyColors.Add(new Pen(Brushes.Orange, 6));
            this.bodyColors.Add(new Pen(Brushes.Green, 6));
            this.bodyColors.Add(new Pen(Brushes.Blue, 6));
            this.bodyColors.Add(new Pen(Brushes.Indigo, 6));
            this.bodyColors.Add(new Pen(Brushes.Violet, 6));

            // When Kinect availability changes, the IsAvailableChanged event fires and Sensor_IsAvailableChanged checks the current state
            this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            // Open the Kinect
            this.kinectSensor.Open();

            // Set the Kinect status text
            this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                            : Properties.Resources.NoSensorStatusText;

            // Create the drawing group we'll use for drawing
            this.drawingGroup = new DrawingGroup();

            // Create an image source that we can use in our image control
            this.imageSource = new DrawingImage(this.drawingGroup);

            // Use the window object as the view model in this simple example
            this.DataContext = this;

            // Initialize the components (controls) of the window
            this.InitializeComponent();
        }
Example #37
        public ExercisePage(String msg, String youtube_id)
        {
            //Client client = (Client)Application.Current.Resources["ApplicationScopeResource"];

            //string exercise = client.recvMsg();
            string exercise = msg;

            /**************************************************************************/
            // get the kinectSensor object
            this.kinectSensor = KinectSensor.GetDefault();

            // open the reader for the color frames
            this.colorFrameReader = this.kinectSensor.ColorFrameSource.OpenReader();

            // wire handler for frame arrival
            this.colorFrameReader.FrameArrived += this.Reader_ColorFrameArrived;

            // get the coordinate mapper
            this.coordinateMapper = this.kinectSensor.CoordinateMapper;

            // create the colorFrameDescription from the ColorFrameSource using Bgra format
            FrameDescription colorFrameDescription = this.kinectSensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);

            // get the depth frame description (note: the display extents below come from the color frame)
            FrameDescription frameDescription = this.kinectSensor.DepthFrameSource.FrameDescription;

            this.displayWidth  = colorFrameDescription.Width;
            this.displayHeight = colorFrameDescription.Height;


            // create the bitmap to display
            this.colorBitmap = new WriteableBitmap(colorFrameDescription.Width, colorFrameDescription.Height, 96.0, 96.0, PixelFormats.Bgr32, null);

            /***********************************************************************************************************/
            // open the reader for the body frames
            this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

            // a bone defined as a line between two joints
            this.bones = new List <Tuple <JointType, JointType> >();


            if (exercise.Equals("Lunges") || exercise.Equals("SidePlank") || exercise.Equals("StandingVRaise") || exercise.Equals("StandingPikeCrunch"))
            {
                // upper-body exercise

                // Torso
                this.bones.Add(new Tuple <JointType, JointType>(JointType.Head, JointType.Neck));
                this.bones.Add(new Tuple <JointType, JointType>(JointType.Neck, JointType.SpineShoulder));
                this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.SpineMid));
                this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineMid, JointType.SpineBase));
                this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderRight));
                this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderLeft));
                this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineBase, JointType.HipRight));
                this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineBase, JointType.HipLeft));

                // Right Arm
                this.bones.Add(new Tuple <JointType, JointType>(JointType.ShoulderRight, JointType.ElbowRight));
                this.bones.Add(new Tuple <JointType, JointType>(JointType.ElbowRight, JointType.WristRight));
                this.bones.Add(new Tuple <JointType, JointType>(JointType.WristRight, JointType.HandRight));
                this.bones.Add(new Tuple <JointType, JointType>(JointType.HandRight, JointType.HandTipRight));
                this.bones.Add(new Tuple <JointType, JointType>(JointType.WristRight, JointType.ThumbRight));

                // Left Arm
                this.bones.Add(new Tuple <JointType, JointType>(JointType.ShoulderLeft, JointType.ElbowLeft));
                this.bones.Add(new Tuple <JointType, JointType>(JointType.ElbowLeft, JointType.WristLeft));
                this.bones.Add(new Tuple <JointType, JointType>(JointType.WristLeft, JointType.HandLeft));
                this.bones.Add(new Tuple <JointType, JointType>(JointType.HandLeft, JointType.HandTipLeft));
                this.bones.Add(new Tuple <JointType, JointType>(JointType.WristLeft, JointType.ThumbLeft));
            }
            else
            {
                // lower-body exercise

                // Right Leg
                this.bones.Add(new Tuple <JointType, JointType>(JointType.HipRight, JointType.KneeRight));
                this.bones.Add(new Tuple <JointType, JointType>(JointType.KneeRight, JointType.AnkleRight));
                this.bones.Add(new Tuple <JointType, JointType>(JointType.AnkleRight, JointType.FootRight));

                // Left Leg
                this.bones.Add(new Tuple <JointType, JointType>(JointType.HipLeft, JointType.KneeLeft));
                this.bones.Add(new Tuple <JointType, JointType>(JointType.KneeLeft, JointType.AnkleLeft));
                this.bones.Add(new Tuple <JointType, JointType>(JointType.AnkleLeft, JointType.FootLeft));
            }

            // populate body colors, one for each BodyIndex
            this.bodyColors = new List <Pen>();

            this.bodyColors.Add(new Pen(Brushes.Red, 6));
            this.bodyColors.Add(new Pen(Brushes.Orange, 6));
            this.bodyColors.Add(new Pen(Brushes.Green, 6));
            this.bodyColors.Add(new Pen(Brushes.Blue, 6));
            this.bodyColors.Add(new Pen(Brushes.Indigo, 6));
            this.bodyColors.Add(new Pen(Brushes.Violet, 6));



            //*************************************************//
            // set IsAvailableChanged event notifier
            this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            // open the sensor
            this.kinectSensor.Open();

            // set the status text
            this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                            : Properties.Resources.NoSensorStatusText;


            //*********************************************

            // Create the drawing group we'll use for drawing
            this.drawingGroup = new DrawingGroup();

            // Create an image source that we can use in our image control
            this.imageSource = new DrawingImage(this.drawingGroup);

            // use the window object as the view model in this simple example
            this.DataContext = this;



            InitializeComponent();

            Gecko.Xpcom.Initialize("Firefox");

            WindowsFormsHost host    = new WindowsFormsHost();
            GeckoWebBrowser  browser = new GeckoWebBrowser();

            host.Child = browser;
            GridWeb.Children.Add(host);
            browser.Navigate("https://www.youtube.com/embed/" + youtube_id + "?autoplay=1");



            Ex_Name.Text = msg;
            SetDescription(msg);



            // set the YouTube URL
            //Youtube_View.URL = "https://www.youtube.com/embed/"+youtube_id+"?autoplay=1";


            /*
             * switch (exercise)
             * {
             *  case "Lunges":
             *      Youtube_View.URL = "https://www.youtube.com/embed/QF0BQS2W80k?autoplay=1";
             *      break;
             *  case "StandingPikeCrunch":
             *      Youtube_View.URL = "https://www.youtube.com/embed/QF0BQS2W80k?autoplay=1";
             *      break;
             *
             *  case "SidePlank":
             *      Youtube_View.URL = "https://www.youtube.com/embed/QF0BQS2W80k?autoplay=1";
             *      break;
             *
             *  case "StandingVRaise":
             *      Youtube_View.URL = "https://www.youtube.com/embed/QF0BQS2W80k?autoplay=1";
             *      break;
             *
             *  default:
             *      Youtube_View.URL = "https://www.youtube.com/embed/QF0BQS2W80k?autoplay=1";
             *      break;
             * }
             */
        }
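ExercisePage wires Reader_ColorFrameArrived above, but the handler body is not included in this listing. A hedged sketch of the usual copy-into-WriteableBitmap pattern, using the colorBitmap field created in this constructor, might look like this:

        private void Reader_ColorFrameArrived(object sender, ColorFrameArrivedEventArgs e)
        {
            using (ColorFrame colorFrame = e.FrameReference.AcquireFrame())
            {
                if (colorFrame == null)
                {
                    return;
                }

                FrameDescription desc = colorFrame.FrameDescription;

                this.colorBitmap.Lock();

                // only copy when the incoming frame still matches the bitmap size
                if ((desc.Width == this.colorBitmap.PixelWidth) && (desc.Height == this.colorBitmap.PixelHeight))
                {
                    colorFrame.CopyConvertedFrameDataToIntPtr(
                        this.colorBitmap.BackBuffer,
                        (uint)(desc.Width * desc.Height * 4),
                        ColorImageFormat.Bgra);

                    this.colorBitmap.AddDirtyRect(new Int32Rect(0, 0, this.colorBitmap.PixelWidth, this.colorBitmap.PixelHeight));
                }

                this.colorBitmap.Unlock();
            }
        }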
Ejemplo n.º 38
0
 void bodyFrameReader_FrameArrived(BodyFrameReader sender, BodyFrameArrivedEventArgs args)
 {
     UpdateBodyFrame(args);
     DrawBodyFrame();
 }
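UpdateBodyFrame and DrawBodyFrame are not shown in this listing; a hedged sketch of the refresh step (the bodies field name is an assumption) could look like this:

 Body[] bodies;

 void UpdateBodyFrame(BodyFrameArrivedEventArgs args)
 {
     using (var bodyFrame = args.FrameReference.AcquireFrame())
     {
         if (bodyFrame == null)
         {
             return;
         }

         // allocate once, then let the SDK refresh the same array every frame
         if (bodies == null)
         {
             bodies = new Body[bodyFrame.BodyCount];
         }

         bodyFrame.GetAndRefreshBodyData(bodies);
     }
 }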
        public MainWindow()
        {
            main = this;

            InitializeComponent();

            options.Add(new TodoItem()
            {
                Title = "Pesquisar Voo para Paris", Color = "#ff00BCF2"
            });
            options.Add(new TodoItem()
            {
                Title = "Pesquisar Voo para Roma"
            });
            options.Add(new TodoItem()
            {
                Title = "Pesquisar Voo para Londres"
            });
            options.Add(new TodoItem()
            {
                Title = "Pesquisar Hotel para Paris"
            });
            options.Add(new TodoItem()
            {
                Title = "Pesquisar Hotel para Roma"
            });
            options.Add(new TodoItem()
            {
                Title = "Pesquisar Hotel para Londres"
            });

            lbTodoList.ItemsSource = options;

            // only one sensor is currently supported
            this.kinectSensor = KinectSensor.GetDefault();

            // set IsAvailableChanged event notifier
            this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            // open the sensor
            this.kinectSensor.Open();

            // set the status text
            this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                            : Properties.Resources.NoSensorStatusText;

            // open the reader for the body frames
            this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

            // set the BodyFramedArrived event notifier
            this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

            // initialize the BodyViewer object for displaying tracked bodies in the UI
            this.kinectBodyView = new KinectBodyView(this.kinectSensor);

            // initialize the gesture detection objects for our gestures
            this.gestureDetectorList = new List <GestureDetector>();

            // initialize the MainWindow
            this.InitializeComponent();

            // set our data context objects for display in UI
            this.DataContext = this;
            this.kinectBodyViewbox.DataContext = this.kinectBodyView;

            // create a single gesture detector (the original per-body loop is commented out below) and a content control to display its result in the UI
            //int col0Row = 0;
            //int col1Row = 0;
            //int maxBodies = this.kinectSensor.BodyFrameSource.BodyCount;
            //for (int i = 0; i < maxBodies; ++i)
            //{
            GestureResultView result   = new GestureResultView(0, false, false, 0.0f);
            GestureDetector   detector = new GestureDetector(this.kinectSensor, result, this.main, circle, this.Dispatcher);

            this.gestureDetectorList.Add(detector);

            // split gesture results across the first two columns of the content grid
            ContentControl contentControl = new ContentControl();

            contentControl.Content = this.gestureDetectorList[0].GestureResultView;

            //if (i % 2 == 0)
            //{
            // Gesture results for bodies: 0, 2, 4
            Grid.SetColumn(contentControl, 0);
            Grid.SetRow(contentControl, 2);
            //++col0Row;
            //}
            //else
            //{
            // Gesture results for bodies: 1, 3, 5
            // Grid.SetColumn(contentControl, 1);
            //Grid.SetRow(contentControl, col1Row);
            // ++col1Row;
            //}

            this.contentGrid.Children.Add(contentControl);
            //}

            //init LifeCycleEvents..
            lce  = new LifeCycleEvents("TOUCH", "FUSION", "touch-1", "touch", "command"); // LifeCycleEvents(string source, string target, string id, string medium, string mode)
            mmic = new MmiCommunication("localhost", 9876, "User1", "TOUCH");             //PORT TO FUSION - uncomment this line to work with fusion later
            //mmic = new MmiCommunication("localhost", 8000, "User1", "ASR"); // MmiCommunication(string IMhost, int portIM, string UserOD, string thisModalityName)

            mmic.Send(lce.NewContextRequest());
        }
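The Reader_BodyFrameArrived handler registered above is not part of this listing. A hedged sketch, modeled on the SDK DiscreteGestureBasics sample, follows; the bodies field, KinectBodyView.UpdateBodyFrame, and the detector's TrackingId/IsPaused members are assumptions about the rest of this project:

        private Body[] bodies;

        private void Reader_BodyFrameArrived(object sender, BodyFrameArrivedEventArgs e)
        {
            using (BodyFrame bodyFrame = e.FrameReference.AcquireFrame())
            {
                if (bodyFrame == null)
                {
                    return;
                }

                if (this.bodies == null)
                {
                    this.bodies = new Body[bodyFrame.BodyCount];
                }

                bodyFrame.GetAndRefreshBodyData(this.bodies);
            }

            // show the skeletons and point the single detector at the first tracked body
            this.kinectBodyView.UpdateBodyFrame(this.bodies);

            GestureDetector detector = this.gestureDetectorList[0];
            detector.IsPaused = true;

            foreach (Body body in this.bodies)
            {
                if (body != null && body.IsTracked)
                {
                    detector.TrackingId = body.TrackingId;
                    detector.IsPaused   = false;
                    break;
                }
            }
        }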
        private void InitializeBodyFrameDisplayVariables()
        {
            // get the coordinate mapper
            coordinateMapper = sensor.CoordinateMapper;

            // get the depth (display) extents
            FrameDescription frameDescription = sensor.DepthFrameSource.FrameDescription;

            // get size of joint space
            displayWidth  = frameDescription.Width;
            displayHeight = frameDescription.Height;

            // open the reader for the body frames
            bodyFrameReader = sensor.BodyFrameSource.OpenReader();

            // a bone defined as a line between two joints
            bones = new List <Tuple <JointType, JointType> >(
                new Tuple <JointType, JointType>[] {
                // torso
                new Tuple <JointType, JointType>(JointType.Head, JointType.Neck),
                new Tuple <JointType, JointType>(JointType.Neck, JointType.SpineShoulder),
                new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.SpineMid),
                new Tuple <JointType, JointType>(JointType.SpineMid, JointType.SpineBase),
                new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderRight),
                new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderLeft),
                new Tuple <JointType, JointType>(JointType.SpineBase, JointType.HipRight),
                new Tuple <JointType, JointType>(JointType.SpineBase, JointType.HipLeft),

                // right arm
                new Tuple <JointType, JointType>(JointType.ShoulderRight, JointType.ElbowRight),
                new Tuple <JointType, JointType>(JointType.ElbowRight, JointType.WristRight),
                new Tuple <JointType, JointType>(JointType.WristRight, JointType.HandRight),
                new Tuple <JointType, JointType>(JointType.HandRight, JointType.HandTipRight),
                new Tuple <JointType, JointType>(JointType.WristRight, JointType.ThumbRight),

                // Left Arm
                new Tuple <JointType, JointType>(JointType.ShoulderLeft, JointType.ElbowLeft),
                new Tuple <JointType, JointType>(JointType.ElbowLeft, JointType.WristLeft),
                new Tuple <JointType, JointType>(JointType.WristLeft, JointType.HandLeft),
                new Tuple <JointType, JointType>(JointType.HandLeft, JointType.HandTipLeft),
                new Tuple <JointType, JointType>(JointType.WristLeft, JointType.ThumbLeft),

                // Right Leg
                new Tuple <JointType, JointType>(JointType.HipRight, JointType.KneeRight),
                new Tuple <JointType, JointType>(JointType.KneeRight, JointType.AnkleRight),
                new Tuple <JointType, JointType>(JointType.AnkleRight, JointType.FootRight),

                // Left Leg
                new Tuple <JointType, JointType>(JointType.HipLeft, JointType.KneeLeft),
                new Tuple <JointType, JointType>(JointType.KneeLeft, JointType.AnkleLeft),
                new Tuple <JointType, JointType>(JointType.AnkleLeft, JointType.FootLeft)
            }
                );

            // populate body colors, one for each BodyIndex
            bodyColors = new List <Pen>();

            bodyColors.Add(new Pen(Brushes.Red, 6));
            bodyColors.Add(new Pen(Brushes.Orange, 6));
            bodyColors.Add(new Pen(Brushes.Green, 6));
            bodyColors.Add(new Pen(Brushes.Blue, 6));
            bodyColors.Add(new Pen(Brushes.Indigo, 6));
            bodyColors.Add(new Pen(Brushes.Violet, 6));

            // Create the drawing group we'll use for drawing
            drawingGroup = new DrawingGroup();

            // Create an image source that we can use in our image control
            imageSource = new DrawingImage(drawingGroup);

            // use the window object as the view model in this simple example
            DataContext = this;
        }
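InitializeBodyFrameDisplayVariables only prepares the fields; a compact sketch of how they are typically consumed when rendering (modeled on the SDK BodyBasics renderer, with the DrawBodies name being an assumption) is:

        private void DrawBodies(Body[] bodies)
        {
            using (DrawingContext dc = drawingGroup.Open())
            {
                // clear to black so the previous frame does not show through
                dc.DrawRectangle(Brushes.Black, null, new Rect(0.0, 0.0, displayWidth, displayHeight));

                for (int i = 0; i < bodies.Length; i++)
                {
                    Body body = bodies[i];
                    if (body == null || !body.IsTracked)
                    {
                        continue;
                    }

                    Pen pen = bodyColors[i];

                    foreach (var bone in bones)
                    {
                        Joint j0 = body.Joints[bone.Item1];
                        Joint j1 = body.Joints[bone.Item2];

                        if (j0.TrackingState == TrackingState.NotTracked ||
                            j1.TrackingState == TrackingState.NotTracked)
                        {
                            continue;
                        }

                        // project camera-space joints into the depth (display) plane
                        DepthSpacePoint p0 = coordinateMapper.MapCameraPointToDepthSpace(j0.Position);
                        DepthSpacePoint p1 = coordinateMapper.MapCameraPointToDepthSpace(j1.Position);

                        dc.DrawLine(pen, new Point(p0.X, p0.Y), new Point(p1.X, p1.Y));
                    }
                }

                // keep the drawing clipped to the display area
                drawingGroup.ClipGeometry = new RectangleGeometry(new Rect(0.0, 0.0, displayWidth, displayHeight));
            }
        }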
 public PreposeGesturesFrameReader(PreposeGesturesFrameSource source)
 {
     mySource     = source;
     myBodyReader = mySource.KinectSensor.BodyFrameSource.OpenReader();
     myBodyReader.FrameArrived += myBodyReader_FrameArrived;
 }
        public MainWindow()
        {
            // truncate (or create) file.txt so each run starts with an empty log
            using (StreamWriter sw = new StreamWriter("file.txt", false, System.Text.Encoding.Default)) {
            }

            IPEndPoint ipPoint = new IPEndPoint(IPAddress.Parse("192.168.0.18"), 8080);

            Socket listenSocket = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp);

            try {
                listenSocket.Bind(ipPoint);

                listenSocket.Listen(10);

                // NOTE: Accept() blocks until a client connects, so the window
                // will not finish constructing (or appear) until a connection arrives
                handler = listenSocket.Accept();

                /*byte[] data = new byte[256];
                 *
                 * string message = "message";
                 * data = Encoding.UTF8.GetBytes(message);
                 * handler.Send(data);*/
                //handler.Shutdown(SocketShutdown.Both);
                //handler.Close();
            }
            catch (Exception ex) {
                Console.WriteLine(ex.Message);
            }


            // one sensor is currently supported
            this.kinectSensor = KinectSensor.GetDefault();

            // get the coordinate mapper
            this.coordinateMapper = this.kinectSensor.CoordinateMapper;

            // get the depth (display) extents
            FrameDescription frameDescription = this.kinectSensor.DepthFrameSource.FrameDescription;

            // get size of joint space
            this.displayWidth  = frameDescription.Width;
            this.displayHeight = frameDescription.Height;

            // open the reader for the body frames
            this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

            // a bone defined as a line between two joints
            this.bones = new List <Tuple <JointType, JointType> >();

            // Torso
            this.bones.Add(new Tuple <JointType, JointType>(JointType.Head, JointType.Neck));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.Neck, JointType.SpineShoulder));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.SpineMid));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineMid, JointType.SpineBase));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineBase, JointType.HipRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineBase, JointType.HipLeft));

            // Right Arm
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ShoulderRight, JointType.ElbowRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ElbowRight, JointType.WristRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristRight, JointType.HandRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HandRight, JointType.HandTipRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristRight, JointType.ThumbRight));

            // Left Arm
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ShoulderLeft, JointType.ElbowLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ElbowLeft, JointType.WristLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristLeft, JointType.HandLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HandLeft, JointType.HandTipLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristLeft, JointType.ThumbLeft));

            // Right Leg
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HipRight, JointType.KneeRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.KneeRight, JointType.AnkleRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.AnkleRight, JointType.FootRight));

            // Left Leg
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HipLeft, JointType.KneeLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.KneeLeft, JointType.AnkleLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.AnkleLeft, JointType.FootLeft));

            // populate body colors, one for each BodyIndex
            this.bodyColors = new List <Pen>();

            this.bodyColors.Add(new Pen(Brushes.Red, 6));
            this.bodyColors.Add(new Pen(Brushes.Orange, 6));
            this.bodyColors.Add(new Pen(Brushes.Green, 6));
            this.bodyColors.Add(new Pen(Brushes.Blue, 6));
            this.bodyColors.Add(new Pen(Brushes.Indigo, 6));
            this.bodyColors.Add(new Pen(Brushes.Violet, 6));

            // set IsAvailableChanged event notifier
            this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            // open the sensor
            this.kinectSensor.Open();

            // set the status text
            this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                            : Properties.Resources.NoSensorStatusText;

            // Create the drawing group we'll use for drawing
            this.drawingGroup = new DrawingGroup();

            // Create an image source that we can use in our image control
            this.imageSource = new DrawingImage(this.drawingGroup);

            // use the window object as the view model in this simple example
            this.DataContext = this;

            // initialize the components (controls) of the window
            this.InitializeComponent();
        }
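No FrameArrived subscription for bodyFrameReader appears in the constructor above, so the streaming side presumably lives elsewhere. A hedged sketch of a handler that sends the right-hand position through the accepted socket (the bodies field is an assumption) could be:

        private Body[] bodies;

        private void Reader_BodyFrameArrived(object sender, BodyFrameArrivedEventArgs e)
        {
            using (BodyFrame bodyFrame = e.FrameReference.AcquireFrame())
            {
                if (bodyFrame == null)
                {
                    return;
                }

                if (this.bodies == null)
                {
                    this.bodies = new Body[bodyFrame.BodyCount];
                }

                bodyFrame.GetAndRefreshBodyData(this.bodies);
            }

            if (handler == null)
            {
                return;
            }

            foreach (Body body in this.bodies)
            {
                if (body == null || !body.IsTracked)
                {
                    continue;
                }

                // stream the right-hand position as a simple text line
                CameraSpacePoint hand = body.Joints[JointType.HandRight].Position;
                string line = string.Format("{0:F3};{1:F3};{2:F3}\n", hand.X, hand.Y, hand.Z);
                handler.Send(System.Text.Encoding.UTF8.GetBytes(line));
            }
        }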
Ejemplo n.º 43
0
        ////////////////////////////////////////////////////////////////////////////
        #region SUPPORT CODE
        ////////////////////////////////////////////////////////////////////////////

#if NETFX_CORE
        void _bodyReader_FrameArrived(BodyFrameReader sender, BodyFrameArrivedEventArgs args)
Ejemplo n.º 44
0
    private async Task InitializeKinect()
    {
        _kinectSensor = await Sensor.GetDefaultAsync();

        if (_kinectSensor != null)
        {
            await _kinectSensor.OpenAsync();

            if ((sensorFlags & KinectInterop.FrameSource.TypeColor) != 0)
            {
                if (sensorData.colorImage == null)
                {
                    sensorData.colorImage = new byte[sensorData.colorImageWidth * sensorData.colorImageHeight * 4];
                }

                _colorReader = await _kinectSensor.OpenColorFrameReaderAsync(ReaderConfig.HalfRate | ReaderConfig.HalfResolution);

                if (_colorReader != null)
                {
                    _colorReader.FrameArrived += ColorReader_FrameArrived;
                }
            }

            if ((sensorFlags & KinectInterop.FrameSource.TypeDepth) != 0)
            {
                if (sensorData.depthImage == null)
                {
                    sensorData.depthImage = new ushort[sensorData.depthImageWidth * sensorData.depthImageHeight];
                }

                _depthReader = await _kinectSensor.OpenDepthFrameReaderAsync();

                if (_depthReader != null)
                {
                    _depthReader.FrameArrived += DepthReader_FrameArrived;
                }
            }

            if ((sensorFlags & KinectInterop.FrameSource.TypeBodyIndex) != 0)
            {
                if (sensorData.bodyIndexImage == null)
                {
                    sensorData.bodyIndexImage = new byte[sensorData.depthImageWidth * sensorData.depthImageHeight];
                }

                _bodyIndexReader = await _kinectSensor.OpenBodyIndexFrameReaderAsync();

                if (_bodyIndexReader != null)
                {
                    _bodyIndexReader.FrameArrived += BodyIndexReader_FrameArrived;
                }
            }

            if ((sensorFlags & KinectInterop.FrameSource.TypeBody) != 0)
            {
                _bodyReader = await _kinectSensor.OpenBodyFrameReaderAsync();

                if (_bodyReader != null)
                {
                    _bodyReader.FrameArrived += BodyReader_FrameArrived;
                }
            }

            // get the coordinate mapper
            _coordinateMapper  = _kinectSensor.GetCoordinateMapper();
            _coordinateMapper2 = new CoordinateMapper2();

            Debug.Log("UWP-K2 sensor opened");
        }
        else
        {
            Debug.Log("UWP-K2 sensor not found");
        }
    }
Ejemplo n.º 45
0
        ////////////////////////////////////////////////////////////////////////////
        #region PUBLIC METHODS
        ////////////////////////////////////////////////////////////////////////////

        /// <summary>
        /// Start the <c>KinectRecorder</c> session. This will write the file header and
        /// enable the recorder to begin processing frames.
        /// </summary>
        public void Start()
        {
            if (_isStarted)
            {
                return;
            }

            if (_isStopped)
            {
                throw new InvalidOperationException("Cannot restart a recording after it has been stopped");
            }

            if (_sensor != null)
            {
                if (EnableBodyRecorder)
                {
                    _bodyReader = _sensor.BodyFrameSource.OpenReader();
                    _bodyReader.FrameArrived += _bodyReader_FrameArrived;
                }

                if (EnableColorRecorder)
                {
                    _colorReader = _sensor.ColorFrameSource.OpenReader();
                    _colorReader.FrameArrived += _colorReader_FrameArrived;
                }

                if (EnableDepthRecorder)
                {
                    _depthReader = _sensor.DepthFrameSource.OpenReader();
                    _depthReader.FrameArrived += _depthReader_FrameArrived;
                }

                if (EnableInfraredRecorder)
                {
                    _infraredReader = _sensor.InfraredFrameSource.OpenReader();
                    _infraredReader.FrameArrived += _infraredReader_FrameArrived;
                }

                if (!_sensor.IsOpen)
                {
                    _sensor.Open();
                }
            }

            _isStarted = true;

            try
            {
                _writerSemaphore.Wait();

                // initialize and write file metadata
                var metadata = new FileMetadata()
                {
                    Version      = this.GetType().GetTypeInfo().Assembly.GetName().Version.ToString(),
                    ColorCodecId = this.ColorRecorderCodec.CodecId
                };
                if (_sensor != null)
                {
                    //metadata.DepthCameraIntrinsics = _sensor.CoordinateMapper.GetDepthCameraIntrinsics();
                    //metadata.DepthFrameToCameraSpaceTable = _sensor.CoordinateMapper.GetDepthFrameToCameraSpaceTable();
                }
                else
                {
                    var sensor = KinectSensor.GetDefault();
                    if (sensor != null)
                    {
                        //metadata.DepthCameraIntrinsics = sensor.CoordinateMapper.GetDepthCameraIntrinsics();
                        //metadata.DepthFrameToCameraSpaceTable = sensor.CoordinateMapper.GetDepthFrameToCameraSpaceTable();
                    }
                }
                _writer.Write(JsonConvert.SerializeObject(metadata));
            }
            catch (Exception ex)
            {
                // TODO: Change to log the error
                System.Diagnostics.Debug.WriteLine(ex);
            }
            finally
            {
                _writerSemaphore.Release();
            }

            _processFramesTask = ProcessFramesAsync();
        }
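For context, a purely hypothetical usage of this recorder; the stream-plus-sensor constructor and the StopAsync counterpart are assumptions inferred from the fields Start() touches, not confirmed API:

        public static async Task RecordForTenSecondsAsync()
        {
            var sensor = KinectSensor.GetDefault();

            using (var stream = System.IO.File.Create("capture.kdvr"))
            {
                var recorder = new KinectRecorder(stream, sensor)   // assumed constructor (stream + sensor)
                {
                    EnableBodyRecorder  = true,
                    EnableColorRecorder = true
                };

                recorder.Start();
                await Task.Delay(TimeSpan.FromSeconds(10));
                await recorder.StopAsync();                         // assumed stop counterpart
            }
        }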
Ejemplo n.º 46
0
 /// <summary>
 /// Starts reading data from the Kinect.
 /// </summary>
 public void Start()
 {
     Log.Information("BodyCapture: Start");
     _reader = _sensor.BodyFrameSource.OpenReader();
     _reader.FrameArrived += Reader_OnFrameArrived;
 }
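A matching Stop is not shown; a hedged counterpart that unhooks and disposes the reader might look like this:

 public void Stop()
 {
     Log.Information("BodyCapture: Stop");

     if (_reader != null)
     {
         _reader.FrameArrived -= Reader_OnFrameArrived;
         _reader.Dispose();
         _reader = null;
     }
 }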
Ejemplo n.º 47
0
        static void Main(string[] args)
        {
            //const string SkeletonStreamName = "skeleton";
            //SkeletonStreamMessage skeletonStreamMessage;// = new SkeletonStreamMessage { stream = SkeletonStreamName };

            KinectSensor    kinectSensor    = KinectSensor.GetDefault();
            BodyFrameReader bodyFrameReader = null;

            bodyFrameReader = kinectSensor.BodyFrameSource.OpenReader();
            ColorFrameReader colorFrameReader = null;

            colorFrameReader = kinectSensor.ColorFrameSource.OpenReader();

            _coordinateMapper = kinectSensor.CoordinateMapper;
            kinectSensor.Open();

            WebSocketServer server = new WebSocketServer("ws://localhost:8181");

            server.Start(socket =>
            {
                socket.OnOpen = () =>
                {
                    // Add the incoming connection to our list.
                    clients.Add(socket);
                };

                socket.OnClose = () =>
                {
                    // Remove the disconnected client from the list.
                    clients.Remove(socket);
                };


                socket.OnMessage = message =>
                {
                    if (message == "get-video")
                    {
                        int NUMBER_OF_FRAMES = new DirectoryInfo("Video").GetFiles().Length;

                        // Send the video as a list of consecutive images.
                        for (int index = 0; index < NUMBER_OF_FRAMES; index++)
                        {
                            foreach (var client in clients)
                            {
                                string path  = "Video/" + index + ".jpg";
                                byte[] image = ImageUtil.ToByteArray(path);

                                client.Send(image);
                            }

                            // throttle playback; 270 ms between frames here (use ~33 ms for true 30 fps)
                            System.Threading.Thread.Sleep(270);
                        }
                    }

                    else if (message == "get-bodies")
                    {
                        if (kinectSensor.IsOpen)
                        {
                            if (bodyFrameReader != null)
                            {
                                bodyFrameReader.FrameArrived += bodyFrameReader_FrameArrived;
                            }
                        }
                    }
                    else if (message == "get-color")
                    {
                        if (kinectSensor.IsOpen)
                        {
                            if (colorFrameReader != null)
                            {
                                colorFrameReader.FrameArrived += colorFrameReader_FrameArrived;
                            }
                        }
                    }
                };
            });

            // Wait for a key press to close...

            Console.ReadLine();
            kinectSensor.Close();
        }
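The bodyFrameReader_FrameArrived handler subscribed on "get-bodies" is not part of this listing. A hedged sketch that broadcasts tracked joints as JSON to every connected socket follows; the static _bodies field is an assumption, JsonConvert comes from Newtonsoft.Json (already used elsewhere in these examples), and the query needs using System.Linq:

        static Body[] _bodies;

        static void bodyFrameReader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
        {
            using (BodyFrame bodyFrame = e.FrameReference.AcquireFrame())
            {
                if (bodyFrame == null)
                {
                    return;
                }

                if (_bodies == null)
                {
                    _bodies = new Body[bodyFrame.BodyCount];
                }

                bodyFrame.GetAndRefreshBodyData(_bodies);
            }

            // project each tracked body into a small serializable shape
            var payload = _bodies
                .Where(b => b != null && b.IsTracked)
                .Select(b => new
                {
                    trackingId = b.TrackingId,
                    joints = b.Joints.ToDictionary(
                        j => j.Key.ToString(),
                        j => new { j.Value.Position.X, j.Value.Position.Y, j.Value.Position.Z })
                });

            string json = JsonConvert.SerializeObject(payload);

            foreach (var client in clients)
            {
                client.Send(json);
            }
        }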
Ejemplo n.º 48
0
        /// <summary>
        /// Start capture.
        /// </summary>
        public void Start()
        {
            if (FrameSync)
            {
                // open streams using a synchronized readers, the frame rate is equal to the lowest of each separated stream.
                // select which streams to enable
                var features = FrameSourceTypes.None;
                if (BodyFrameEvent != null)
                {
                    features |= FrameSourceTypes.Body;
                }
                if (ColorFrameEvent != null)
                {
                    features |= FrameSourceTypes.Color;
                }
                if (features == FrameSourceTypes.None)
                {
                    throw new ApplicationException("No event processor registered.");
                }
                // check reader state
                if (_multiFrameReader != null)
                {
                    throw new InvalidOperationException("Kinect already started.");
                }
                // open the reader
                _multiFrameReader = _kinectSensor.OpenMultiSourceFrameReader(features);
                if (_multiFrameReader == null)
                {
                    Close();
                    throw new ApplicationException("Error opening readers.");
                }

                // register to frames
                _multiFrameReader.MultiSourceFrameArrived += MultiFrameArrived;
            }
            else
            {
                // open streams using separate readers, each one with the highest frame rate possible.
                // open body reader
                if (BodyFrameEvent != null)
                {
                    if (_bodyFrameReader != null)
                    {
                        throw new InvalidOperationException("Kinect already started.");
                    }
                    _bodyFrameReader = _kinectSensor.BodyFrameSource.OpenReader();
                    if (_bodyFrameReader == null)
                    {
                        Close();
                        throw new ApplicationException("Error opening readers.");
                    }
                    _bodyFrameReader.FrameArrived += BodyFrameArrived;
                }
                // open color stream reader
                if (ColorFrameEvent != null)
                {
                    if (_colorFrameReader != null)
                    {
                        throw new InvalidOperationException("Kinect already started.");
                    }
                    _colorFrameReader = _kinectSensor.ColorFrameSource.OpenReader();
                    if (_colorFrameReader == null)
                    {
                        Close();
                        throw new ApplicationException("Error opening readers.");
                    }
                    _colorFrameReader.FrameArrived += ColorFrameArrived;
                }
            }
            _firstFrameRelativeTimeEventFired = false;
            _kinectSensor.Open();
        }
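The MultiFrameArrived handler used in the synchronized branch is not shown; a hedged sketch that pulls each sub-frame and forwards it (assuming BodyFrameEvent and ColorFrameEvent are delegates that take the corresponding frame) is:

        private void MultiFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            MultiSourceFrame multiFrame = e.FrameReference.AcquireFrame();
            if (multiFrame == null)
            {
                return;
            }

            if (BodyFrameEvent != null)
            {
                using (BodyFrame bodyFrame = multiFrame.BodyFrameReference.AcquireFrame())
                {
                    if (bodyFrame != null)
                    {
                        BodyFrameEvent(bodyFrame);
                    }
                }
            }

            if (ColorFrameEvent != null)
            {
                using (ColorFrame colorFrame = multiFrame.ColorFrameReference.AcquireFrame())
                {
                    if (colorFrame != null)
                    {
                        ColorFrameEvent(colorFrame);
                    }
                }
            }
        }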
Ejemplo n.º 49
0
        /// <summary>
        /// Initializes a new instance of the MainWindow class.
        /// </summary>
        public MainWindow()
        {
            //Visual Alert Loading
            proximity_alert_image.BeginInit();
            proximity_alert_image.UriSource = new Uri("prox_visual_alert.bmp", UriKind.Relative);
            proximity_alert_image.EndInit();

            contamination_alert_image.BeginInit();
            contamination_alert_image.UriSource = new Uri("contamination_visual_alert.bmp", UriKind.Relative);
            contamination_alert_image.EndInit();

            // one sensor is currently supported
            this.kinectSensor = KinectSensor.GetDefault();

            // get the coordinate mapper
            this.coordinateMapper = this.kinectSensor.CoordinateMapper;

            // get the depth (display) extents
            FrameDescription frameDescription = this.kinectSensor.DepthFrameSource.FrameDescription;

            // get size of joint space
            this.displayWidth  = frameDescription.Width;
            this.displayHeight = frameDescription.Height;
            this.displayRect   = new Rect(0.0, 0.0, this.displayWidth, this.displayHeight);

            // open the reader for the body frames
            this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

            // a bone defined as a line between two joints
            this.bones = new List <Tuple <JointType, JointType> >();

            // Torso
            this.bones.Add(new Tuple <JointType, JointType>(JointType.Head, JointType.Neck));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.Neck, JointType.SpineShoulder));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.SpineMid));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineMid, JointType.SpineBase));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineBase, JointType.HipRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineBase, JointType.HipLeft));

            // Right Arm
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ShoulderRight, JointType.ElbowRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ElbowRight, JointType.WristRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristRight, JointType.HandRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HandRight, JointType.HandTipRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristRight, JointType.ThumbRight));

            // Left Arm
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ShoulderLeft, JointType.ElbowLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ElbowLeft, JointType.WristLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristLeft, JointType.HandLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HandLeft, JointType.HandTipLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristLeft, JointType.ThumbLeft));

            // Right Leg
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HipRight, JointType.KneeRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.KneeRight, JointType.AnkleRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.AnkleRight, JointType.FootRight));

            // Left Leg
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HipLeft, JointType.KneeLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.KneeLeft, JointType.AnkleLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.AnkleLeft, JointType.FootLeft));

            // set IsAvailableChanged event notifier
            this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            // open the sensor
            this.kinectSensor.Open();

            // set the status text
            this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                            : Properties.Resources.NoSensorStatusText;

            // Create the drawing group we'll use for drawing
            this.drawingGroup = new DrawingGroup();

            // Create an image source that we can use in our image control
            this.imageSource = new DrawingImage(this.drawingGroup);

            // use the window object as the view model in this simple example
            this.DataContext = this;

            // initialize the components (controls) of the window - This is where it sets the image source
            this.InitializeComponent();
        }
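As an illustration of how the proximity alert loaded above could be driven from body data, a hedged helper (the one-metre threshold is an arbitrary assumption):

        private const double ProximityThresholdMeters = 1.0;

        private bool IsTooClose(Body body)
        {
            if (body == null || !body.IsTracked)
            {
                return false;
            }

            // SpineBase.Position.Z is the distance from the sensor in metres
            Joint spineBase = body.Joints[JointType.SpineBase];

            return spineBase.TrackingState != TrackingState.NotTracked
                && spineBase.Position.Z > 0
                && spineBase.Position.Z < ProximityThresholdMeters;
        }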
Ejemplo n.º 50
0
        private void StartWork()
        {
            multiSourceFrameReader = this.kinectSensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Body);

            this.bodyFrameReader  = this.kinectSensor.BodyFrameSource.OpenReader();
            this.colorFrameReader = this.kinectSensor.ColorFrameSource.OpenReader();

            // a bone defined as a line between two joints
            this.bones = new List <Tuple <JointType, JointType> >
            {
                // Torso
                new Tuple <JointType, JointType>(JointType.Head, JointType.Neck),
                new Tuple <JointType, JointType>(JointType.Neck, JointType.SpineShoulder),
                new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.SpineMid),
                new Tuple <JointType, JointType>(JointType.SpineMid, JointType.SpineBase),
                new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderRight),
                new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderLeft),
                new Tuple <JointType, JointType>(JointType.SpineBase, JointType.HipRight),
                new Tuple <JointType, JointType>(JointType.SpineBase, JointType.HipLeft),

                // Right Arm
                new Tuple <JointType, JointType>(JointType.ShoulderRight, JointType.ElbowRight),
                new Tuple <JointType, JointType>(JointType.ElbowRight, JointType.WristRight),
                new Tuple <JointType, JointType>(JointType.WristRight, JointType.HandRight),
                new Tuple <JointType, JointType>(JointType.HandRight, JointType.HandTipRight),
                new Tuple <JointType, JointType>(JointType.WristRight, JointType.ThumbRight),

                // Left Arm
                new Tuple <JointType, JointType>(JointType.ShoulderLeft, JointType.ElbowLeft),
                new Tuple <JointType, JointType>(JointType.ElbowLeft, JointType.WristLeft),
                new Tuple <JointType, JointType>(JointType.WristLeft, JointType.HandLeft),
                new Tuple <JointType, JointType>(JointType.HandLeft, JointType.HandTipLeft),
                new Tuple <JointType, JointType>(JointType.WristLeft, JointType.ThumbLeft),

                // Right Leg
                new Tuple <JointType, JointType>(JointType.HipRight, JointType.KneeRight),
                new Tuple <JointType, JointType>(JointType.KneeRight, JointType.AnkleRight),
                new Tuple <JointType, JointType>(JointType.AnkleRight, JointType.FootRight),

                // Left Leg
                new Tuple <JointType, JointType>(JointType.HipLeft, JointType.KneeLeft),
                new Tuple <JointType, JointType>(JointType.KneeLeft, JointType.AnkleLeft),
                new Tuple <JointType, JointType>(JointType.AnkleLeft, JointType.FootLeft)
            };

            // populate body colors, one for each BodyIndex
            this.bodyColors = new List <Pen>
            {
                new Pen(Brushes.Red, 6),
                new Pen(Brushes.Orange, 6),
                new Pen(Brushes.Green, 6),
                new Pen(Brushes.Blue, 6),
                new Pen(Brushes.Indigo, 6),
                new Pen(Brushes.Violet, 6)
            };

            // set IsAvailableChanged event notifier
            this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            // open the sensor
            this.kinectSensor.Open();

            // set the status text
            Sensor_IsAvailableChanged(null, null);
            //this.StatusText = this.kinectSensor.IsAvailable ? "Kinect podłączony"
            //                                                : "Nie znaleziono Kinect'a";

            // Create the drawing group we'll use for drawing
            this.drawingGroup = new DrawingGroup();

            // Create an image source that we can use in our image control
            this.ImageSource = new DrawingImage(this.drawingGroup);

            if (multiSourceFrameReader != null)
            {
                multiSourceFrameReader.MultiSourceFrameArrived += MultisourceFrameArrived;
            }
        }
Ejemplo n.º 51
0
        /// <summary>
        /// Initializes a new instance of the MainWindow class.
        /// </summary>
        public MainWindow()
        {
            // get the kinectSensor object
            this.kinectSensor = KinectSensor.GetDefault();

            // get the coordinate mapper
            this.coordinateMapper = this.kinectSensor.CoordinateMapper;

            // open the reader for the depth frames
            this.depthFrameReader = this.kinectSensor.DepthFrameSource.OpenReader();

            // wire handler for frame arrival
            this.depthFrameReader.FrameArrived += this.Reader_DepthFrameArrived;

            // get FrameDescription from DepthFrameSource
            this.depthFrameDescription = this.kinectSensor.DepthFrameSource.FrameDescription;

            // allocate space to put the pixels being received and converted
            this.depthPixels = new byte[this.depthFrameDescription.Width * this.depthFrameDescription.Height];

            List <Color> colorList = new List <Color>();

            colorList.Add(Color.FromArgb(100, 0, 0, 255));
            colorList.Add(Color.FromArgb(150, 0, 255, 0));
            colorList.Add(Color.FromArgb(100, 70, 200, 0));
            colorList.Add(Color.FromArgb(100, 100, 180, 0));
            colorList.Add(Color.FromArgb(200, 200, 100, 0));
            colorList.Add(Color.FromArgb(200, 230, 70, 0));
            colorList.Add(Color.FromArgb(255, 255, 0, 0));
            colorList.Add(Color.FromArgb(0, 0, 0, 0));



            BitmapPalette bp = new BitmapPalette(colorList);

            // create the bitmap to display
            this.depthBitmap = new WriteableBitmap(this.depthFrameDescription.Width, this.depthFrameDescription.Height, 96.0, 96.0, PixelFormats.Indexed8, bp);

            // open the reader for the body frames
            this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

            // get FrameDescription from the BodyFrameSource
            this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;


            // set IsAvailableChanged event notifier
            this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            // open the sensor
            this.kinectSensor.Open();

            // set the status text
            this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                            : Properties.Resources.NoSensorStatusText;

            // use the window object as the view model
            this.DataContext = this;

            // initialize the components (controls) of the window
            this.InitializeComponent();
        }
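Reader_DepthFrameArrived is referenced above but not included; a hedged sketch that bins raw depth values into the eight-entry palette created in the constructor (the ushort depthData buffer is an assumption) is:

        private ushort[] depthData;

        private void Reader_DepthFrameArrived(object sender, DepthFrameArrivedEventArgs e)
        {
            using (DepthFrame depthFrame = e.FrameReference.AcquireFrame())
            {
                if (depthFrame == null)
                {
                    return;
                }

                if (this.depthData == null)
                {
                    this.depthData = new ushort[this.depthFrameDescription.Width * this.depthFrameDescription.Height];
                }

                depthFrame.CopyFrameDataToArray(this.depthData);

                ushort minDepth = depthFrame.DepthMinReliableDistance;
                ushort maxDepth = depthFrame.DepthMaxReliableDistance;

                for (int i = 0; i < this.depthData.Length; ++i)
                {
                    ushort depth = this.depthData[i];

                    // out-of-range pixels use the last (transparent) palette entry,
                    // in-range pixels are binned into palette indices 0..6
                    this.depthPixels[i] = (depth < minDepth || depth > maxDepth)
                        ? (byte)7
                        : (byte)((depth - minDepth) * 7 / (maxDepth - minDepth + 1));
                }

                this.depthBitmap.WritePixels(
                    new Int32Rect(0, 0, this.depthBitmap.PixelWidth, this.depthBitmap.PixelHeight),
                    this.depthPixels,
                    this.depthBitmap.PixelWidth,
                    0);
            }
        }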
        /// <summary>
        /// Initializes a new instance of the MainWindow class.
        /// </summary>
        public MainWindow()
        {
            this.kinectSensor = KinectSensor.GetDefault();

            this.multiFrameSourceReader = this.kinectSensor.OpenMultiSourceFrameReader(FrameSourceTypes.Depth | FrameSourceTypes.Color | FrameSourceTypes.BodyIndex);

            this.multiFrameSourceReader.MultiSourceFrameArrived += this.Reader_MultiSourceFrameArrived;

            this.coordinateMapper = this.kinectSensor.CoordinateMapper;

            FrameDescription depthFrameDescription = this.kinectSensor.DepthFrameSource.FrameDescription;

            int depthWidth  = depthFrameDescription.Width;
            int depthHeight = depthFrameDescription.Height;

            FrameDescription colorFrameDescription = this.kinectSensor.ColorFrameSource.FrameDescription;

            int colorWidth  = colorFrameDescription.Width;
            int colorHeight = colorFrameDescription.Height;

            this.colorMappedToDepthPoints = new DepthSpacePoint[colorWidth * colorHeight];

            this.bitmap = new WriteableBitmap(colorWidth, colorHeight, 96.0, 96.0, PixelFormats.Bgra32, null);

            // Calculate the WriteableBitmap back buffer size
            this.bitmapBackBufferSize = (uint)((this.bitmap.BackBufferStride * (this.bitmap.PixelHeight - 1)) + (this.bitmap.PixelWidth * this.bytesPerPixel));

            //this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            this.kinectSensor.Open();

            //this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
            //                                              : Properties.Resources.NoSensorStatusText;

            this.DataContext = this;


            // get the coordinate mapper
            this.coordinateMapper = this.kinectSensor.CoordinateMapper;

            // get the depth (display) extents
            FrameDescription frameDescription = this.kinectSensor.DepthFrameSource.FrameDescription;

            // get size of joint space
            this.displayWidth  = frameDescription.Width;
            this.displayHeight = frameDescription.Height;

            // open the reader for the body frames
            this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

            // a bone defined as a line between two joints
            this.bones = new List <Tuple <JointType, JointType> >();

            // Torso
            this.bones.Add(new Tuple <JointType, JointType>(JointType.Head, JointType.Neck));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.Neck, JointType.SpineShoulder));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.SpineMid));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineMid, JointType.SpineBase));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineBase, JointType.HipRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineBase, JointType.HipLeft));

            // Right Arm
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ShoulderRight, JointType.ElbowRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ElbowRight, JointType.WristRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristRight, JointType.HandRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HandRight, JointType.HandTipRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristRight, JointType.ThumbRight));

            // Left Arm
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ShoulderLeft, JointType.ElbowLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ElbowLeft, JointType.WristLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristLeft, JointType.HandLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HandLeft, JointType.HandTipLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristLeft, JointType.ThumbLeft));

            // Right Leg
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HipRight, JointType.KneeRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.KneeRight, JointType.AnkleRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.AnkleRight, JointType.FootRight));

            // Left Leg
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HipLeft, JointType.KneeLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.KneeLeft, JointType.AnkleLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.AnkleLeft, JointType.FootLeft));

            // populate body colors, one for each BodyIndex
            this.bodyColors = new List <Pen>();

            this.bodyColors.Add(new Pen(Brushes.Red, 6));
            this.bodyColors.Add(new Pen(Brushes.Orange, 6));
            this.bodyColors.Add(new Pen(Brushes.Green, 6));
            this.bodyColors.Add(new Pen(Brushes.Blue, 6));
            this.bodyColors.Add(new Pen(Brushes.Indigo, 6));
            this.bodyColors.Add(new Pen(Brushes.Violet, 6));

            // set IsAvailableChanged event notifier
            this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            // open the sensor
            this.kinectSensor.Open();

            // set the status text
            this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                            : Properties.Resources.NoSensorStatusText;

            // Create the drawing group we'll use for drawing
            this.drawingGroup = new DrawingGroup();

            // Create an image source that we can use in our image control
            this.imageSource = new DrawingImage(this.drawingGroup);

            // use the window object as the view model in this simple example
            this.DataContext = this;

            // initialize the components (controls) of the window
            this.InitializeComponent();
            backgroundswitch.SelectedIndex = 0;
            propsswitch.SelectedIndex      = 4;
            foregroundswitch.SelectedIndex = 4;
            clothswitch.SelectedIndex      = 4;
        }
Ejemplo n.º 53
0
        public MainWindow()
        {
            this.kinectSensor = KinectSensor.GetDefault();                           // store the KinectSensor object for the sensor attached to this PC

            this.colorFrameReader = this.kinectSensor.ColorFrameSource.OpenReader(); // open the reader for the color data

            this.colorFrameReader.FrameArrived += this.Read_ColorFrameArrived;

            FrameDescription colorframeDescription = this.kinectSensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);
            // frame descriptions for the color image and the depth (joint-space) image
            FrameDescription frameDescription = this.kinectSensor.DepthFrameSource.FrameDescription;

            this.colorBitmap = new WriteableBitmap(colorframeDescription.Width, colorframeDescription.Height, 96.0, 96.0, PixelFormats.Bgr32, null);



            this.coordinateMapper = kinectSensor.CoordinateMapper;

            this.displayWidth  = frameDescription.Width;
            this.displayHeight = frameDescription.Height;

            this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

            this.bodyFrameReader.FrameArrived += this.Read_BodyFrameArrived;

            // allocate the bone list before populating it; the original guard
            // "if (this.bones != null)" silently skips the whole skeleton
            // definition whenever the field was never initialized elsewhere
            if (this.bones == null)
            {
                this.bones = new List <Tuple <JointType, JointType> >();
            }

            {
                this.bones.Add(new Tuple <JointType, JointType>(JointType.Head, JointType.Neck));
                this.bones.Add(new Tuple <JointType, JointType>(JointType.Neck, JointType.SpineShoulder));
                this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.SpineMid));
                this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineMid, JointType.SpineBase));
                this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineBase, JointType.HipRight));
                this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineBase, JointType.HipLeft));
                this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderLeft));
                this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderRight));

                this.bones.Add(new Tuple <JointType, JointType>(JointType.ShoulderRight, JointType.ElbowRight));
                this.bones.Add(new Tuple <JointType, JointType>(JointType.ElbowRight, JointType.WristRight));
                this.bones.Add(new Tuple <JointType, JointType>(JointType.WristRight, JointType.HandRight));
                this.bones.Add(new Tuple <JointType, JointType>(JointType.HandRight, JointType.HandTipRight));
                this.bones.Add(new Tuple <JointType, JointType>(JointType.WristRight, JointType.ThumbRight));

                this.bones.Add(new Tuple <JointType, JointType>(JointType.ShoulderLeft, JointType.ElbowLeft));
                this.bones.Add(new Tuple <JointType, JointType>(JointType.ElbowLeft, JointType.WristLeft));
                this.bones.Add(new Tuple <JointType, JointType>(JointType.WristLeft, JointType.HandLeft));
                this.bones.Add(new Tuple <JointType, JointType>(JointType.HandLeft, JointType.HandTipLeft));
                this.bones.Add(new Tuple <JointType, JointType>(JointType.WristLeft, JointType.ThumbLeft));
            }

            this.bodyColors = new List <Pen>();
            this.bodyColors.Add(new Pen(Brushes.Red, 6));
            this.bodyColors.Add(new Pen(Brushes.Orange, 6));
            this.bodyColors.Add(new Pen(Brushes.Green, 6));
            this.bodyColors.Add(new Pen(Brushes.Blue, 6));
            this.bodyColors.Add(new Pen(Brushes.Indigo, 6));
            this.bodyColors.Add(new Pen(Brushes.Violet, 6));


            this.kinectSensor.Open();

            this.drawingGroup = new DrawingGroup();

            this.imageSource = new DrawingImage(this.drawingGroup);

            this.DataContext = this; // this must be set so the UI can bind to the Kinect data

            InitializeComponent();
        }
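The Read_BodyFrameArrived handler wired above is not included in this listing. As a rough, hypothetical sketch (the this.bodies field is an assumption and the drawing step is omitted), such a handler typically just acquires the frame and refreshes the body array:

        // Hypothetical sketch of a body-frame handler; this.bodies is an assumed field.
        private void Read_BodyFrameArrived(object sender, BodyFrameArrivedEventArgs e)
        {
            using (BodyFrame bodyFrame = e.FrameReference.AcquireFrame())
            {
                if (bodyFrame == null)
                {
                    return; // the frame may already have been released
                }

                if (this.bodies == null)
                {
                    this.bodies = new Body[bodyFrame.BodyCount];
                }

                // refresh the array in place with the latest tracking data
                bodyFrame.GetAndRefreshBodyData(this.bodies);
            }

            // a real handler would then map joints with this.coordinateMapper and redraw this.drawingGroup
        }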
        /// <summary>
        /// Initializes a new instance of the MainWindow class.
        /// </summary>
        public MainWindow()
        {
            // one sensor is currently supported
            this.kinectSensor = KinectSensor.GetDefault();

            // get the coordinate mapper
            this.coordinateMapper = this.kinectSensor.CoordinateMapper;

            // get the color frame details
            FrameDescription frameDescription = this.kinectSensor.ColorFrameSource.FrameDescription;

            // set the display specifics
            this.displayWidth  = frameDescription.Width;
            this.displayHeight = frameDescription.Height;
            this.displayRect   = new Rect(0.0, 0.0, this.displayWidth, this.displayHeight);

            // open the reader for the body frames
            this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

            // wire handler for body frame arrival
            this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

            // set the maximum number of bodies that would be tracked by Kinect
            this.bodyCount = this.kinectSensor.BodyFrameSource.BodyCount;

            // allocate storage to store body objects
            this.bodies = new Body[this.bodyCount];

            // specify the required face frame results
            FaceFrameFeatures faceFrameFeatures =
                FaceFrameFeatures.BoundingBoxInColorSpace
                | FaceFrameFeatures.PointsInColorSpace
                | FaceFrameFeatures.RotationOrientation
                | FaceFrameFeatures.FaceEngagement
                | FaceFrameFeatures.Glasses
                | FaceFrameFeatures.Happy
                | FaceFrameFeatures.LeftEyeClosed
                | FaceFrameFeatures.RightEyeClosed
                | FaceFrameFeatures.LookingAway
                | FaceFrameFeatures.MouthMoved
                | FaceFrameFeatures.MouthOpen;

            // create a face frame source + reader to track each face in the FOV
            this.faceFrameSources = new FaceFrameSource[this.bodyCount];
            this.faceFrameReaders = new FaceFrameReader[this.bodyCount];
            for (int i = 0; i < this.bodyCount; i++)
            {
                // create the face frame source with the required face frame features and an initial tracking Id of 0
                this.faceFrameSources[i] = new FaceFrameSource(this.kinectSensor, 0, faceFrameFeatures);

                // open the corresponding reader
                this.faceFrameReaders[i] = this.faceFrameSources[i].OpenReader();
            }

            // allocate storage to store face frame results for each face in the FOV
            this.faceFrameResults = new FaceFrameResult[this.bodyCount];

            // populate face result colors - one for each face index
            this.faceBrush = new List <Brush>()
            {
                Brushes.White,
                Brushes.Orange,
                Brushes.Green,
                Brushes.Red,
                Brushes.LightBlue,
                Brushes.Yellow
            };

            //List<Brush> faceBrush;

            // set IsAvailableChanged event notifier
            this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            // open the sensor
            this.kinectSensor.Open();

            // set the status text
            this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                            : Properties.Resources.NoSensorStatusText;

            // Create the drawing group we'll use for drawing
            this.drawingGroup = new DrawingGroup();

            // Create an image source that we can use in our image control
            this.imageSource = new DrawingImage(this.drawingGroup);

            // use the window object as the view model in this simple example
            this.DataContext = this;

            // initialize the components (controls) of the window
            this.InitializeComponent();

            // generate a unique key for this event
            string clave_unica;

            strGuid            = System.Guid.NewGuid().ToString().ToUpper();
            clave_unica        = strGuid;
            claveUnica.Content = clave_unica;


            // date
            fecha        = DateTimeOffset.Now.ToString("MM/dd/yyyy HH:mm:ss", System.Globalization.CultureInfo.InvariantCulture);
            time.Content = fecha;

            // PC name
            nombrePC       = Environment.MachineName;
            nom_pc.Content = nombrePC;

            // face color, used as a reference id to know which body the data is being read from
            //color_f = faceBrush.Count.ToString();
            //ColorCara.Content = color_f;
        }
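The face sources created above start with a TrackingId of 0, so they stay idle until each one is paired with a tracked body. Below is only a hedged sketch of that pairing and of a minimal face-frame handler; GetFaceSourceIndex is an assumed helper that maps a FaceFrameSource back to its slot in faceFrameSources:

        // Hypothetical sketch: bind each FaceFrameSource to the body at the same index.
        // Typically called from the body-frame handler after GetAndRefreshBodyData.
        private void UpdateFaceTrackingIds(Body[] trackedBodies)
        {
            for (int i = 0; i < this.bodyCount; i++)
            {
                if (!this.faceFrameSources[i].IsTrackingIdValid && trackedBodies[i].IsTracked)
                {
                    this.faceFrameSources[i].TrackingId = trackedBodies[i].TrackingId;
                }
            }
        }

        // Hypothetical sketch of a face-frame handler that caches the latest result per face index.
        private void FaceReader_FrameArrived(object sender, FaceFrameArrivedEventArgs e)
        {
            using (FaceFrame faceFrame = e.FrameReference.AcquireFrame())
            {
                if (faceFrame != null)
                {
                    int index = this.GetFaceSourceIndex(faceFrame.FaceFrameSource);
                    this.faceFrameResults[index] = faceFrame.FaceFrameResult;
                }
            }
        }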
Ejemplo n.º 55
0
        /// <summary>
        /// Initializes a new instance of the MainWindow class.
        /// </summary>
        public MainWindow()
        {
            // one sensor is currently supported
            this.kinectSensor = KinectSensor.GetDefault();

            // get the coordinate mapper
            this.coordinateMapper = this.kinectSensor.CoordinateMapper;

            // get the depth (display) extents
            FrameDescription frameDescription = this.kinectSensor.DepthFrameSource.FrameDescription;

            // get size of joint space
            this.displayWidth  = frameDescription.Width;
            this.displayHeight = frameDescription.Height;

            // open the reader for the body frames
            this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

            // a bone defined as a line between two joints
            this.bones = new List <Tuple <JointType, JointType> >();

            // Torso
            this.bones.Add(new Tuple <JointType, JointType>(JointType.Head, JointType.Neck));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.Neck, JointType.SpineShoulder));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.SpineMid));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineMid, JointType.SpineBase));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineBase, JointType.HipRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineBase, JointType.HipLeft));

            // Right Arm
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ShoulderRight, JointType.ElbowRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ElbowRight, JointType.WristRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristRight, JointType.HandRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HandRight, JointType.HandTipRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristRight, JointType.ThumbRight));

            // Left Arm
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ShoulderLeft, JointType.ElbowLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ElbowLeft, JointType.WristLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristLeft, JointType.HandLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HandLeft, JointType.HandTipLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristLeft, JointType.ThumbLeft));

            // Right Leg
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HipRight, JointType.KneeRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.KneeRight, JointType.AnkleRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.AnkleRight, JointType.FootRight));

            // Left Leg
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HipLeft, JointType.KneeLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.KneeLeft, JointType.AnkleLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.AnkleLeft, JointType.FootLeft));

            // populate body colors, one for each BodyIndex
            this.bodyColors = new List <Pen>();

            this.bodyColors.Add(new Pen(Brushes.Red, 6));
            this.bodyColors.Add(new Pen(Brushes.Orange, 6));
            this.bodyColors.Add(new Pen(Brushes.Green, 6));
            this.bodyColors.Add(new Pen(Brushes.Blue, 6));
            this.bodyColors.Add(new Pen(Brushes.Indigo, 6));
            this.bodyColors.Add(new Pen(Brushes.Violet, 6));

            // set IsAvailableChanged event notifier
            this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            // open the sensor
            this.kinectSensor.Open();

            // set the status text
            this.StatusText = this.kinectSensor.IsAvailable ? "Running" : "Sensor not available";

            // Create the drawing group we'll use for drawing
            this.drawingGroup = new DrawingGroup();

            // Create an image source that we can use in our image control
            this.imageSource = new DrawingImage(this.drawingGroup);

            // use the window object as the view model in this simple example
            this.DataContext = this;

            // initialize the components (controls) of the window
            this.InitializeComponent();

            //set up server endpoint
            IPEndPoint serverEndPt = new IPEndPoint(IPAddress.Parse(address), 8888);

            //initialize socket
            clientSocket = new Socket(serverEndPt.AddressFamily, SocketType.Stream, ProtocolType.Tcp);

            //connect to server
            clientSocket.Connect(serverEndPt);

            //get data stream
            serverStream = new NetworkStream(clientSocket);

            //send initial message telling server we are a kinect client
            string s = "kinect";

            Byte[] buffer = System.Text.Encoding.ASCII.GetBytes(s.ToCharArray());
            clientSocket.Send(buffer);

            Console.WriteLine("Client connected to server and sent initial hello");
        }
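With the hello message sent, the same serverStream can carry per-frame data. The snippet below is only an illustration; the "x,y,z" line format is invented here and is not necessarily the protocol this project actually uses:

        // Hypothetical sketch: serialize one joint position and push it over the NetworkStream
        // opened in the constructor. The message format is an assumption.
        private void SendJointPosition(Body body, JointType jointType)
        {
            if (body == null || !body.IsTracked)
            {
                return;
            }

            CameraSpacePoint p = body.Joints[jointType].Position;
            string message = string.Format(System.Globalization.CultureInfo.InvariantCulture,
                                           "{0:F3},{1:F3},{2:F3}\n", p.X, p.Y, p.Z);

            byte[] payload = System.Text.Encoding.ASCII.GetBytes(message);
            serverStream.Write(payload, 0, payload.Length);
        }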
Ejemplo n.º 56
0
        /*****************************************************************************************************************/



        // Initializes a new instance of the Play class.
        public Play()
        {
            // get the default sensor
            this.kinectSensor = KinectSensor.GetDefault();

            // get the coordinate mapper
            this.coordinateMapper = this.kinectSensor.CoordinateMapper;

            // get the sensor's depth image size
            FrameDescription frameDescription = this.kinectSensor.DepthFrameSource.FrameDescription;

            // store the depth image size as the display size
            this.displayWidth  = frameDescription.Width;
            this.displayHeight = frameDescription.Height;

            // open the reader for the body frames
            this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

            // a bone is defined as a line between two joints
            this.bones = new List <Tuple <JointType, JointType> >();

            // Torso
            this.bones.Add(new Tuple <JointType, JointType>(JointType.Head, JointType.Neck));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.Neck, JointType.SpineShoulder));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.SpineMid));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineMid, JointType.SpineBase));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineShoulder, JointType.ShoulderLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineBase, JointType.HipRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.SpineBase, JointType.HipLeft));

            // Right Arm
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ShoulderRight, JointType.ElbowRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ElbowRight, JointType.WristRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristRight, JointType.HandRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HandRight, JointType.HandTipRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristRight, JointType.ThumbRight));

            // Left Arm
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ShoulderLeft, JointType.ElbowLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.ElbowLeft, JointType.WristLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristLeft, JointType.HandLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HandLeft, JointType.HandTipLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.WristLeft, JointType.ThumbLeft));

            // Right Leg
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HipRight, JointType.KneeRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.KneeRight, JointType.AnkleRight));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.AnkleRight, JointType.FootRight));

            // Left Leg
            this.bones.Add(new Tuple <JointType, JointType>(JointType.HipLeft, JointType.KneeLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.KneeLeft, JointType.AnkleLeft));
            this.bones.Add(new Tuple <JointType, JointType>(JointType.AnkleLeft, JointType.FootLeft));

            // list of colors, one for each drawn body
            this.bodyColors = new List <Pen>();

            this.bodyColors.Add(new Pen(Brushes.Red, 6));
            this.bodyColors.Add(new Pen(Brushes.Orange, 6));
            this.bodyColors.Add(new Pen(Brushes.Green, 6));
            this.bodyColors.Add(new Pen(Brushes.Blue, 6));
            this.bodyColors.Add(new Pen(Brushes.Indigo, 6));
            this.bodyColors.Add(new Pen(Brushes.Violet, 6));

            // open the sensor
            this.kinectSensor.Open();

            // create the drawing group we'll use for drawing
            this.drawingGroup = new DrawingGroup();

            // create the image source we'll draw on
            this.imageSource = new DrawingImage(this.drawingGroup);

            // use this window as the view model
            this.DataContext = this;

            // initialize the components (controls) of the window
            this.InitializeComponent();


            /*****************************************************************************************************************/
            // initialize the drum kit

            MediaPlayer player    = new MediaPlayer();
            BitmapImage Image     = new BitmapImage();
            double      Reduction = 0;

            //Bass
            Image  = new BitmapImage(new Uri("pack://application:,,,/Images/Bass.png", UriKind.Absolute));
            player = new MediaPlayer();
            player.Open(new Uri("Sounds/Bass.wav", UriKind.Relative));
            Reduction = 2.2;

            bass = new Drum(
                new Rect((this.displayWidth - (Image.Width / Reduction)) / 2,
                         this.displayHeight - (Image.Height / Reduction) - 10,
                         Image.Width / Reduction, Image.Height / Reduction),
                new Rect((this.displayWidth - (Image.Width / Reduction / 5)) / 2,
                         this.displayHeight - (Image.Height / Reduction / 10) - 10,
                         Image.Width / Reduction / 5, Image.Height / Reduction / 10),
                player,
                Image,
                Reduction,
                0
                );

            //Snare
            Image  = new BitmapImage(new Uri("pack://application:,,,/Images/Snare.png", UriKind.Absolute));
            player = new MediaPlayer();
            player.Open(new Uri("Sounds/Snare.wav", UriKind.Relative));
            Reduction = 2;

            snare = new Drum(
                new Rect((this.displayWidth / 2) + (Image.Width / 8),
                         this.displayHeight - ((bass.Height) + (Image.Height / Reduction)) - 10,
                         Image.Width / Reduction, Image.Height / Reduction),
                new Rect((this.displayWidth / 2) + (Image.Width / 8),
                         this.displayHeight - ((bass.Height) + (Image.Height / Reduction)) - 15,
                         Image.Width / Reduction, Image.Height / Reduction + 10),
                player,
                Image,
                Reduction,
                1
                );


            //Middle Tom
            Image  = new BitmapImage(new Uri("pack://application:,,,/Images/MiddleTom.png", UriKind.Absolute));
            player = new MediaPlayer();
            player.Open(new Uri("Sounds/MidTom.wav", UriKind.Relative));
            Reduction = 8.5;

            middleTom = new Drum(
                new Rect((this.displayWidth / 2) - (bass.Width / 2),
                         this.displayHeight - ((bass.Height) + (Image.Height / Reduction)) - 10,
                         Image.Width / Reduction, Image.Height / Reduction),
                new Rect((this.displayWidth / 2) - (bass.Width / 2) + 11,
                         this.displayHeight - ((bass.Height) + (Image.Height / Reduction)) - 10,
                         (Image.Width / Reduction) - 11, Image.Height / Reduction),
                player,
                Image,
                Reduction,
                1
                );

            //Floor Tom
            Image  = new BitmapImage(new Uri("pack://application:,,,/Images/FloorTom.png", UriKind.Absolute));
            player = new MediaPlayer();
            player.Open(new Uri("Sounds/FloorTom.wav", UriKind.Relative));
            Reduction = 12;

            floorTom = new Drum(
                new Rect((this.displayWidth / 2) + (bass.Width / 2),
                         this.displayHeight - (Image.Height / Reduction) - 10,
                         Image.Width / Reduction, Image.Height / Reduction),
                new Rect((this.displayWidth / 2) + (bass.Width / 2) + 11,
                         this.displayHeight - (Image.Height / Reduction) - 10,
                         (Image.Width / Reduction) - 20, Image.Height / Reduction / 2),
                player,
                Image,
                Reduction,
                1
                );

            //Crash
            Image  = new BitmapImage(new Uri("pack://application:,,,/Images/Crash.png", UriKind.Absolute));
            player = new MediaPlayer();
            player.Open(new Uri("Sounds/CrashCymbal.wav", UriKind.Relative));
            Reduction = 2;

            crash = new Drum(
                new Rect((this.displayWidth / 2) - (bass.Width / 2) - (Image.Width / 1.7),
                         this.displayHeight - (Image.Height / Reduction) - 10,
                         Image.Width / Reduction, Image.Height / Reduction),
                new Rect((this.displayWidth / 2) - (bass.Width / 2) - (Image.Width / 1.7) + 2,
                         this.displayHeight - (Image.Height / Reduction) - 10,
                         Image.Width / Reduction - 13, Image.Height / Reduction / 7.5),
                player,
                Image,
                Reduction,
                1
                );

            //Hihat
            Image  = new BitmapImage(new Uri("pack://application:,,,/Images/Hihat.png", UriKind.Absolute));
            player = new MediaPlayer();
            player.Open(new Uri("Sounds/Hihat.wav", UriKind.Relative));
            MediaPlayer playerAux = new MediaPlayer();

            playerAux.Open(new Uri("Sounds/ClosedHihat.wav", UriKind.Relative));
            MediaPlayer playerPedal = new MediaPlayer();

            playerPedal.Open(new Uri("Sounds/ClosingHihat.wav", UriKind.Relative));
            Reduction = 5;

            hihat = new Hihat(
                new Rect((this.displayWidth / 2) - (bass.Width / 2) - (Image.Width / Reduction),
                         this.displayHeight - (Image.Height / Reduction) - 10,
                         Image.Width / Reduction, Image.Height / Reduction),
                new Rect((this.displayWidth / 2) - (bass.Width / 2) - (Image.Width / Reduction) + 8,
                         this.displayHeight - (Image.Height / Reduction) + 16 - 10,
                         Image.Width / Reduction - 16, Image.Height / Reduction / 6),
                new Rect((this.displayWidth / 2) - (bass.Width / 2) - (Image.Width / Reduction / 2) - (Image.Width / Reduction / 4 / 2),
                         this.displayHeight - (Image.Height / Reduction / 12) - 10,
                         Image.Width / Reduction / 3, Image.Height / Reduction / 12),
                player,
                playerAux,
                playerPedal,
                Image,
                Reduction
                );

            // initialize the list of GestureDetectors
            this.gestureDetectorList = new List <GestureDetector>();
            player = new MediaPlayer();
            player.Open(new Uri("Sounds/Drumsticks.wav", UriKind.Relative));

            for (int i = 0; i < maxBodies; ++i)
            {
                GestureDetector detector = new GestureDetector(this.kinectSensor, player);
                this.gestureDetectorList.Add(detector);
            }
            /*****************************************************************************************************************/
        }
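The drums are laid out in depth-space coordinates, so a hit can be detected by mapping a hand joint into depth space and testing it against a drum's trigger rectangle. This is only a sketch: CheckDrumHit is a hypothetical helper, and HitArea / Sound are assumed names for the trigger Rect and MediaPlayer that the Drum constructor receives:

        // Hypothetical sketch of a drum hit test; HitArea and Sound are assumed Drum members.
        private void CheckDrumHit(Body body, JointType hand, Drum drum)
        {
            Joint joint = body.Joints[hand];
            if (joint.TrackingState == TrackingState.NotTracked)
            {
                return;
            }

            // project the 3D joint into the 2D depth image the drums were positioned in
            DepthSpacePoint depthPoint = this.coordinateMapper.MapCameraPointToDepthSpace(joint.Position);

            if (drum.HitArea.Contains(new Point(depthPoint.X, depthPoint.Y)))
            {
                drum.Sound.Position = TimeSpan.Zero; // rewind so rapid hits retrigger the sample
                drum.Sound.Play();
            }
        }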
Ejemplo n.º 57
0
        /// <summary>
        /// Initializes a new instance of the MainWindow class
        /// </summary>
        public MainWindow()
        {
            // only one sensor is currently supported
            this.kinectSensor = KinectSensor.GetDefault();

            // set IsAvailableChanged event notifier
            this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            // open the sensor
            this.kinectSensor.Open();

            // set the status text
            this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                            : Properties.Resources.NoSensorStatusText;

            // open the reader for the body frames
            this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

            // initialize the BodyViewer object for displaying tracked bodies in the UI
            this.kinectBodyView = new KinectBodyView(this.kinectSensor);

            // initialize the gesture detection objects for our gestures
            this.gestureDetectorList = new List <GestureDetector>();

            // initialize the MainWindow
            this.InitializeComponent();

            // set our data context objects for display in UI
            this.DataContext = this;
            this.kinectBodyViewbox.DataContext = this.kinectBodyView;

            // create a gesture detector for each body (6 bodies => 6 detectors) and create content controls to display results in the UI
            int col0Row   = 0;
            int col1Row   = 0;
            int maxBodies = this.kinectSensor.BodyFrameSource.BodyCount;

            for (int i = 0; i < maxBodies; ++i)
            {
                GestureResultView result   = new GestureResultView(i, false, false, 0.0f);
                GestureDetector   detector = new GestureDetector(this.kinectSensor, result);
                this.gestureDetectorList.Add(detector);

                // split gesture results across the first two columns of the content grid
                ContentControl contentControl = new ContentControl();
                contentControl.Content = this.gestureDetectorList[i].GestureResultView;

                if (i % 2 == 0)
                {
                    // Gesture results for bodies: 0, 2, 4
                    Grid.SetColumn(contentControl, 0);
                    Grid.SetRow(contentControl, col0Row);
                    ++col0Row;
                }
                else
                {
                    // Gesture results for bodies: 1, 3, 5
                    Grid.SetColumn(contentControl, 1);
                    Grid.SetRow(contentControl, col1Row);
                    ++col1Row;
                }

                this.contentGrid.Children.Add(contentControl);
            }
        }
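The detectors created above still need to be handed TrackingIds as bodies come and go, which normally happens in the body-frame handler. A sketch of that binding step, assuming GestureDetector exposes TrackingId and IsPaused properties as in the SDK's DiscreteGestureBasics sample:

        // Hypothetical sketch: keep each detector bound to the body at the same index.
        // TrackingId and IsPaused are assumed GestureDetector members.
        private void UpdateGestureDetectors(Body[] trackedBodies)
        {
            for (int i = 0; i < trackedBodies.Length; ++i)
            {
                ulong trackingId = trackedBodies[i].TrackingId;

                if (trackingId != this.gestureDetectorList[i].TrackingId)
                {
                    this.gestureDetectorList[i].TrackingId = trackingId;

                    // pause the detector while no body is tracked at this index
                    this.gestureDetectorList[i].IsPaused = trackingId == 0;
                }
            }
        }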
Ejemplo n.º 58
0
        /// <summary>
        /// Initializes a new instance of the MainWindow class.
        /// </summary>
        public MainWindow()
        {
            // one sensor is currently supported
            this.kinectSensor = KinectSensor.GetDefault();

            // get the coordinate mapper
            this.coordinateMapper = this.kinectSensor.CoordinateMapper;

            // get the color frame details
            FrameDescription frameDescription = this.kinectSensor.ColorFrameSource.FrameDescription;

            // set the display specifics
            this.displayWidth  = frameDescription.Width;
            this.displayHeight = frameDescription.Height;
            this.displayRect   = new Rect(0.0, 0.0, this.displayWidth, this.displayHeight);

            // open the reader for the body frames
            this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

            // wire handler for body frame arrival
            this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

            // set the maximum number of bodies that would be tracked by Kinect
            this.bodyCount = this.kinectSensor.BodyFrameSource.BodyCount;

            // allocate storage to store body objects
            this.bodies = new Body[this.bodyCount];

            // specify the required face frame results
            FaceFrameFeatures faceFrameFeatures =
                FaceFrameFeatures.BoundingBoxInColorSpace
                | FaceFrameFeatures.PointsInColorSpace
                | FaceFrameFeatures.RotationOrientation
                | FaceFrameFeatures.FaceEngagement
                | FaceFrameFeatures.Glasses
                | FaceFrameFeatures.Happy
                | FaceFrameFeatures.LeftEyeClosed
                | FaceFrameFeatures.RightEyeClosed
                | FaceFrameFeatures.LookingAway
                | FaceFrameFeatures.MouthMoved
                | FaceFrameFeatures.MouthOpen;

            // create a face frame source + reader to track each face in the FOV
            this.faceFrameSources = new FaceFrameSource[this.bodyCount];
            this.faceFrameReaders = new FaceFrameReader[this.bodyCount];
            for (int i = 0; i < this.bodyCount; i++)
            {
                // create the face frame source with the required face frame features and an initial tracking Id of 0
                this.faceFrameSources[i] = new FaceFrameSource(this.kinectSensor, 0, faceFrameFeatures);

                // open the corresponding reader
                this.faceFrameReaders[i] = this.faceFrameSources[i].OpenReader();
            }

            // allocate storage to store face frame results for each face in the FOV
            this.faceFrameResults = new FaceFrameResult[this.bodyCount];

            // populate face result colors - one for each face index
            this.faceBrush = new List <Brush>()
            {
                Brushes.White,
                Brushes.Orange,
                Brushes.Green,
                Brushes.Red,
                Brushes.LightBlue,
                Brushes.Yellow
            };

            // set IsAvailableChanged event notifier
            this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            // open the sensor
            this.kinectSensor.Open();

            // set the status text
            this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                            : Properties.Resources.NoSensorStatusText;

            // Create the drawing group we'll use for drawing
            this.drawingGroup = new DrawingGroup();

            // Create an image source that we can use in our image control
            this.imageSource = new DrawingImage(this.drawingGroup);

            // use the window object as the view model in this simple example
            this.DataContext = this;

            // initialize the components (controls) of the window
            this.InitializeComponent();
        }
Ejemplo n.º 59
0
 /// <summary>
 /// Initialize the body frame reader
 /// </summary>
 private void InitializeBodyFrame()
 {
     // body frame reader and its frame-arrived event
     bodyFrameReader = _sensor.BodyFrameSource.OpenReader();
     bodyFrameReader.FrameArrived += BodyFrameReader_FrameArrived;
 }
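The reader opened here holds native resources, so a matching teardown (typically run when the window closes) is worth sketching; the field names follow the snippet above:

 // Hypothetical counterpart to InitializeBodyFrame: release the reader and close the sensor.
 private void CleanupBodyFrame()
 {
     if (bodyFrameReader != null)
     {
         bodyFrameReader.FrameArrived -= BodyFrameReader_FrameArrived;
         bodyFrameReader.Dispose();
         bodyFrameReader = null;
     }

     if (_sensor != null)
     {
         _sensor.Close();
         _sensor = null;
     }
 }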
Ejemplo n.º 60
0
        public MainWindow()
        {
            InitializeComponent();
            KinectRegion.SetKinectRegion(this, kinectRegion);
            kinectRegion.KinectSensor = KinectSensor.GetDefault();
            // only one sensor is currently supported
            //kinectRegion.KinectSensor = KinectSensor.GetDefault();
            // set IsAvailableChanged event notifier
            kinectRegion.KinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            // open the sensor
            //kinectRegion.KinectSensor.Open();
            // set the status text
            this.StatusText = kinectRegion.KinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                            : Properties.Resources.NoSensorStatusText;

            // open the reader for the body frames
            this.bodyFrameReader = kinectRegion.KinectSensor.BodyFrameSource.OpenReader();

            // set the BodyFramedArrived event notifier
            this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;
            // initialize the gesture detection objects for our gestures
            this.gestureDetectorList = new List <GestureDetector>();

            // initialize the BodyViewer object for displaying tracked bodies in the UI
            kinectBodyView = new KinectBodyView(kinectRegion.KinectSensor);

            // initialize the MainWindow
            //this.InitializeComponent();

            // set our data context objects for display in UI
            this.DataContext = this;
            //this.kinectBodyViewbox.DataContext = this.kinectBodyView;


            int col0Row   = 0;
            int col1Row   = 0;
            int maxBodies = kinectRegion.KinectSensor.BodyFrameSource.BodyCount;



            a     = 40;
            chord = 0;

            /*GestureResultView result = new GestureResultView(0, false, false, 0.0f, false);
             *
             * GestureDetector detector = new GestureDetector(kinectRegion.KinectSensor, result);
             * this.gestureDetectorList.Add(detector);
             *
             * // split gesture results across the first two columns of the content grid
             * ContentControl contentControl = new ContentControl();
             * contentControl.Content = this.gestureDetectorList[0].GestureResultView;*/

            for (int i = 0; i < maxBodies; ++i)
            {
                GestureResultView result   = new GestureResultView(i, false, false, 0.0f, false);
                GestureDetector   detector = new GestureDetector(kinectRegion.KinectSensor, result);
                this.gestureDetectorList.Add(detector);

                // split gesture results across the first two columns of the content grid
                ContentControl contentControl = new ContentControl();
                contentControl.Content = this.gestureDetectorList[i].GestureResultView;

                if (i % 2 == 0)
                {
                    // Gesture results for bodies: 0, 2, 4
                    Grid.SetColumn(contentControl, 0);
                    Grid.SetRow(contentControl, col0Row);
                    ++col0Row;
                }
                else
                {
                    // Gesture results for bodies: 1, 3, 5
                    Grid.SetColumn(contentControl, 1);
                    Grid.SetRow(contentControl, col1Row);
                    ++col1Row;
                }

                //this.contentGrid.Children.Add(contentControl);
            }

            //this.ImageSource = this.notSeatedImage;
            Loaded += MainWindow_Loaded;

            dev = devInfo.CreateDevice();
            dev.Open();
        }
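The Sensor_IsAvailableChanged handler wired above is not shown in this listing; a minimal sketch that keeps StatusText in sync with sensor availability, reusing the same resource strings, could look like this:

        // Hypothetical sketch of the availability handler wired in the constructor above.
        private void Sensor_IsAvailableChanged(object sender, IsAvailableChangedEventArgs e)
        {
            // on sensor loss IsAvailable flips to false and the status text reports it
            this.StatusText = e.IsAvailable ? Properties.Resources.RunningStatusText
                                            : Properties.Resources.NoSensorStatusText;
        }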