// Window startup: acquire the default Kinect sensor and wire up the
// multi-source, body, and face frame readers.
private void Window_Loaded(object sender, RoutedEventArgs e)
{
    _sensor = KinectSensor.GetDefault();
    if (_sensor == null)
    {
        return;
    }

    _sensor.Open();

    _bodies = new Body[_sensor.BodyFrameSource.BodyCount];

    // Combined reader over color, depth, infrared and body streams.
    _reader = _sensor.OpenMultiSourceFrameReader(
        FrameSourceTypes.Color | FrameSourceTypes.Depth |
        FrameSourceTypes.Infrared | FrameSourceTypes.Body);
    _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;

    // Dedicated body reader (feeds tracking data alongside the face source).
    _bodyReader = _sensor.BodyFrameSource.OpenReader();
    _bodyReader.FrameArrived += BodyReader_FrameArrived;

    // Face source configured with the features this window displays.
    FaceFrameFeatures features =
        FaceFrameFeatures.BoundingBoxInColorSpace |
        FaceFrameFeatures.FaceEngagement |
        FaceFrameFeatures.Glasses |
        FaceFrameFeatures.Happy |
        FaceFrameFeatures.LeftEyeClosed |
        FaceFrameFeatures.MouthOpen |
        FaceFrameFeatures.PointsInColorSpace |
        FaceFrameFeatures.RightEyeClosed;
    _faceSource = new FaceFrameSource(_sensor, 0, features);
    _faceReader = _faceSource.OpenReader();
    _faceReader.FrameArrived += FaceReader_FrameArrived;
}
/// <summary>
/// Wires up the Kinect sensor, a multi-source reader (depth, color, body),
/// the display bitmaps and the pixel buffers, then opens the sensor.
/// </summary>
private KinectStreamer()
{
    KinectStreamerConfig = new KinectStreamerConfig();

    kinectSensor = KinectSensor.GetDefault();
    CoordinateMapper = kinectSensor.CoordinateMapper;

    multiSourceFrameReader = kinectSensor.OpenMultiSourceFrameReader(
        FrameSourceTypes.Depth | FrameSourceTypes.Color | FrameSourceTypes.Body);
    multiSourceFrameReader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;

    ColorFrameDescription = kinectSensor.ColorFrameSource.FrameDescription;
    DepthFrameDescription = kinectSensor.DepthFrameSource.FrameDescription;

    // Depth renders as one gray byte per pixel; color as 32-bit BGR.
    depthBitmap = new WriteableBitmap(DepthFrameDescription.Width, DepthFrameDescription.Height, 96.0, 96.0, PixelFormats.Gray8, null);
    colorBitmap = new WriteableBitmap(ColorFrameDescription.Width, ColorFrameDescription.Height, 96.0, 96.0, PixelFormats.Bgr32, null);

    bitmapBackBufferSize = (uint)((colorBitmap.BackBufferStride * (colorBitmap.PixelHeight - 1)) + (colorBitmap.PixelWidth * this.bytesPerPixel));

    // FIX: the color buffer must hold bytesPerPixel bytes per pixel (the
    // bitmap is Bgr32, i.e. 4 bytes each); the original allocated only
    // Width * Height bytes, one quarter of a full frame.
    colorPixels = new byte[ColorFrameDescription.Width * ColorFrameDescription.Height * this.bytesPerPixel];
    depthPixels = new byte[DepthFrameDescription.Width * DepthFrameDescription.Height];
    depthArray = new ushort[DepthFrameDescription.Width * DepthFrameDescription.Height];

    SetupBody();

    kinectSensor.Open();
}
/// <summary>
/// Sets up the default Kinect sensor and a combined depth+color reader that
/// renders into the supplied bitmaps.
/// </summary>
public KinectReader(WriteableBitmap depthBitmap, WriteableBitmap colorBitmap)
{
    this.depthBitmap = depthBitmap;
    this.colorBitmap = colorBitmap;

    sensor = KinectSensor.GetDefault();
    sensor.Open();

    reader = sensor.OpenMultiSourceFrameReader(
        FrameSourceTypes.Depth | FrameSourceTypes.Color);
    reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;
}
/// <summary>
/// Builds the exercise list, opens the default Kinect sensor and a reader
/// over all four frame streams, and creates the frame mutex.
/// </summary>
public Core()
{
    InitExercises();

    sensor = KinectSensor.GetDefault();
    sensor.Open();
    reader = sensor.OpenMultiSourceFrameReader(
        FrameSourceTypes.Color | FrameSourceTypes.Depth |
        FrameSourceTypes.Infrared | FrameSourceTypes.Body);

    frameLock = new Mutex();
}
// Window setup: open the sensor, stream color+body frames, and attach a
// full-HD bitmap to the image control for rendering.
public MainWindow()
{
    InitializeComponent();

    mySensor = KinectSensor.GetDefault();
    mySensor.Open();

    myReader = mySensor.OpenMultiSourceFrameReader(
        FrameSourceTypes.Color | FrameSourceTypes.Body);
    myReader.MultiSourceFrameArrived += myReader_MultiSourceFrameArrived;

    // 1920x1080 matches the Kinect v2 color camera resolution.
    myBitmap = new WriteableBitmap(1920, 1080, 96.0, 96.0, PixelFormats.Pbgra32, null);
    image1.Source = myBitmap;
}
// Window startup: acquire the sensor and begin streaming all four sources.
private void Window_Loaded(object sender, RoutedEventArgs e)
{
    _sensor = KinectSensor.GetDefault();
    if (_sensor == null)
    {
        return;
    }

    _sensor.Open();

    _reader = _sensor.OpenMultiSourceFrameReader(
        FrameSourceTypes.Color | FrameSourceTypes.Depth |
        FrameSourceTypes.Infrared | FrameSourceTypes.Body);
    _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;
}
/// <summary>
/// Constructor
/// </summary>
/// <param name="port">Server port</param>
/// <param name="sensor">Kinect sensor to stream frames from; must not be null.</param>
/// <exception cref="System.ArgumentNullException"><paramref name="sensor"/> is null.</exception>
public KinectFrameServer(int port, KinectSensor sensor)
{
    // FIX: fail fast on a null sensor instead of a later
    // NullReferenceException when the reader is opened.
    if (sensor == null)
    {
        throw new System.ArgumentNullException(nameof(sensor));
    }

    this.sensor = sensor;

    this.listener = new KinectClientListener(port);
    this.listener.ClientConnected += listener_ClientConnected;

    // Snappy compressors, one per streamed frame type.
    this.depthCompressor = new SnappyFrameCompressor(KinectFrameInformation.DepthFrame);
    this.bodyIndexCompressor = new SnappyFrameCompressor(KinectFrameInformation.BodyIndexFrame);

    this.multiSourceReader = sensor.OpenMultiSourceFrameReader(
        FrameSourceTypes.BodyIndex | FrameSourceTypes.Depth);
    this.multiSourceReader.MultiSourceFrameArrived += multiSourceReader_MultiSourceFrameArrived;
}
// Page setup: open the sensor and stream all five sources (body index is
// needed for background removal).
public BackgroundRemovalPage()
{
    InitializeComponent();

    _sensor = KinectSensor.GetDefault();
    if (_sensor == null)
    {
        return;
    }

    _sensor.Open();

    _reader = _sensor.OpenMultiSourceFrameReader(
        FrameSourceTypes.Color | FrameSourceTypes.Depth |
        FrameSourceTypes.Infrared | FrameSourceTypes.Body |
        FrameSourceTypes.BodyIndex);
    _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;
}
/// <summary>
/// Window startup: acquires the Kinect sensor and wires up a
/// color/depth/body-index reader.
/// </summary>
private void Window_Loaded(object sender, RoutedEventArgs e)
{
    _kinectSensor = KinectSensor.GetDefault();

    // FIX: the original called Open() before the null check and subscribed
    // the reader outside the guard, throwing NullReferenceException when no
    // sensor is connected. Everything now happens behind the guard.
    if (_kinectSensor == null)
    {
        return;
    }

    InitializeInfrared();
    _kinectSensor.Open();

    _utils = new Utils(_kinectSensor.CoordinateMapper);

    _reader = _kinectSensor.OpenMultiSourceFrameReader(
        FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.BodyIndex);
    _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;
}
// Window startup: sensor, background-removal tool, and frame reader.
private void Window_Loaded(object sender, RoutedEventArgs e)
{
    _sensor = KinectSensor.GetDefault();
    if (_sensor == null)
    {
        return;
    }

    _sensor.Open();

    // The background removal tool needs the coordinate mapper to relate
    // depth and color space.
    _backgroundRemovalTool = new BackgroundRemovalTool(_sensor.CoordinateMapper);

    _reader = _sensor.OpenMultiSourceFrameReader(
        FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.BodyIndex);
    _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;
}
// Entry point: start body tracking and drive the virtual mouse.
static void Main(string[] args)
{
    sensor = KinectSensor.GetDefault();
    sensor.IsAvailableChanged += Sensor_IsAvailableChanged;
    sensor.Open();

    // Only the body stream is needed for cursor control.
    _multiReader = sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Body);
    _multiReader.MultiSourceFrameArrived += OnMultipleFramesArrivedHandler;

    // Park the cursor and click once before handing control to the tracker.
    VirtualMouse.MoveTo(900, 39);
    VirtualMouse.LeftClick();

    // Keep the process alive until a key is pressed.
    Console.ReadKey();
}
// Page setup: sensor, multi-source reader, and gesture recognition.
public GesturesPage()
{
    InitializeComponent();

    _sensor = KinectSensor.GetDefault();
    if (_sensor == null)
    {
        return;
    }

    _sensor.Open();

    _reader = _sensor.OpenMultiSourceFrameReader(
        FrameSourceTypes.Color | FrameSourceTypes.Depth |
        FrameSourceTypes.Infrared | FrameSourceTypes.Body);
    _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;

    // Gesture pipeline: raises GestureRecognized from tracked bodies.
    _gestureController = new GestureController();
    _gestureController.GestureRecognized += GestureController_GestureRecognized;
}
/// <summary>
/// Acquires the default Kinect sensor, opening it if necessary, and starts
/// a reader over every available frame stream.
/// </summary>
/// <exception cref="ScannerNotFoundException">No Kinect sensor is plugged in.</exception>
public FrameReader()
{
    _sensor = KinectSensor.GetDefault();
    if (_sensor == null)
    {
        throw new ScannerNotFoundException("No valid plugged-in Kinect sensor found.");
    }

    if (!_sensor.IsOpen)
    {
        _sensor.Open();
    }

    var allStreams = FrameSourceTypes.Color | FrameSourceTypes.Depth |
                     FrameSourceTypes.Infrared | FrameSourceTypes.Body |
                     FrameSourceTypes.BodyIndex;
    _reader = _sensor.OpenMultiSourceFrameReader(allStreams);
    _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;
}
// Scene setup: sensor, display bitmap, gesture recognizer, and frame reader.
private void InitializeSceen()
{
    _sensor = KinectSensor.GetDefault();
    _sensor.Open();

    // Display bitmap sized to the color camera (BGRA source description).
    FrameDescription colorDesc = _sensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);
    _colorBitmap = new WriteableBitmap(colorDesc.Width, colorDesc.Height, 96.0, 96.0, PixelFormats.Bgr32, null);

    // Gesture recognizer raising the force events handled by this window.
    _jedi = new JediGestureRecognizer(_sensor);
    _jedi.ForceApplying += ForceApplying;
    _jedi.ForceDispel += ForceDispel;

    _frameReader = _sensor.OpenMultiSourceFrameReader(
        FrameSourceTypes.Body | FrameSourceTypes.Depth | FrameSourceTypes.Color);
    _frameReader.MultiSourceFrameArrived += MultiSourceFrameArrived;
}
// Page setup: sensor, multi-source reader, and player enter/leave tracking.
public AnglePage()
{
    InitializeComponent();

    _sensor = KinectSensor.GetDefault();
    if (_sensor == null)
    {
        return;
    }

    _sensor.Open();

    _reader = _sensor.OpenMultiSourceFrameReader(
        FrameSourceTypes.Color | FrameSourceTypes.Depth |
        FrameSourceTypes.Infrared | FrameSourceTypes.Body);
    _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;

    // Report bodies entering and leaving the scene.
    _userReporter = new PlayersController();
    _userReporter.BodyEntered += UserReporter_BodyEntered;
    _userReporter.BodyLeft += UserReporter_BodyLeft;
    _userReporter.Start();
}
// Window startup: open the sensor, report readiness, and stream all sources.
protected void MainWindow_Loaded(object sender, RoutedEventArgs e)
{
    kinect = KinectSensor.GetDefault();
    kinect.Open();

    // Reflect the sensor state in the status label.
    kinectStatusLabel.Content = kinect.IsOpen ? "Kinect Ready!" : "Kinect not Ready!";

    frameReader = kinect.OpenMultiSourceFrameReader(
        FrameSourceTypes.Body | FrameSourceTypes.Infrared |
        FrameSourceTypes.Depth | FrameSourceTypes.Color);
    frameReader.MultiSourceFrameArrived += MainWindow_KinectFrameArrived;
}
public CameraPage()
{
    InitializeComponent();

    // Grab the active sensor; bail out if none is connected.
    _sensor = KinectSensor.GetDefault();
    if (_sensor == null)
    {
        return;
    }

    // Open the sensor and declare which streams may be accessed.
    _sensor.Open();
    _reader = _sensor.OpenMultiSourceFrameReader(
        FrameSourceTypes.Color | FrameSourceTypes.Depth |
        FrameSourceTypes.Infrared | FrameSourceTypes.Body);
    _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;

    // Track players entering and leaving the scene.
    _playersController = new PlayersController();
    _playersController.BodyEntered += UserReporter_BodyEntered;
    _playersController.BodyLeft += UserReporter_BodyLeft;
    _playersController.Start();
}
// Window startup: build the target list, then wire up the sensor, reader,
// color bitmap and depth-pixel buffer.
private void Window_Loaded(object sender, RoutedEventArgs e)
{
    _sensor = KinectSensor.GetDefault();

    // Pre-build the targets. NOTE(review): "<=" produces NumbersOfTarget + 1
    // entries — confirm that is intentional.
    for (int i = 0; i <= NumbersOfTarget; i++)
    {
        TargetList.Add(new Target(i));
    }
    pointTarget = new Target(0);

    if (_sensor == null)
    {
        return;
    }

    _sensor.Open();
    coordinateMapper = _sensor.CoordinateMapper;

    _reader = _sensor.OpenMultiSourceFrameReader(
        FrameSourceTypes.Color | FrameSourceTypes.Depth |
        FrameSourceTypes.Infrared | FrameSourceTypes.Body);
    _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;

    // Color bitmap at camera resolution; one flag slot per depth pixel.
    FrameDescription colorDesc = _sensor.ColorFrameSource.FrameDescription;
    colorBitmap = new WriteableBitmap(colorDesc.Width, colorDesc.Height, 96.0, 96.0, PixelFormats.Bgr32, null);
    boolPixels = new int[_sensor.DepthFrameSource.FrameDescription.LengthInPixels];

    checkBoxShowXYZ.IsChecked = true;
}
// Window setup: color+depth streaming with buffers for raw color, grayscale
// depth, and color mapped into depth space.
public MainWindow()
{
    kinectSensor = KinectSensor.GetDefault();

    multiSourceFrameReader = kinectSensor.OpenMultiSourceFrameReader(
        FrameSourceTypes.Color | FrameSourceTypes.Depth);
    multiSourceFrameReader.MultiSourceFrameArrived += MultiSourceFrameArrived;

    // Color image buffers (BGRA source description, BGR32 display bitmap).
    colorFrameDescription = kinectSensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);
    rawColorBitmap = new WriteableBitmap(colorFrameDescription.Width, colorFrameDescription.Height, 96, 96, PixelFormats.Bgr32, null);

    // Depth buffers: one grayscale byte per depth pixel.
    depthFrameDescription = kinectSensor.DepthFrameSource.FrameDescription;
    depthPixels = new byte[depthFrameDescription.Width * depthFrameDescription.Height];
    depthBitmap = new WriteableBitmap(depthFrameDescription.Width, depthFrameDescription.Height, 96, 96, PixelFormats.Gray8, null);

    // Color-mapped-to-depth bitmap shares the depth resolution.
    colorBitmap = new WriteableBitmap(depthFrameDescription.Width, depthFrameDescription.Height, 96, 96, PixelFormats.Bgr32, null);

    kinectSensor.Open();

    DataContext = this;
    InitializeComponent();
}
protected void MainWindow_Loaded(object sender, RoutedEventArgs e)
{
    // Initialize the Kinect.
    _kinect = KinectSensor.GetDefault();
    _kinect.Open();

    if (_kinect.IsOpen)
    {
        // Sensor is ready: update the UI and start streaming all sources.
        kinectStatusLabel.Content = "READY";
        kinectStatusLabel.Foreground = new SolidColorBrush(Colors.Green);

        _frameReader = _kinect.OpenMultiSourceFrameReader(
            FrameSourceTypes.Body | FrameSourceTypes.Infrared |
            FrameSourceTypes.Depth | FrameSourceTypes.Color);
        _frameReader.MultiSourceFrameArrived += MainWindow_KinectFrameArrived;
    }
    else
    {
        Debug.WriteLine("Kinect not working!");
    }

    // Audio playback starts regardless of the sensor state.
    _playbackThread = new AudioPlaybackThread();
    _playbackThread.StartAudioPlayback();
}
/// <summary>
/// Opens the default Kinect v2 sensor, starts a reader over all five frame
/// streams, and allocates the most-recent-frame and gesture buffers.
/// </summary>
public Kinectv2()
{
    _sensor = KinectSensor.GetDefault();
    if (_sensor == null)
    {
        return;
    }

    _sensor.Open();
    _reader = _sensor.OpenMultiSourceFrameReader(
        FrameSourceTypes.Color |
        FrameSourceTypes.Depth |
        FrameSourceTypes.Infrared |
        FrameSourceTypes.Body |
        FrameSourceTypes.BodyIndex);

    // FIX: removed a second "if (_sensor == null) return;" that followed the
    // reader setup — it was unreachable dead code (_sensor was already
    // verified above, and OpenMultiSourceFrameReader cannot null it).

    MostRecentColorFrame = new byte[CalculateImageByteCount(ColorFrameDescription)];
    MostRecentDepthFrame = new byte[CalculateImageByteCount(DepthFrameDescription)];
    MostRecentInfraredFrame = new byte[CalculateImageByteCount(InfraredFrameDescription)];
    MostRecentSilhouetteFrame = new byte[CalculateImageByteCount(SilhouetteFrameDescription)];
    MostRecentGestures = new GestureResults(_sensor.BodyFrameSource.BodyCount);

    ColorFrameSize = CalculateImageSize(ColorFrameDescription);
    DepthFrameSize = CalculateImageSize(DepthFrameDescription);
    InfraredFrameSize = CalculateImageSize(InfraredFrameDescription);
    SilhouetteFrameSize = CalculateImageSize(SilhouetteFrameDescription);

    _gestureTrackers = new List<GestureTracker>();
}
// Window startup: sensor, color+depth reader, pixel buffers, display bitmap,
// and Kinect Fusion initialization. Any failure is shown and closes the window.
private void Window_Loaded( object sender, RoutedEventArgs e )
{
    try
    {
        kinect = KinectSensor.GetDefault();
        if ( kinect == null )
        {
            throw new Exception( "Kinectを開けません" );
        }

        kinect.Open();
        coordinateMapper = kinect.CoordinateMapper;

        multiFrameReader = kinect.OpenMultiSourceFrameReader(
            FrameSourceTypes.Color | FrameSourceTypes.Depth );
        multiFrameReader.MultiSourceFrameArrived += multiFrameReader_MultiSourceFrameArrived;

        // Depth buffer sized to the depth camera.
        FrameDescription depthDesc = kinect.DepthFrameSource.FrameDescription;
        depthWidth = depthDesc.Width;
        depthHeight = depthDesc.Height;
        depthImagePixels = new ushort[depthWidth * depthHeight];

        // Color buffer in BGRA layout.
        FrameDescription colorDesc = kinect.ColorFrameSource.CreateFrameDescription( ColorImageFormat.Bgra );
        colorWidth = colorDesc.Width;
        colorHeight = colorDesc.Height;
        colorBytesPerPixel = colorDesc.BytesPerPixel;
        colorImagePixels = new byte[colorWidth * colorHeight * colorBytesPerPixel];

        // Display surface at depth resolution with color pixel layout.
        drawBitmap = new WriteableBitmap( depthWidth, depthHeight, 96, 96, PixelFormats.Bgra32, null );
        drawStride = depthWidth * (int)colorBytesPerPixel;
        drawRect = new Int32Rect( 0, 0, depthWidth, depthHeight );
        drawBuffer = new byte[drawStride * depthHeight];
        ImageColor.Source = drawBitmap;

        InitializeFusion();
    }
    catch ( Exception ex )
    {
        MessageBox.Show( ex.Message );
        Close();
    }
}
// Kinect startup: sensor, frame/mapping buffers, display texture, reader,
// and the body/player lists.
void KinectStart()
{
    m_kinect = KinectSensor.GetDefault();
    m_coordinateMapper = m_kinect.CoordinateMapper;
    m_kinect.Open();

    try
    {
        FrameDescription depthDesc = m_kinect.DepthFrameSource.FrameDescription;
        int depthWidth = depthDesc.Width;
        int depthHeight = depthDesc.Height;

        FrameDescription colorDesc = m_kinect.ColorFrameSource.FrameDescription;
        int colorWidth = colorDesc.Width;
        int colorHeight = colorDesc.Height;

        m_viewportSize = new Vector2(colorWidth, colorHeight);

        // Raw and display buffers at color resolution.
        m_colorFrameData = new byte[colorWidth * colorHeight * BytesPerColorPixel];
        m_displayPixels = new byte[colorWidth * colorHeight * BytesPerColorPixel];
        m_displayTexture = new Texture2D(m_graphicsDevice, colorWidth, colorHeight, mipMap: true, format: SurfaceFormat.Color);

        // Coordinate-mapping scratch tables for both directions.
        m_colorToDepthSpacePoints = new DepthSpacePoint[colorWidth * colorHeight];
        m_depthToColorSpacePoints = new ColorSpacePoint[depthWidth * depthHeight];

        m_reader = m_kinect.OpenMultiSourceFrameReader(
            FrameSourceTypes.Depth | FrameSourceTypes.Color |
            FrameSourceTypes.BodyIndex | FrameSourceTypes.Body);
        m_reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;

        if (m_bodies == null)
        {
            int bodyCount = 6; // m_kinect.BodyFrameSource.BodyCount;
            m_bodies = new List<Body>(bodyCount);
            for (int i = 0; i < bodyCount; i++)
            {
                m_bodies.Add(null);
            }

            m_holofunkBodies = new List<HolofunkBody>(PlayerCount);
            for (int i = 0; i < PlayerCount; i++)
            {
                m_holofunkBodies.Add(new HolofunkBody());
            }
        }
    }
    catch (InvalidOperationException)
    {
        HoloDebug.Assert(false);
        return;
    }

    m_lastTime = DateTime.Now;
}
// Enter tracking mode: UI, sensor, background removal, skeleton drawing,
// and gesture detection for the current exercise; then start the video.
private void StartTraceableMode()
{
    SetUIInTrackingMode();

    _sensor = KinectSensor.GetDefault(); // TODO(review): sensor check reported "not working"
    if (_sensor != null)
    {
        _sensor.Open();

        // Background removal and skeleton overlay helpers.
        _backgroundRemovalTool = new BackgroundRemovalTool(_sensor.CoordinateMapper);
        _drawSkeleton = new DrawSkeleton(_sensor, (int)(KinectSkeleton.Width), (int)(KinectSkeleton.Height));

        _reader = _sensor.OpenMultiSourceFrameReader(
            FrameSourceTypes.Color | FrameSourceTypes.Depth |
            FrameSourceTypes.BodyIndex | FrameSourceTypes.Body);
        _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;

        // Gesture detection wired to the current exercise.
        Exercise tempExercise = CurrentExercise;
        _gestureAnalysis = new GestureAnalysis(ref tempExercise);
        _gestureDetector = new GestureDetector(_sensor, _gestureAnalysis, CurrentExercise);
        _gestureAnalysis.startGestureDeteced += _gestureAnalysis_startGestureDeteced;

        CurrentExercise.CreateRounds();
    }

    ExerciseVideo.Play();
    inPlayMode = true;
}
// Enter non-traceable mode: same sensor pipeline as tracking mode but with
// the non-traceable frame handler and no gesture detection.
private void StartNonTraceableMode()
{
    SetUIInNoTrackableMode();

    _sensor = KinectSensor.GetDefault(); // TODO(review): sensor check reported "not working"
    if (_sensor == null)
    {
        return;
    }

    _sensor.Open();

    // Background removal and skeleton drawing helpers.
    _backgroundRemovalTool = new BackgroundRemovalTool(_sensor.CoordinateMapper);
    _drawSkeleton = new DrawSkeleton(_sensor, (int)(KinectSkeleton.Width), (int)(KinectSkeleton.Height));

    _reader = _sensor.OpenMultiSourceFrameReader(
        FrameSourceTypes.Color | FrameSourceTypes.Depth |
        FrameSourceTypes.BodyIndex | FrameSourceTypes.Body);
    _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived_NonTraceable;
}
// Entry point: stream Kinect color+body frames over a WebSocket server on
// port 2012 until 'q' is pressed.
static void Main(string[] args)
{
    Console.WriteLine("Press any key to start the WebSocketServer!");
    Console.ReadKey();
    Console.WriteLine();

    // Kinect setup: BGRA color buffer plus body tracking.
    ks = KinectSensor.GetDefault();
    var fd = ks.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);
    uint frameSize = fd.BytesPerPixel * fd.LengthInPixels;
    colorData.Data = new byte[frameSize];
    colorData.Format = ColorImageFormat.Bgra;

    msfr = ks.OpenMultiSourceFrameReader(FrameSourceTypes.Body | FrameSourceTypes.Color);
    msfr.MultiSourceFrameArrived += msfr_MultiSourceFrameArrived;
    bodies = new Body[ks.BodyFrameSource.BodyCount];
    ks.Open();

    // Server sized so a full color frame fits in a single request.
    appServer = new WebSocketServer();
    var config = new ServerConfig
    {
        Name = "kinect",
        Port = 2012,
        MaxRequestLength = (int)frameSize
    };

    // Setup the appServer with the listening port.
    if (!appServer.Setup(config))
    {
        Console.WriteLine("Failed to setup!");
        Console.ReadKey();
        return;
    }
    Console.WriteLine();

    // Try to start the appServer.
    if (!appServer.Start())
    {
        Console.WriteLine("Failed to start!");
        Console.ReadKey();
        return;
    }

    Console.WriteLine("The server started successfully, press key 'q' to stop it!");
    while (Console.ReadKey().KeyChar != 'q')
    {
        Console.WriteLine();
    }

    // Stop the appServer.
    appServer.Stop();
    Console.WriteLine();
    Console.WriteLine("The server was stopped!");
    Console.ReadKey();
}
// Sensor setup: build the frame-source mask from the configured tracking
// flags, open the reader, then open the sensor.
public void Initialize()
{
    KSensor = KinectSensor.GetDefault();

    FrameSourceTypes sources = FrameSourceTypes.None;
    if (TrackSkeleton)
    {
        sources |= FrameSourceTypes.Body;
    }
    if (TrackDepth)
    {
        sources |= FrameSourceTypes.Depth;
    }
    if (TrackColor)
    {
        sources |= FrameSourceTypes.Color;
    }

    coordinateMapper = KSensor.CoordinateMapper;

    multireader = KSensor.OpenMultiSourceFrameReader(sources);
    multireader.MultiSourceFrameArrived += multireader_MultiSourceFrameArrived;

    KSensor.Open();
}
/// <summary>
/// Window startup: launches the Kinect service process, then opens the
/// sensor, the multi-source reader, and the gesture controller.
/// </summary>
private void Window_Loaded(object sender, RoutedEventArgs e)
{
    Process.Start(@"C:\Windows\System32\KinectService.exe");

    // FIX: use KinectSensor.GetDefault() — the Kinect v2 SDK factory method
    // used by every other initializer in this codebase — instead of the
    // pre-release KinectSensor.Default property.
    _sensor = KinectSensor.GetDefault();

    if (_sensor != null)
    {
        _sensor.Open();

        _reader = _sensor.OpenMultiSourceFrameReader(
            FrameSourceTypes.Color | FrameSourceTypes.Depth |
            FrameSourceTypes.Infrared | FrameSourceTypes.Body);
        _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;

        _gestureController = new GestureController(GestureType.All);
        _gestureController.GestureRecognized += GestureController_GestureRecognized;
    }
}
/// <summary>
/// Initializes the Kinect sensor and the variables used for data capture.
/// </summary>
private void Initialize()
{
    // Acquire the default Kinect sensor; nothing to do without one.
    kinect = KinectSensor.GetDefault();
    if (kinect == null)
    {
        return;
    }

    // Reader for the body (skeleton) stream.
    reader = kinect.OpenMultiSourceFrameReader(FrameSourceTypes.Body);
    reader.MultiSourceFrameArrived += OnMultiSourceFrameArrived;

    // High-definition face tracking source and reader (first person).
    hdFaceFrameSource = new HighDefinitionFaceFrameSource(kinect);
    hdFaceFrameSource.TrackingIdLost += OnTrackingIdLost;
    hdFaceFrameReader = hdFaceFrameSource.OpenReader();
    hdFaceFrameReader.FrameArrived += OnFaceFrameArrived;
    faceModel = new FaceModel();
    faceAlignment = new FaceAlignment();

    // Color stream reader.
    _colorReader = kinect.ColorFrameSource.OpenReader();
    _colorReader.FrameArrived += ColorReader_FrameArrived;

    // Prepare the mesh views.
    InitializeMesh();
    UpdateMesh();

    // Start the sensor.
    kinect.Open();
}
/*
 * Executed when the window initially loads: reads tunable constants from
 * config.txt, launches KinectService, sizes the display, starts the
 * millisecond timer, and wires up the Kinect multi-source reader.
 */
private void Window_Loaded(object sender, RoutedEventArgs e)
{
    // Read configuration values. Comment ("//") and blank lines do not
    // consume a slot; the Nth value line maps to the Nth setting.
    // FIX: "using" disposes the StreamReader even if parsing throws —
    // the original leaked the file handle on exception.
    string line;
    int curVal = 1;
    using (var file = new System.IO.StreamReader("config.txt"))
    {
        while ((line = file.ReadLine()) != null)
        {
            if (line.StartsWith("//") || line.Equals(""))
            {
                curVal--;
            }
            else
            {
                switch (curVal)
                {
                    case 1: GS_STOP = Convert.ToDouble(line); break;
                    case 2: BA_STEP = Convert.ToDouble(line); break;
                    case 3: BA_TIME = Convert.ToDouble(line); break;
                    case 4: GUAG_HEIGHT = Convert.ToDouble(line); break;
                    case 5: GUAG_LEAVE = Convert.ToDouble(line); break;
                    case 6: GUAG_RETURN_MAX = Convert.ToDouble(line); break;
                    case 7: GUAG_RETURN_MIN = Convert.ToDouble(line); break;
                    case 8: GUAG_SIT_DIST = Convert.ToDouble(line); break;
                    case 9: CR_START_HEIGHT = Convert.ToDouble(line); break;
                    case 10: CR_HEIGHT = Convert.ToDouble(line); break;
                    case 11: CR_REPS = Convert.ToDouble(line); break;
                    case 12: KINECT_PATH = line; break;
                }
            }
            curVal++;
        }
    }

    // Launch KinectService from the configured install path — required to
    // use the application.
    Process.Start(KINECT_PATH);

    // Display settings: camera fills 2/3 of the screen width, full height.
    camera.Width = System.Windows.SystemParameters.PrimaryScreenWidth * 2 / 3;
    camera.Height = System.Windows.SystemParameters.PrimaryScreenHeight;
    camera.HorizontalAlignment = System.Windows.HorizontalAlignment.Left;
    canvas.Height = camera.Height;
    camera.VerticalAlignment = System.Windows.VerticalAlignment.Top;

    // Stopwatch timer ticking every millisecond.
    dispatcherTimer.Tick += new EventHandler(dispatcherTimer_Tick);
    dispatcherTimer.Interval = new TimeSpan(0, 0, 0, 0, 1);

    // Initialize the Kinect camera; open it and get a reader if present.
    _sensor = KinectSensor.GetDefault();
    if (_sensor != null)
    {
        _sensor.Open();
        _reader = _sensor.OpenMultiSourceFrameReader(
            FrameSourceTypes.Color | FrameSourceTypes.Depth |
            FrameSourceTypes.Infrared | FrameSourceTypes.Body);
        _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;
    }
}