/// <summary>
/// Initializes the window, opens the default Kinect sensor and wires up
/// the color, body and face frame readers.
/// </summary>
public MainWindow()
{
    InitializeComponent();

    _sensor = KinectSensor.GetDefault();

    if (_sensor == null)
    {
        return;
    }

    _sensor.Open();

    // One slot per body the sensor can track.
    _bodies = new Body[_sensor.BodyFrameSource.BodyCount];

    _colorReader = _sensor.ColorFrameSource.OpenReader();
    _colorReader.FrameArrived += ColorReader_FrameArrived;

    _bodyReader = _sensor.BodyFrameSource.OpenReader();
    _bodyReader.FrameArrived += BodyReader_FrameArrived;

    // 2) Initialize the face source with the desired features
    //    (initial tracking id 0).
    var faceFeatures =
        FaceFrameFeatures.BoundingBoxInColorSpace |
        FaceFrameFeatures.FaceEngagement |
        FaceFrameFeatures.Glasses |
        FaceFrameFeatures.Happy |
        FaceFrameFeatures.LeftEyeClosed |
        FaceFrameFeatures.MouthOpen |
        FaceFrameFeatures.PointsInColorSpace |
        FaceFrameFeatures.RightEyeClosed |
        FaceFrameFeatures.LookingAway;

    _faceSource = new FaceFrameSource(_sensor, 0, faceFeatures);
    _faceReader = _faceSource.OpenReader();
    _faceReader.FrameArrived += FaceReader_FrameArrived;
}
// Primary function. Runs when the window loads in: opens the default
// Kinect sensor and wires up the multi-source, body and face readers.
private void Window_Loaded(object sender, RoutedEventArgs e)
{
    _sensor = KinectSensor.GetDefault();

    if (_sensor == null)
    {
        return;
    }

    _sensor.Open();

    // One slot per body the sensor can track.
    _bodies = new Body[_sensor.BodyFrameSource.BodyCount];

    _reader = _sensor.OpenMultiSourceFrameReader(
        FrameSourceTypes.Color |
        FrameSourceTypes.Depth |
        FrameSourceTypes.Infrared |
        FrameSourceTypes.Body);
    _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;

    _bodyReader = _sensor.BodyFrameSource.OpenReader();
    _bodyReader.FrameArrived += BodyReader_FrameArrived;

    // 2) Initialize the face source with the desired features
    var faceFeatures =
        FaceFrameFeatures.BoundingBoxInColorSpace |
        FaceFrameFeatures.FaceEngagement |
        FaceFrameFeatures.Glasses |
        FaceFrameFeatures.Happy |
        FaceFrameFeatures.LeftEyeClosed |
        FaceFrameFeatures.MouthOpen |
        FaceFrameFeatures.PointsInColorSpace |
        FaceFrameFeatures.RightEyeClosed;

    _faceSource = new FaceFrameSource(_sensor, 0, faceFeatures);
    _faceReader = _faceSource.OpenReader();
    _faceReader.FrameArrived += FaceReader_FrameArrived;
}
/// <summary>
/// Opens the Kinect sensor and the infrared stream, and prepares the
/// bitmap resources used to display it. Shows the error and closes the
/// window on failure.
/// </summary>
private void Window_Loaded( object sender, RoutedEventArgs e )
{
    try {
        // Open the Kinect sensor.
        kinect = KinectSensor.GetDefault();
        kinect.Open();

        // Frame description of the infrared stream.
        infraredFrameDesc = kinect.InfraredFrameSource.FrameDescription;

        // Open the infrared reader.
        infraredFrameReader = kinect.InfraredFrameSource.OpenReader();
        infraredFrameReader.FrameArrived += infraredFrameReader_FrameArrived;

        // Buffer, bitmap, rect and stride needed to render the frames.
        infraredBuffer = new ushort[infraredFrameDesc.LengthInPixels];
        infraredBitmap = new WriteableBitmap(
            infraredFrameDesc.Width, infraredFrameDesc.Height,
            96, 96, PixelFormats.Gray16, null );
        infraredRect = new Int32Rect(
            0, 0, infraredFrameDesc.Width, infraredFrameDesc.Height );
        infraredStride = infraredFrameDesc.Width * (int)infraredFrameDesc.BytesPerPixel;

        ImageInfrared.Source = infraredBitmap;
    }
    catch ( Exception ex ) {
        MessageBox.Show( ex.Message );
        Close();
    }
}
/// <summary>
/// Runs the application: opens the default Kinect sensor, waits until it
/// becomes available, then starts listening for body frames and gestures.
/// </summary>
static async Task RunAsync()
{
    // Get the default Kinect sensor.
    _kinectSensor = KinectSensor.GetDefault();

    // Open the reader for the body frames.
    _bodyFrameReader = _kinectSensor.BodyFrameSource.OpenReader();

    // Set the coordinate mapper.
    _coordinateMapper = _kinectSensor.CoordinateMapper;

    // Open the sensor.
    _kinectSensor.Open();

    // Check if the sensor is available.
    Console.WriteLine("Kinect sensor is " + (_kinectSensor.IsAvailable ? "available " : "missing. Waiting for sensor: press ctrl + c to abort"));

    // FIX: the original loop busy-waited with an empty body, pinning one
    // CPU core at 100%. Poll with a short asynchronous delay instead.
    while (!_kinectSensor.IsAvailable)
    {
        await Task.Delay(100);
    }

    Console.WriteLine("Kinect sensor is " + (_kinectSensor.IsAvailable ? "available " : "missing. Waiting for sensor: press ctrl + c to abort"));

    // Init gesture detector.
    _handOverHeadDetector = new HandOverHeadDetector(HandDetectionType.BothHands, HandState.Open);

    // Subscribe to completed event.
    _handOverHeadDetector.GestureCompleteEvent += HandOverHeadDetectorOnGestureCompleteEvent;

    // Start receiving Kinect frames.
    if (_bodyFrameReader != null)
    {
        _bodyFrameReader.FrameArrived += Reader_FrameArrived;
    }
}
/// <summary>
/// Creates the handler, publishes it as the singleton instance, registers
/// for coordinate-mapping changes and opens the default Kinect sensor.
/// </summary>
public KinectHandler()
{
    instance = this;

    kinectSensor = KinectSensor.GetDefault();
    kinectSensor.CoordinateMapper.CoordinateMappingChanged += CoordinateMapper_CoordinateMappingChanged;
    kinectSensor.Open();
}
/// <summary>
/// Sets up the Kinect sensor and frame readers, the drawing resources,
/// and the initial state of the game.
/// </summary>
public MainWindow()
{
    // Get the sensor.
    sensor = KinectSensor.GetDefault();
    sensor.Open();

    // Set up readers for each source of data we want to use.
    colorFrameReader = sensor.ColorFrameSource.OpenReader();
    bodyFrameReader = sensor.BodyFrameSource.OpenReader();

    // Set up event handlers that use what we get from the readers.
    colorFrameReader.FrameArrived += this.Reader_ColorFrameArrived;
    bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

    // Get ready to draw graphics.
    drawingGroup = new DrawingGroup();

    // Initialize the components (controls) of the window.
    InitializeComponent();

    // Create the bitmap that backs the color image control (1080p, Bgr32).
    colorBitmap = new WriteableBitmap(1920, 1080, 96.0, 96.0, PixelFormats.Bgr32, null);
    ColorImage.Source = colorBitmap;

    // Initialize the game state.
    birdHeight = this.Height / 2;   // put the bird in the middle of the screen
    prevRightHandHeight = 0;
    prevLeftHandHeight = 0;
    pipeX = -1;
    pipeGapY = 250;
    pipeGapLength = 170;
    randomGenerator = new Random();
}
/// <summary>
/// The main window of the app. Wires up depth, infrared and body readers,
/// the hands controller, and finally opens the sensor.
/// </summary>
public MainWindow()
{
    InitializeComponent();

    _sensor = KinectSensor.GetDefault();

    if (_sensor == null)
    {
        return;
    }

    _depthReader = _sensor.DepthFrameSource.OpenReader();
    _depthReader.FrameArrived += DepthReader_FrameArrived;

    _infraredReader = _sensor.InfraredFrameSource.OpenReader();
    _infraredReader.FrameArrived += InfraredReader_FrameArrived;

    _bodyReader = _sensor.BodyFrameSource.OpenReader();
    _bodyReader.FrameArrived += BodyReader_FrameArrived;

    // One slot per body the sensor can track.
    _bodies = new Body[_sensor.BodyFrameSource.BodyCount];

    // Initialize the HandsController and subscribe to the HandsDetected event.
    _handsController = new HandsController();
    _handsController.HandsDetected += HandsController_HandsDetected;

    _sensor.Open();
}
/// <summary>
/// Opens the Kinect sensor and the depth stream, and prepares the bitmap
/// resources used to display it. Shows the error and closes the window on
/// failure.
/// </summary>
private void Window_Loaded( object sender, RoutedEventArgs e )
{
    try {
        kinect = KinectSensor.GetDefault();
        if ( kinect == null ) {
            throw new Exception("Kinectを開けません");
        }

        kinect.Open();

        // Frame description of the depth stream.
        depthFrameDesc = kinect.DepthFrameSource.FrameDescription;

        // Bitmap, buffer, rect and stride needed to render the frames.
        depthImage = new WriteableBitmap(
            depthFrameDesc.Width, depthFrameDesc.Height,
            96, 96, PixelFormats.Gray16, null );
        depthBuffer = new ushort[depthFrameDesc.LengthInPixels];
        depthRect = new Int32Rect( 0, 0, depthFrameDesc.Width, depthFrameDesc.Height );
        depthStride = (int)(depthFrameDesc.Width * depthFrameDesc.BytesPerPixel);
        ImageDepth.Source = depthImage;

        // Initial position indicator: the center of the depth image.
        depthPoint = new Point( depthFrameDesc.Width / 2, depthFrameDesc.Height / 2 );

        // Open the depth reader.
        depthFrameReader = kinect.DepthFrameSource.OpenReader();
        depthFrameReader.FrameArrived += depthFrameReader_FrameArrived;
    }
    catch ( Exception ex ) {
        MessageBox.Show( ex.Message );
        Close();
    }
}
/// <summary>
/// Private constructor: wires the Kinect multi-source pipeline (depth,
/// color, body), creates the display bitmaps and pixel buffers, then
/// opens the sensor.
/// </summary>
private KinectStreamer()
{
    KinectStreamerConfig = new KinectStreamerConfig();

    kinectSensor = KinectSensor.GetDefault();
    CoordinateMapper = kinectSensor.CoordinateMapper;

    multiSourceFrameReader = kinectSensor.OpenMultiSourceFrameReader(
        FrameSourceTypes.Depth | FrameSourceTypes.Color | FrameSourceTypes.Body);
    multiSourceFrameReader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;

    ColorFrameDescription = kinectSensor.ColorFrameSource.FrameDescription;
    DepthFrameDescription = kinectSensor.DepthFrameSource.FrameDescription;

    depthBitmap = new WriteableBitmap(DepthFrameDescription.Width, DepthFrameDescription.Height, 96.0, 96.0, PixelFormats.Gray8, null);
    colorBitmap = new WriteableBitmap(ColorFrameDescription.Width, ColorFrameDescription.Height, 96.0, 96.0, PixelFormats.Bgr32, null);

    bitmapBackBufferSize = (uint)((colorBitmap.BackBufferStride * (colorBitmap.PixelHeight - 1)) + (colorBitmap.PixelWidth * this.bytesPerPixel));

    // FIX: the color buffer was allocated as Width * Height bytes, but the
    // Bgr32 color frame stores bytesPerPixel bytes per pixel — the original
    // buffer was too small to hold one full frame.
    colorPixels = new byte[(int)(ColorFrameDescription.Width * ColorFrameDescription.Height * this.bytesPerPixel)];
    depthPixels = new byte[DepthFrameDescription.Width * DepthFrameDescription.Height];
    depthArray = new ushort[DepthFrameDescription.Width * DepthFrameDescription.Height];

    SetupBody();

    kinectSensor.Open();
}
/// <summary>
/// Connects to the SignalR hub, then opens the Kinect sensor and wires up
/// the color, body and face frame readers.
/// </summary>
public MainWindow()
{
    InitializeComponent();

    // NOTE(review): blocking on Start() in the UI constructor freezes
    // startup (and risks a deadlock) if the server is slow or unreachable —
    // consider an asynchronous initialization path.
    var hubConnection = new HubConnection("http://divewakeweb.azurewebsites.net/");
    stockTickerHubProxy = hubConnection.CreateHubProxy("WakeHub");
    hubConnection.Start().Wait();

    _sensor = KinectSensor.GetDefault();

    if (_sensor == null)
    {
        return;
    }

    _sensor.Open();

    // One slot per body the sensor can track.
    _bodies = new Body[_sensor.BodyFrameSource.BodyCount];

    _colorReader = _sensor.ColorFrameSource.OpenReader();
    _colorReader.FrameArrived += ColorReader_FrameArrived;

    _bodyReader = _sensor.BodyFrameSource.OpenReader();
    _bodyReader.FrameArrived += BodyReader_FrameArrived;

    // 2) Initialize the face source with the desired features
    var faceFeatures =
        FaceFrameFeatures.BoundingBoxInColorSpace |
        FaceFrameFeatures.FaceEngagement |
        FaceFrameFeatures.Glasses |
        FaceFrameFeatures.LeftEyeClosed |
        FaceFrameFeatures.PointsInColorSpace |
        FaceFrameFeatures.RightEyeClosed;

    _faceSource = new FaceFrameSource(_sensor, 0, faceFeatures);
    _faceReader = _faceSource.OpenReader();
    _faceReader.FrameArrived += FaceReader_FrameArrived;
}
/// <summary>
/// Sets up the Kinect depth pipeline, the analysis helpers and the GUI,
/// then opens the sensor.
/// </summary>
public MainWindow()
{
    _kinectSensor = KinectSensor.GetDefault();

    _depthFrameDescription = _kinectSensor.DepthFrameSource.FrameDescription;
    _depthFrameReader = _kinectSensor.DepthFrameSource.OpenReader();
    _depthFrameReader.FrameArrived += Reader_FrameArrived;

    // One camera-space point per depth pixel.
    _cameraSpacePoints = new CameraSpacePoint[_depthFrameDescription.Width * _depthFrameDescription.Height];

    // Analysis helpers.
    _trackingDiagnostics = new TrackingDiagnostics();
    _heatMap = new HeatMap();
    _energyHistory = new EnergyHistory();
    _temporalMedianImage = new TemporalMedianImage(GlobVar.TemporalFrameCounter);
    _stopwatch = new Stopwatch();

    BodiesHistory.Initialize();

    GlobVar.CoordinateMapper = _kinectSensor.CoordinateMapper;
    GlobVar.TimeStamps = new List<TimeSpan>();

    // Initialize the components (controls) of the GUI window.
    InitializeComponent();

    _kinectSensor.Open();
}
/// <summary>
/// Opens the Kinect sensor and body reader, resets the pointing state and
/// loads the persisted x/y offset calibration, if any.
/// </summary>
public KinectController()
{
    kinectSensor = KinectSensor.GetDefault();

    // Open the reader for the body frames.
    bodyReader = kinectSensor.BodyFrameSource.OpenReader();
    kinectSensor.Open();

    // Reset the pointing state.
    Arm = ArmPointing.Nothing;
    hasPointed = false;
    lastAveragePositionLeft = 0f;
    lastAveragePositionRight = 0f;
    frameCounterLeft = 0;
    frameCounterRight = 0;

    if (!File.Exists(OPT_FILE))
    {
        // No calibration saved yet — start with a neutral offset.
        offsetX = 0;
        offsetY = 0;
    }
    else
    {
        // The calibration file holds a JSON list: [offsetX, offsetY].
        string data = File.ReadAllText(OPT_FILE);
        List<float> offset = JsonConvert.DeserializeObject<List<float>>(data);
        offsetX = offset[0];
        offsetY = offset[1];
    }
}
/// <summary>
/// Initializes the window and the network layer, then opens the Kinect
/// sensor and wires up the color, body and face frame readers.
/// </summary>
public MainWindow()
{
    InitializeComponent();
    network.init();

    _sensor = KinectSensor.GetDefault();

    if (_sensor == null)
    {
        return;
    }

    _sensor.Open();

    // Identify the bodies.
    _bodies = new Body[_sensor.BodyFrameSource.BodyCount];

    _colorReader = _sensor.ColorFrameSource.OpenReader();
    _colorReader.FrameArrived += ColorReader_FrameArrived;

    _bodyReader = _sensor.BodyFrameSource.OpenReader();
    _bodyReader.FrameArrived += BodyReader_FrameArrived;

    // Initialize the face source with only the bounding box for now.
    // Further features (FaceEngagement, Glasses, Happy, eye/mouth state,
    // PointsInColorSpace) are deliberately disabled — include later.
    _faceSource = new FaceFrameSource(_sensor, 0, FaceFrameFeatures.BoundingBoxInColorSpace);

    _faceReader = _faceSource.OpenReader();
    _faceReader.FrameArrived += FaceReader_FrameArrived;
}
/// <summary>
/// Stores the target bitmaps, opens the default Kinect sensor and starts
/// a multi-source reader for depth and color frames.
/// </summary>
/// <param name="depthBitmap">Bitmap that receives the depth stream.</param>
/// <param name="colorBitmap">Bitmap that receives the color stream.</param>
public KinectReader(WriteableBitmap depthBitmap, WriteableBitmap colorBitmap)
{
    this.depthBitmap = depthBitmap;
    this.colorBitmap = colorBitmap;

    this.sensor = KinectSensor.GetDefault();
    sensor.Open();

    this.reader = sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Depth | FrameSourceTypes.Color);
    reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;
}
/// <summary>
/// Loads the exercise definitions, opens the Kinect sensor with a
/// multi-source reader, and creates the mutex guarding frame access.
/// </summary>
public Core()
{
    InitExercises();

    sensor = KinectSensor.GetDefault();
    sensor.Open();

    reader = sensor.OpenMultiSourceFrameReader(
        FrameSourceTypes.Color |
        FrameSourceTypes.Depth |
        FrameSourceTypes.Infrared |
        FrameSourceTypes.Body);

    frameLock = new Mutex();
}
/// <summary>
/// Prepares the output folder path and the Kinect depth stream.
/// </summary>
/// <returns>Always true.</returns>
private bool Setup()
{
    // Output goes under the user's personal (Documents) folder.
    filepath = System.Environment.GetFolderPath(System.Environment.SpecialFolder.Personal);

    sensor = KinectSensor.GetDefault();
    sensor.IsAvailableChanged += sensor_IsAvailableChanged;

    depth = sensor.DepthFrameSource.OpenReader();
    depth.FrameArrived += depth_FrameArrived;

    sensor.Open();
    return true;
}
/// <summary>
/// Creates the debug window with its DB helper, stores the game
/// controller, and wires up a Kinect body frame reader.
/// </summary>
/// <param name="GameController">Controller that the debug window observes.</param>
public DebugWindow(Controller.GameController GameController)
{
    InitializeComponent();

    db = new Helpers.DBHelper();
    attGameController = GameController;

    attks = KinectSensor.GetDefault();
    attks.Open();

    attBodyFrameReader = attks.BodyFrameSource.OpenReader();
    attBodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;
}
/// <summary>
/// Opens the Kinect sensor, starts a color+body multi-source reader, and
/// creates the 1080p bitmap shown in the window.
/// </summary>
public MainWindow()
{
    InitializeComponent();

    mySensor = KinectSensor.GetDefault();
    mySensor.Open();

    myReader = mySensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Body);
    myReader.MultiSourceFrameArrived += myReader_MultiSourceFrameArrived;

    myBitmap = new WriteableBitmap(1920, 1080, 96.0, 96.0, PixelFormats.Pbgra32, null);
    image1.Source = myBitmap;
}
/// <summary>
/// Opens the default Kinect sensor (when present) and starts a
/// multi-source reader for color, depth, infrared and body frames.
/// </summary>
private void Window_Loaded(object sender, RoutedEventArgs e)
{
    _sensor = KinectSensor.GetDefault();

    if (_sensor == null)
    {
        return;
    }

    _sensor.Open();

    _reader = _sensor.OpenMultiSourceFrameReader(
        FrameSourceTypes.Color |
        FrameSourceTypes.Depth |
        FrameSourceTypes.Infrared |
        FrameSourceTypes.Body);
    _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;
}
/// <summary>
/// Enables tracking and starts the sensor, if there is one attached.
/// Notifies every registered frame handler, then marks the pipeline as
/// running.
/// </summary>
public void OpenSensor()
{
    Sensor = KinectSensor.GetDefault();

    Console.WriteLine("- Opening sensor: {0}", SensorId);
    Sensor.Open();

    // Let each registered frame handler do its start-up work.
    foreach (var handler in _frameHandlers)
    {
        handler.OnStart();
    }

    IsRunning = true;
}
/// <summary>
/// Initializes the page and, when a Kinect sensor is present, opens it
/// with a multi-source reader for color, depth, infrared, body and
/// body-index frames.
/// </summary>
public BackgroundRemovalPage()
{
    InitializeComponent();

    _sensor = KinectSensor.GetDefault();

    if (_sensor == null)
    {
        return;
    }

    _sensor.Open();

    _reader = _sensor.OpenMultiSourceFrameReader(
        FrameSourceTypes.Color |
        FrameSourceTypes.Depth |
        FrameSourceTypes.Infrared |
        FrameSourceTypes.Body |
        FrameSourceTypes.BodyIndex);
    _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;
}
/// <summary>
/// Opens the default Kinect sensor and starts a color+depth+body-index
/// multi-source reader.
/// </summary>
private void Window_Loaded(object sender, RoutedEventArgs e)
{
    _kinectSensor = KinectSensor.GetDefault();

    // FIX: the original called InitializeInfrared()/Open() before the null
    // check and subscribed _reader.MultiSourceFrameArrived outside it, so a
    // missing sensor caused a NullReferenceException. Guard everything.
    if (_kinectSensor != null)
    {
        InitializeInfrared();
        _kinectSensor.Open();

        _utils = new Utils(_kinectSensor.CoordinateMapper);

        _reader = _kinectSensor.OpenMultiSourceFrameReader(
            FrameSourceTypes.Color |
            FrameSourceTypes.Depth |
            FrameSourceTypes.BodyIndex);
        _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;
    }
}
/// <summary>
/// Opens the default Kinect sensor; shows the error and closes the window
/// when it cannot be opened.
/// </summary>
private void Window_Loaded( object sender, RoutedEventArgs e )
{
    try {
        kinect = KinectSensor.GetDefault();
        if ( kinect == null ) {
            throw new Exception("Kinectを開けません");
        }

        kinect.Open();
    }
    catch ( Exception ex ) {
        MessageBox.Show( ex.Message );
        Close();
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="KinectHandler"/> class.
/// Does nothing when the application framework is not initialized or no
/// Kinect sensor is present.
/// </summary>
/// <param name="Messenger">The Messenger which is used for thread- and component-synchronization.</param>
/// <param name="performanceAnalyzer">The PerformanceAnalyzer which tracks the kinect's performance values.</param>
public KinectHandler(
    SeeingSharpMessenger Messenger,
    PerformanceAnalyzer performanceAnalyzer)
{
    // Guard: nothing to do before the hosting application is up.
    if (!SeeingSharpApplication.IsInitialized)
    {
        return;
    }

    m_Messenger = Messenger;
    m_performanceAnalyzer = performanceAnalyzer;

    m_sensor = KinectSensor.GetDefault();

    // Guard: no sensor attached.
    if (m_sensor == null)
    {
        return;
    }

    m_sensor.IsAvailableChanged += OnSensor_IsAvailableChanged;
    m_sensor.Open();
}
/// <summary>
/// Opens the Kinect sensor, creates the background removal tool and
/// starts a color+depth+body-index multi-source reader.
/// </summary>
private void Window_Loaded(object sender, RoutedEventArgs e)
{
    _sensor = KinectSensor.GetDefault();

    if (_sensor == null)
    {
        return;
    }

    _sensor.Open();

    // 2) Initialize the background removal tool.
    _backgroundRemovalTool = new BackgroundRemovalTool(_sensor.CoordinateMapper);

    _reader = _sensor.OpenMultiSourceFrameReader(
        FrameSourceTypes.Color |
        FrameSourceTypes.Depth |
        FrameSourceTypes.BodyIndex);
    _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;
}
/// <summary>
/// Initializes the window and the network layer, wires up the Kinect
/// color/body/face pipelines (one face source per trackable body), and
/// prepares the per-person overlay images.
/// </summary>
public MainWindow()
{
    InitializeComponent();
    network.init();

    _sensor = KinectSensor.GetDefault();

    if (_sensor == null)
    {
        return;
    }

    _sensor.Open();

    bodyCount = _sensor.BodyFrameSource.BodyCount;

    // Identify the bodies.
    _bodies = new Body[bodyCount];

    _colorReader = _sensor.ColorFrameSource.OpenReader();
    _colorReader.FrameArrived += ColorReader_FrameArrived;

    _bodyReader = _sensor.BodyFrameSource.OpenReader();
    _bodyReader.FrameArrived += BodyReader_FrameArrived;

    // One face source/reader pair per trackable body.
    _faceSources = new FaceFrameSource[bodyCount];
    _faceReaders = new FaceFrameReader[bodyCount];

    for (int i = 0; i < bodyCount; i++)
    {
        // Face frame source with the required features and an initial
        // tracking id of 0 (assigned once a body is tracked).
        _faceSources[i] = new FaceFrameSource(_sensor, 0, FaceFrameFeatures.BoundingBoxInColorSpace);

        // Open the corresponding reader.
        _faceReaders[i] = _faceSources[i].OpenReader();
        _faceReaders[i].FrameArrived += FaceReader_FrameArrived;
    }

    _faceResults = new FaceFrameResult[bodyCount];

    // Arrays and values for person switches and timeouts.
    personSize = 3;
    ims = new Image[3] { maskImage, maskImage2, maskImage3 };
    trackedInd = new bool[3] { false, false, false };

    _persons = new Person[personSize];
    for (int i = 0; i < personSize; i++)
    {
        _persons[i] = new Person(0, ims[i], -1);
    }

    paths = new String[3] {
        "pack://application:,,,/Images/tinfoil.png",
        "pack://application:,,,/Images/cowboy.png",
        "pack://application:,,,/Images/napolean.png" };
}
/// <summary>
/// Entry point: opens the Kinect sensor, listens for body frames,
/// performs an initial virtual mouse move/click, then waits for a key.
/// </summary>
static void Main(string[] args)
{
    sensor = KinectSensor.GetDefault();
    sensor.IsAvailableChanged += Sensor_IsAvailableChanged;
    sensor.Open();

    _multiReader = sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Body);
    _multiReader.MultiSourceFrameArrived += OnMultipleFramesArrivedHandler;

    // Move the cursor to a fixed position and click once — presumably to
    // focus a target window; confirm what (900, 39) points at.
    VirtualMouse.MoveTo(900, 39);
    VirtualMouse.LeftClick();

    // Keep the process alive until a key is pressed.
    Console.ReadKey();
}
/// <summary>
/// Opens the Kinect sensor and its audio beam reader; shows the error and
/// closes the window on failure.
/// </summary>
private void Window_Loaded( object sender, RoutedEventArgs e )
{
    try {
        // Open the Kinect sensor.
        kinect = KinectSensor.GetDefault();
        kinect.Open();

        // Open the audio beam reader.
        audioBeamFrameReader = kinect.AudioSource.OpenReader();
        audioBeamFrameReader.FrameArrived += audioBeamFrameReader_FrameArrived;
    }
    catch ( Exception ex ) {
        MessageBox.Show( ex.Message );
        Close();
    }
}
/// <summary>
/// Opens the default Kinect sensor (if not already open) and starts a
/// multi-source reader for color, depth, infrared, body and body-index
/// frames.
/// </summary>
/// <exception cref="ScannerNotFoundException">No Kinect sensor is plugged in.</exception>
public FrameReader()
{
    _sensor = KinectSensor.GetDefault();

    if (_sensor == null)
    {
        throw new ScannerNotFoundException("No valid plugged-in Kinect sensor found.");
    }

    if (!_sensor.IsOpen)
    {
        _sensor.Open();
    }

    _reader = _sensor.OpenMultiSourceFrameReader(
        FrameSourceTypes.Color |
        FrameSourceTypes.Depth |
        FrameSourceTypes.Infrared |
        FrameSourceTypes.Body |
        FrameSourceTypes.BodyIndex);
    _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;
}
/// <summary>
/// Opens the Kinect depth stream, allocates the buffer that receives
/// incoming depth pixels, and starts the sensor.
/// </summary>
public void ListenForFrames()
{
    sensor = KinectSensor.GetDefault();

    depthFrameReader = sensor.DepthFrameSource.OpenReader();
    depthFrameReader.FrameArrived += Reader_FrameArrived;

    depthFrameDescription = sensor.DepthFrameSource.FrameDescription;

    // Allocate space to put the pixels being received and converted.
    depthPixels = new ushort[this.depthFrameDescription.Width * this.depthFrameDescription.Height];

    sensor.Open();
    Console.WriteLine("Extractor ready to grab frames.");
}
/// <summary>
/// Creates the producer: stores the joints of interest, prepares the
/// worker thread, opens the Kinect sensor and — once it reports open — a
/// body frame reader.
/// </summary>
/// <param name="jointsofinterest">Joints the producer should track.</param>
public KinectProducer(JointType[] jointsofinterest)
{
    WorkerFunction = new ThreadStart(this.StartForWorkerThread);
    WorkerThread = new Thread(WorkerFunction);
    JointsOfInterest = jointsofinterest;

    Kinect = KinectSensor.GetDefault();
    FrameDescription fd = Kinect.DepthFrameSource.FrameDescription;
    Kinect.Open();

    // Give the sensor a moment to transition to the open state before
    // checking IsOpen. NOTE(review): a fixed sleep is fragile — consider
    // reacting to IsAvailableChanged instead.
    Thread.Sleep(200);

    if (this.Kinect.IsOpen)
    {
        BFReader = Kinect.BodyFrameSource.OpenReader();
    }
}
/// <summary>
/// Activates the Kinect v2 sensor by opening the underlying device.
/// </summary>
public void Open()
{
    sensor.Open();
}