/// <summary>
/// Acquires the default Kinect sensor, opens a body frame reader, and builds
/// per-body face frame sources/readers plus avatar body/joint containers.
/// Does nothing when no sensor is present.
/// </summary>
void Awake()
{
    _Sensor = KinectSensor.GetDefault();
    if (_Sensor == null)
    {
        // BUG FIX: everything below used to run even when _Sensor was null,
        // which threw a NullReferenceException on machines without a Kinect.
        return;
    }

    _Reader = _Sensor.BodyFrameSource.OpenReader();
    if (!_Sensor.IsOpen)
    {
        _Sensor.Open();
    }

    bodyCount = _Sensor.BodyFrameSource.BodyCount;

    // Only face rotation is requested from the face pipeline.
    FaceFrameFeatures faceFrameFeatures = FaceFrameFeatures.RotationOrientation;

    faceFrameSources = new FaceFrameSource[bodyCount];
    faceFrameReaders = new FaceFrameReader[bodyCount];
    avatarBodies = new Avatar.Body[bodyCount];

    for (int i = 0; i < bodyCount; i++)
    {
        // Tracking id 0: sources are presumably bound to real tracking ids
        // later, when bodies are detected — TODO confirm against frame handler.
        faceFrameSources[i] = FaceFrameSource.Create(_Sensor, 0, faceFrameFeatures);
        faceFrameReaders[i] = faceFrameSources[i].OpenReader();

        // Pre-create an avatar joint for every SDK joint type.
        avatarBodies[i] = new Avatar.Body();
        for (JointType jt = JointType.SpineBase; jt <= JointType.ThumbRight; jt++)
        {
            avatarBodies[i].Joints[jt] = new Avatar.Joint();
            avatarBodies[i].Joints[jt].JointType = jt;
        }
    }
}
/// <summary>
/// Connects to the ROS bridge, advertises the measurement/image/fit topics,
/// subscribes to size-fit results, and starts the Kinect multi-source reader.
/// </summary>
public BodyMeasurements()
{
    rosSocket = new RosSocket(new RosSharp.RosBridgeClient.Protocols.WebSocketNetProtocol(uri));

    rosBMMeasurements_Id = rosSocket.Advertise<bm_msgs.bm_message>("/bm_MeasurementsChatter");
    rosBMImage_Id = rosSocket.Advertise<bm_imsgs.Image>("/bm_ImagesChatter");
    rosBMFits_Id = rosSocket.Advertise<bm_msgs.bm_fits>("/bm_FitsChatter");

    // BUG FIX: the subscription id was previously stored in an unused LOCAL
    // ("string rosBMSizeFit_Id = ...") that shadowed the field assignment the
    // adjacent commented-out line shows was intended, so the id was lost.
    rosBMSizeFit_Id = rosSocket.Subscribe<bm_msgs.bm_sizefit>("/bm_SizeFitChatter", SizeFitSubscriptionHandler);

    InitializeComponent();

    _sensor = KinectSensor.GetDefault();
    if (_sensor != null)
    {
        _sensor.Open();
        _reader = _sensor.OpenMultiSourceFrameReader(
            FrameSourceTypes.BodyIndex | FrameSourceTypes.Color | FrameSourceTypes.Depth |
            FrameSourceTypes.Infrared | FrameSourceTypes.Body);
        _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;
    }
}
/// <summary>
/// Main window constructor: plays the looping theme music, holds the splash
/// screen for a few seconds, then wires up the Kinect region and custom cursor.
/// </summary>
public MainWindow()
{
    // Official theme music, looped for the lifetime of the window.
    Player.Stream = Properties.Resources.Schooldays_intro;
    Player.PlayLooping();

    // Keep the splash screen on screen a little longer.
    Thread.Sleep(4000);

    InitializeComponent();

    // Turn the Kinect on at program start-up: register the movement area
    // (defined in XAML) with the application object.
    KinectRegion.SetKinectRegion(this, KinectArea);
    App currentApp = ((App)Application.Current);
    currentApp.KinectRegion = KinectArea;

    // Swap in the purple cursor sprite sheet.
    currentApp.KinectRegion.CursorSpriteSheetDefinition =
        new CursorSpriteSheetDefinition(
            new System.Uri("pack://application:,,,/Images/CursorSpriteSheetPurple.png"),
            4, 20, 137, 137);

    // Use the default sensor.
    this.KinectArea.KinectSensor = KinectSensor.GetDefault();
}
/// <summary>
/// Unity Start: caches the coordinate mapper and depth intrinsics, builds the
/// downsampled triangle mesh template, opens the sensor, and allocates the
/// compute buffer. Does nothing when no sensor is present.
/// </summary>
void Start()
{
    _Sensor = KinectSensor.GetDefault();
    if (_Sensor != null)
    {
        _Mapper = _Sensor.CoordinateMapper;
        _CameraIntrinsics = _Mapper.GetDepthCameraIntrinsics();

        var frameDesc = _Sensor.DepthFrameSource.FrameDescription;

        // Downsample to lower resolution.
        _TrianglesTemplate = CreateMesh(frameDesc.Width / _DownsampleSize, frameDesc.Height / _DownsampleSize);

        if (!_Sensor.IsOpen)
        {
            _Sensor.Open();
        }

        // BUG FIX: this allocation previously ran OUTSIDE the null guard and
        // dereferenced _TrianglesTemplate, throwing when no sensor was present.
        // Count must be greater than 0, less or equal to 2048 and a multiple of 4.
        _Buffer = new ComputeBuffer(_TrianglesTemplate.Length / 6, 60);
    }
    // NOTE: the original also kept unused locals `width`/`height`; removed.
}
/// <summary>
/// Opens the default Kinect sensor, starts a multi-source reader for every
/// frame type, and allocates the most-recent-frame buffers and gesture state.
/// Returns early (leaving fields null) when no sensor is present.
/// </summary>
public Kinectv2()
{
    _sensor = KinectSensor.GetDefault();
    if (_sensor == null)
    {
        return;
    }

    _sensor.Open();
    _reader = _sensor.OpenMultiSourceFrameReader(
        FrameSourceTypes.Color |
        FrameSourceTypes.Depth |
        FrameSourceTypes.Infrared |
        FrameSourceTypes.Body |
        FrameSourceTypes.BodyIndex);
    // NOTE: a second "if (_sensor == null) return;" used to sit here; it was
    // dead code (the sensor was already verified above) and has been removed.

    MostRecentColorFrame = new byte[CalculateImageByteCount(ColorFrameDescription)];
    MostRecentDepthFrame = new byte[CalculateImageByteCount(DepthFrameDescription)];
    MostRecentInfraredFrame = new byte[CalculateImageByteCount(InfraredFrameDescription)];
    MostRecentSilhouetteFrame = new byte[CalculateImageByteCount(SilhouetteFrameDescription)];
    MostRecentGestures = new GestureResults(_sensor.BodyFrameSource.BodyCount);

    ColorFrameSize = CalculateImageSize(ColorFrameDescription);
    DepthFrameSize = CalculateImageSize(DepthFrameDescription);
    InfraredFrameSize = CalculateImageSize(InfraredFrameDescription);
    SilhouetteFrameSize = CalculateImageSize(SilhouetteFrameDescription);

    _gestureTrackers = new List<GestureTracker>();
}
/// <summary>
/// Window Loaded handler: binds the default sensor to the Kinect region,
/// prepares the color bitmap, opens a multi-source reader, and overlays a
/// small user viewer on the canvas.
/// </summary>
void MainWindow_Loaded(object sender, RoutedEventArgs e)
{
    this.kinectRegion.KinectSensor = KinectSensor.GetDefault();

    // Surface sensor availability changes through the KinectStatus property.
    kinectRegion.KinectSensor.IsAvailableChanged += (s, args) => OnPropertyChanged("KinectStatus");

    FrameDescription colorDesc =
        this.kinectRegion.KinectSensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);
    colorBitmap = new WriteableBitmap(colorDesc.Width, colorDesc.Height, 96.0, 96.0, PixelFormats.Bgr32, null);

    msfr = this.kinectRegion.KinectSensor.OpenMultiSourceFrameReader(
        FrameSourceTypes.Body | FrameSourceTypes.Color | FrameSourceTypes.Depth);
    msfr.MultiSourceFrameArrived += msfr_MultiSourceFrameArrived;

    // Small user viewer docked bottom-left; a canvas can hold multiple children.
    kviewer = new KinectUserViewer
    {
        HorizontalAlignment = HorizontalAlignment.Left,
        VerticalAlignment = VerticalAlignment.Bottom,
        Height = 125,
        Width = 125,
    };
    canvas.Children.Add(kviewer);

    this.kinectRegion.KinectSensor.Open();
    image.Source = colorBitmap;
}
/// <summary>
/// Explicit IFaceCamera implementation: acquires the default sensor, wires
/// body, high-definition face, and classic face frame readers, then opens the
/// sensor. Does nothing when no sensor is present.
/// </summary>
void IFaceCamera<System.Drawing.PointF>.Start()
{
    _sensor = KinectSensor.GetDefault();
    if (_sensor != null)
    {
        _sensor.IsAvailableChanged += OnKinectSensorChanged;

        // Body frames are read alongside the face sources.
        _bodySource = _sensor.BodyFrameSource;
        _bodyReader = _bodySource.OpenReader();
        _bodyReader.FrameArrived += OnBodyReaderFrameArrived;

        // High-definition face reader.
        _faceSourceHighDef = new HighDefinitionFaceFrameSource(_sensor);
        _faceReaderHighDef = _faceSourceHighDef.OpenReader();
        _faceReaderHighDef.FrameArrived += OnFaceReaderHighDefFrameArrived;

        // Classic face source with expression/eye/mouth classification flags;
        // initial tracking id 0 — presumably rebound when a body is tracked,
        // TODO confirm against the body frame handler.
        _faceSource = new FaceFrameSource(_sensor, 0,
            FaceFrameFeatures.Glasses |
            FaceFrameFeatures.Happy |
            FaceFrameFeatures.LeftEyeClosed |
            FaceFrameFeatures.MouthOpen |
            FaceFrameFeatures.MouthMoved |
            FaceFrameFeatures.RightEyeClosed);
        // Both face sources share the same lost-tracking handler.
        _faceSource.TrackingIdLost += _faceSource_TrackingIdLost;
        _faceSourceHighDef.TrackingIdLost += _faceSource_TrackingIdLost;
        _faceReader = _faceSource.OpenReader();
        _faceReader.FrameArrived += OnFaceReaderFrameArrived;

        _faceModel = new FaceModel();
        _faceAlignment = new FaceAlignment();

        _sensor.Open();
    }
}
/// <summary>
/// Entry point: when a Microsoft Kinect is available, streams body frames to
/// the frame-arrived handler until the user presses Enter.
/// </summary>
static void Main(string[] args)
{
    // Other broadcasters (XR3D, Leap Motion, streaming audio) are currently
    // disabled.
    if (DeviceAvailable(Device.MicrosoftKinect))
    {
        var sensor = KinectSensor.GetDefault();
        var bodyReader = sensor.BodyFrameSource.OpenReader();
        bodyReader.FrameArrived += Reader_FrameArrived;
        sensor.Open();
    }

    // Block until the user presses Enter.
    Console.ReadLine();
}
/// <summary>
/// Page constructor: initializes the Kinect multi-source pipeline, the
/// color-to-depth mapping buffers, and the navigation header for a new wall
/// scan.
/// </summary>
public NewWall()
{
    // initialize Kinect object
    kinectSensor = KinectSensor.GetDefault();

    // activate sensor
    if (kinectSensor != null)
    {
        kinectSensor.Open();
        Console.WriteLine("Kinect Activated");

        mulSourceReader = kinectSensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.Infrared | FrameSourceTypes.Body);
        mulSourceReader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;

        // Frame dimensions for both camera spaces, from the shared lookup table.
        colorWidth = KinectExtensions.frameDimensions[SpaceMode.Color].Item1;
        colorHeight = KinectExtensions.frameDimensions[SpaceMode.Color].Item2;
        depthWidth = KinectExtensions.frameDimensions[SpaceMode.Depth].Item1;
        depthHeight = KinectExtensions.frameDimensions[SpaceMode.Depth].Item2;

        // One depth-space coordinate per color pixel, plus caches of the last
        // non-null depth/color frames (per the field names — TODO confirm use).
        colorMappedToDepthSpace = new DepthSpacePoint[(int)(colorWidth * colorHeight)];
        lastNotNullDepthData = new ushort[(int)depthWidth * (int)depthHeight];
        lastNotNullColorData = new byte[(int)colorWidth * (int)colorHeight * PixelFormats.Bgr32.BitsPerPixel / 8];

        bitmap = new WriteableBitmap((int)depthWidth, (int)depthHeight, 96.0, 96.0, PixelFormats.Bgra32, null);

        // Calculate the WriteableBitmap back buffer size
        bitmapBackBufferSize = (uint)((bitmap.BackBufferStride * (bitmap.PixelHeight - 1)) + (bitmap.PixelWidth * bytesPerPixel));
    }

    InitializeComponent();

    // set navHead: title shows the next wall number after the largest stored one.
    newWallNo = WallDataAccess.LargestWallNo + 1;
    navHead.HeaderRowTitle = string.Format("Scan KinectWall - {0}", newWallNo);
    navHead.ParentPage = this;
}
/// <summary>
/// Page constructor: prepares the UI, interest points, and CSV columns, then
/// starts the Kinect multi-source reader and the players controller.
/// </summary>
public AnglePage()
{
    //this.initiateCharts();
    InitializeComponent();
    lblInfo.Text = "Pronto!";

    _interestPoints = initializeInterestPoints();
    this.InitializeColumnsCsv();

    _sensor = KinectSensor.GetDefault();
    // Position history; presumably one inner list per tracked interest point —
    // TODO confirm against the frame handler.
    _posicoes = new List<List<Vector3>>();

    if (_sensor != null)
    {
        _sensor.Open();

        _reader = _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.Infrared | FrameSourceTypes.Body);
        _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;

        // Track players entering and leaving the scene.
        _playersController = new PlayersController();
        _playersController.BodyEntered += UserReporter_BodyEntered;
        _playersController.BodyLeft += UserReporter_BodyLeft;
        _playersController.Start();
    }
}
/// <summary>
/// Window Loaded handler: shows the face-features window, then wires color,
/// body, and face frame readers to the default sensor.
/// </summary>
private void Window_Loaded(object sender, RoutedEventArgs e)
{
    // Open the Face_Features page.
    show_face_features.Show();

    _sensor = KinectSensor.GetDefault();
    if (_sensor != null)
    {
        _sensor.Open();

        // One slot per body the sensor can track.
        _bodies = new Body[_sensor.BodyFrameSource.BodyCount];

        _colorReader = _sensor.ColorFrameSource.OpenReader();
        _colorReader.FrameArrived += ColorReader_FrameArrived;

        _bodyReader = _sensor.BodyFrameSource.OpenReader();
        _bodyReader.FrameArrived += BodyReader_FrameArrived;

        // 2) Initialize the face source with the desired features
        // (initial tracking id 0).
        _faceSource = new FaceFrameSource(_sensor, 0,
            FaceFrameFeatures.BoundingBoxInColorSpace |
            FaceFrameFeatures.FaceEngagement |
            FaceFrameFeatures.Glasses |
            FaceFrameFeatures.Happy |
            FaceFrameFeatures.LeftEyeClosed |
            FaceFrameFeatures.MouthOpen |
            FaceFrameFeatures.PointsInColorSpace |
            FaceFrameFeatures.RightEyeClosed |
            FaceFrameFeatures.LookingAway |
            FaceFrameFeatures.MouthMoved |
            FaceFrameFeatures.RotationOrientation
        );
        _faceReader = _faceSource.OpenReader();
        _faceReader.FrameArrived += FaceReader_FrameArrived;
    }
}
/// <summary>
/// Form constructor: configures the current display, caches the coordinate
/// mapper, opens a multi-source reader (color frames intentionally excluded),
/// and opens the one supported sensor.
/// </summary>
public MainForm()
{
    InitializeComponent();

    // Only one sensor is currently supported.
    this.kinectSensor = KinectSensor.GetDefault();

    SetupCurrentDisplay(DEFAULT_DISPLAYFRAMETYPE);

    this.coordinateMapper = this.kinectSensor.CoordinateMapper;

    // Color frames are deliberately left out of this reader (a variant that
    // included them was removed).
    this.multiSourceFrameReader = this.kinectSensor.OpenMultiSourceFrameReader(
        FrameSourceTypes.Infrared |
        FrameSourceTypes.Depth |
        FrameSourceTypes.BodyIndex |
        FrameSourceTypes.Body);
    this.multiSourceFrameReader.MultiSourceFrameArrived += this.Reader_MultiSourceFrameArrived;

    // Open the sensor (the IsAvailableChanged notifier remains disabled).
    this.kinectSensor.Open();
}
/// <summary>
/// Page constructor: sets up the current display, the multi-source reader,
/// availability notifications, and opens the one supported sensor.
/// </summary>
public MainPage()
{
    // one sensor is currently supported
    this.kinectSensor = KinectSensor.GetDefault();

    SetupCurrentDisplay(DEFAULT_DISPLAYFRAMETYPE);

    this.coordinateMapper = this.kinectSensor.CoordinateMapper;

    this.multiSourceFrameReader = this.kinectSensor.OpenMultiSourceFrameReader(FrameSourceTypes.Infrared | FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.BodyIndex | FrameSourceTypes.Body);
    this.multiSourceFrameReader.MultiSourceFrameArrived += this.Reader_MultiSourceFrameArrived;

    // set IsAvailableChanged event notifier
    this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

    // use the window object as the view model in this simple example
    this.DataContext = this;

    // open the sensor
    this.kinectSensor.Open();

    // NOTE(review): InitializeComponent runs LAST here, after all sensor
    // setup — confirm this ordering is intentional.
    this.InitializeComponent();
}
/// <summary>
/// Unity Start: caches the coordinate mapper and depth frame dimensions,
/// opens the sensor, and allocates the particle and camera-space buffers.
/// Does nothing when no sensor is present.
/// </summary>
void Start()
{
    _Sensor = KinectSensor.GetDefault();
    if (_Sensor != null)
    {
        _Mapper = _Sensor.CoordinateMapper;

        depthFrameDesc = _Sensor.DepthFrameSource.FrameDescription;
        depthWidth = depthFrameDesc.Width;
        depthHeight = depthFrameDesc.Height;

        if (!_Sensor.IsOpen)
        {
            _Sensor.Open();
        }

        // One particle and one camera-space point per depth pixel.
        particles = new ParticleSystem.Particle[depthWidth * depthHeight];
        _particleSystem = gameObject.GetComponent<ParticleSystem>();
        _MultiManager = MultiSourceManager.GetComponent<MultiSourceManager>();
        cameraSpacePoints = new CameraSpacePoint[depthWidth * depthHeight];
        //gameObject.GetComponent<Renderer>().material.mainTexture = _MultiManager.GetColorTexture();
    }
}
/// <summary>
/// Window Loaded handler: connects to the Kinect v2 sensor; on failure shows
/// the exception, updates the status text, and closes the window.
/// </summary>
void MainWindow_Loaded(object sender, RoutedEventArgs e)
{
    try
    {
        // Connect to the Kinect.
        kinect = KinectSensor.GetDefault();
        if (kinect == null)
        {
            throw new Exception("Cannot open kinect v2 sensor.");
        }

        checkText2.Text = "Connecting Kinect v2 sensor";
        kinect.Open();

        // Initial setup.
        Initialize();
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.ToString());
        checkText2.Text = "Disconnect Kinect v2 sensor";
        Close();
    }
}
/// <summary>
/// Window Loaded handler: sizes the canvas, opens the default Kinect, and
/// keeps prompting the user until a sensor becomes available (or they cancel,
/// which shuts the application down). Finally starts HD face tracking.
/// </summary>
private void onWindowLoad(object sender, RoutedEventArgs e)
{
    canvas.Width = this.Width * 0.8;
    canvas.Height = this.Height;

    _kinectSensor = KinectSensor.GetDefault();
    _kinectSensor.Open();
    // Open() is actually asynchronous, so give the sensor a moment to report
    // availability before showing the error dialog unnecessarily.
    System.Threading.Thread.Sleep(1000);

    while (!_kinectSensor.IsAvailable)
    {
        MessageBoxResult choice = MessageBox.Show(
            "Please ensure a Kinect v2 is connected.\n\nDo you want to try again?",
            "No Kinect Detected",
            MessageBoxButton.OKCancel,
            MessageBoxImage.Error);

        if (choice == MessageBoxResult.Cancel)
        {
            Application.Current.Shutdown();
            // Shutdown does not leave the method immediately by default;
            // return so the rest of the handler never runs.
            return;
        }

        // Retry: re-acquire and re-open the sensor, then wait again.
        _kinectSensor = KinectSensor.GetDefault();
        _kinectSensor.Open();
        System.Threading.Thread.Sleep(1000);
    }

    KinectHDFace faceTracker = new KinectHDFace(_kinectSensor, canvas);
}
/// <summary>
/// Initializes a new instance of the <see cref="MainWindow"/> class.
/// </summary>
public MainWindow()
{
    this.InitializeComponent();

    // Register the Kinect region with the application object.
    KinectRegion.SetKinectRegion(this, kinectRegion);
    App app = ((App)Application.Current);
    app.KinectRegion = kinectRegion;

    // Use the default sensor
    this.kinectRegion.KinectSensor = KinectSensor.GetDefault();

    //// Add in display content
    var sampleDataSource = SampleDataSource.GetGroup("Group-1");
    this.itemsControl.ItemsSource = sampleDataSource;

    //RecognizeSpeechAndWriteToConsoleMain1();

    // Tick every 100 ms on the UI dispatcher.
    System.Windows.Threading.DispatcherTimer myDispatcherTimer = new System.Windows.Threading.DispatcherTimer();
    myDispatcherTimer.Interval = new TimeSpan(0, 0, 0, 0, 100); // 100 Milliseconds
    myDispatcherTimer.Tick += myDispatcherTimer_Tick;
    myDispatcherTimer.Start();
}
// Use this for initialization
/// <summary>
/// Unity Start: opens a depth frame reader, allocates depth/color-space/
/// camera-space buffers, builds the downsampled mesh, and opens the sensor.
/// Does nothing when no sensor is present.
/// </summary>
void Start()
{
    _Sensor = KinectSensor.GetDefault();
    if (_Sensor != null)
    {
        _Mapper = _Sensor.CoordinateMapper;
        var frameDesc = _Sensor.DepthFrameSource.FrameDescription;
        _Reader = _Sensor.DepthFrameSource.OpenReader();

        // One entry per depth pixel, for the raw data and both mapped spaces.
        _Data = new ushort[_Sensor.DepthFrameSource.FrameDescription.LengthInPixels];
        colorSpace = new ColorSpacePoint[_Data.Length];
        camSpace = new CameraSpacePoint[_Data.Length];

        // Downsample to lower resolution
        CreateMesh(frameDesc.Width / _DownsampleSize, frameDesc.Height / _DownsampleSize);

        if (!_Sensor.IsOpen)
        {
            _Sensor.Open();
        }
    }
}
/// <summary>
/// Window Loaded handler: opens the Kinect body reader, shows the story,
/// starts the background audio, and kicks off a 1-second UI timer.
/// </summary>
private void Window_Loaded(Object sender, RoutedEventArgs e)
{
    //timer = new Timer();
    //timer.Interval = 1000;//1s
    //timer.Tick += new EventHandler(Timer_Tick);
    //timer.Start();
    //try
    //{
    // Acquire the Kinect device.
    kinect = KinectSensor.GetDefault();
    kinect.Open();

    // Open the body frame reader.
    bodyFrameReader = kinect.BodyFrameSource.OpenReader();
    bodyFrameReader.FrameArrived += Reader_BodyFrameArrived;

    Story.Visibility = Visibility.Visible;

    // NOTE(review): hard-coded absolute path to a specific user's desktop —
    // consider shipping the audio as a resource or using a relative path.
    voice.Source = new Uri("C:\\Users\\Hua\\Desktop\\voice\\background.mp3");
    voice.Play();

    // Color palette for tinting up to 6 body-index players (currently disabled).
    //bodyIndexColors = new Color[] { Colors.Red, Colors.Blue, Colors.Green, Colors.Yellow, Colors.Pink, Colors.Purple };

    timer = new Timer();
    timer.Interval = 1000; //1s
    timer.Tick += new EventHandler(Timer_Tick);
    timer.Start();
    //}
    //catch (Exception ex)
    //{
    //    MessageBox.Show(ex.Message);
    //    Close();
    //}
}
/// <summary>
/// Acquires the default Kinect, attaches each registered viewer's reader,
/// opens a body frame reader, and opens the sensor if it is not open yet.
/// </summary>
/// <returns>True when the sensor is both available and open; false when no
/// sensor could be acquired or it is not ready.</returns>
public override bool TurnOnDevice()
{
    kinectSensor = KinectSensor.GetDefault();
    if (kinectSensor == null)
    {
        return false;
    }

    // Let every registered viewer attach its own reader first.
    foreach (var viewer in viewers)
    {
        viewer.SetupReader(kinectSensor);
    }

    bodyReader = kinectSensor.BodyFrameSource.OpenReader();

    if (!kinectSensor.IsOpen)
    {
        kinectSensor.Open();
    }

    return kinectSensor.IsAvailable && kinectSensor.IsOpen;
}
/// <summary>
/// Switches the UI into tracking mode and starts the full Kinect pipeline:
/// background removal, skeleton drawing, and gesture detection for the
/// current exercise. Video playback starts regardless of sensor presence.
/// </summary>
private void StartTraceableMode()
{
    SetUIInTrackingMode();
    //ExerciseVideo.Play();

    _sensor = KinectSensor.GetDefault(); //todo - check, not working
    if (_sensor != null)
    {
        _sensor.Open();

        // 2) Initialize the background removal tool.
        _backgroundRemovalTool = new BackgroundRemovalTool(_sensor.CoordinateMapper);
        _drawSkeleton = new DrawSkeleton(_sensor, (int)(KinectSkeleton.Width), (int)(KinectSkeleton.Height));

        _reader = _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.BodyIndex | FrameSourceTypes.Body);
        _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;

        // Gesture detection for the current exercise (passed by ref below,
        // hence the temporary).
        Exercise tempExercise = CurrentExercise;
        _gestureAnalysis = new GestureAnalysis(ref tempExercise);
        _gestureDetector = new GestureDetector(_sensor, _gestureAnalysis, CurrentExercise);
        _gestureAnalysis.startGestureDeteced += _gestureAnalysis_startGestureDeteced;

        CurrentExercise.CreateRounds();
        //_timer.Start();
    }

    // Playback and the play-mode flag are set even when no sensor was found.
    ExerciseVideo.Play();
    inPlayMode = true;
}
/// <summary>
/// Returns the status of every attached Kinect v2. For the time being this is
/// always a one-element array, because only one Kinect v2 per computer is
/// supported.
/// </summary>
public static KinectV2StatusEventArgs[] GetAllKinectsStatus()
{
    KinectV2StatusEventArgs[] statuses = new KinectV2StatusEventArgs[1];

    for (int kinectNumber = 0; kinectNumber < 1; kinectNumber++)
    {
        KinectV2StatusEventArgs status = new KinectV2StatusEventArgs();
        status.KinectNumber = kinectNumber;

        // The service can apparently be opened at any time; it only becomes
        // available when a Kinect is actually attached to the computer.
        KinectSensor sensor = KinectSensor.GetDefault();
        status.UniqueKinectID = sensor.UniqueKinectId;

        if (sensor.IsAvailable)
        {
            status.Status = KinectBase.KinectStatus.Connected;
            System.Diagnostics.Debug.WriteLine("Kinect 2 connected (static method).");
        }
        else if (sensor.IsOpen)
        {
            status.Status = KinectBase.KinectStatus.NotReady;
            System.Diagnostics.Debug.WriteLine("Kinect 2 not ready (static method).");
        }
        else
        {
            status.Status = KinectBase.KinectStatus.Disconnected;
            System.Diagnostics.Debug.WriteLine("Kinect 2 disconnected (static method).");
        }

        statuses[kinectNumber] = status;
    }

    return statuses;
}
/// <summary>
/// Console entry point: connects to the database, streams Kinect body frames
/// to the frame handler, and runs a NetMQ server until it completes.
/// </summary>
static void Main(string[] args)
{
    // Create the list of joints
    //GenerateBones();
    connectToDatabase();

    sensor = KinectSensor.GetDefault();
    sensor.Open();
    BodyFrameReader bfr = sensor.BodyFrameSource.OpenReader();
    bfr.FrameArrived += bfr_FrameArrived;
    Console.WriteLine("Running.");

    using (NetMQContext context = NetMQContext.Create())
    {
        // Run the NetMQ server and block until it finishes.
        Task serverTask = Task.Factory.StartNew(() => StartServerNetMq(context));
        Task.WaitAll(serverTask);
    }

    // Keeps the process alive for frame callbacks.
    // NOTE(review): this line is only reached AFTER the server task has
    // already finished — confirm the infinite sleep is intentional.
    Thread.Sleep(Timeout.Infinite);
}
/// <summary>
/// Initializes a new instance of the MainWindow class: wires the depth frame
/// pipeline, allocates display buffers, and opens the sensor.
/// </summary>
public MainWindow()
{
    // get the kinectSensor object
    this.kinectSensor = KinectSensor.GetDefault();

    // open the reader for the depth frames
    this.depthFrameReader = this.kinectSensor.DepthFrameSource.OpenReader();

    // wire handler for frame arrival
    this.depthFrameReader.FrameArrived += this.Reader_FrameArrived;

    // get FrameDescription from DepthFrameSource
    this.depthFrameDescription = this.kinectSensor.DepthFrameSource.FrameDescription;

    // allocate space to put the pixels being received and converted
    // (one 8-bit intensity value per depth pixel)
    this.depthPixels = new byte[this.depthFrameDescription.Width * this.depthFrameDescription.Height];

    // create the bitmap to display
    this.depthBitmap = new WriteableBitmap(this.depthFrameDescription.Width, this.depthFrameDescription.Height, 96.0, 96.0, PixelFormats.Gray8, null);

    // set IsAvailableChanged event notifier
    this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

    // open the sensor
    this.kinectSensor.Open();

    // set the status text
    this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                    : Properties.Resources.NoSensorStatusText;

    // use the window object as the view model in this simple example
    this.DataContext = this;

    // initialize the components (controls) of the window
    this.InitializeComponent();
}
/// <summary>
/// Constructor; runs exactly once at startup. Sets up the color/depth frame
/// descriptions and a multi-source reader, then opens the sensor.
/// </summary>
public MainWindow()
{
    InitializeComponent();

    this.kinect = KinectSensor.GetDefault();

    // Configure the formats of the data to be read.
    this.colorFrameDescription = this.kinect.ColorFrameSource
        .CreateFrameDescription(ColorImageFormat.Bgra);
    this.depthFrameDescription = this.kinect.DepthFrameSource.FrameDescription;

    // Prepare a reader that delivers multiple data sources, specifying
    // which kinds of data to read.
    this.multiSourceFrameReader = this.kinect.OpenMultiSourceFrameReader
        (FrameSourceTypes.Color | FrameSourceTypes.Depth);
    this.multiSourceFrameReader.MultiSourceFrameArrived += MultiSourceFrameReader_MultiSourceFrameArrived;

    this.kinect.Open();
}
// Use this for initialization
/// <summary>
/// Unity Start: binds the terrain collider's data to Kinect depth frames,
/// computes the depth-to-height scale, and opens a debug log file.
/// </summary>
void Start()
{
    terrCollider = GetComponent<TerrainCollider>();
    terrData = terrCollider.terrainData;

    _Sensor = KinectSensor.GetDefault();
    if (_Sensor != null)
    {
        _Reader = _Sensor.DepthFrameSource.OpenReader();
        _Reader.FrameArrived += _Reader_FrameArrived;
        _Data = new ushort[_Sensor.DepthFrameSource.FrameDescription.LengthInPixels];
        Debug.LogFormat("Depth:H:{0} x W:{1}", _Sensor.DepthFrameSource.FrameDescription.Height, _Sensor.DepthFrameSource.FrameDescription.Width);
        Debug.LogFormat("TerrData:H:{0} x W:{1}", terrData.heightmapHeight, terrData.heightmapWidth);

        minDist = _Sensor.DepthFrameSource.DepthMinReliableDistance;
        // Max distance deliberately capped at 1500 instead of the sensor's
        // DepthMaxReliableDistance (see the commented-out expression).
        maxDist = 1500f;// _Sensor.DepthFrameSource.DepthMaxReliableDistance;
        scale = 0.1f / (maxDist - minDist);
        Debug.LogFormat("Depth:Min:{0} x Max:{1} (Scale:{2})", minDist, maxDist, scale);

        mapper = _Sensor.CoordinateMapper;
        _Sensor.Open();
    }

    // NOTE(review): hard-coded log path on drive E:\ — this will throw on
    // machines without that drive; consider a configurable/relative path.
    logFile = new System.IO.StreamWriter(new System.IO.FileStream("e:\\log.txt", System.IO.FileMode.OpenOrCreate));
    Debug.LogFormat("Scale:{0}", terrData.heightmapScale);
    data = new float[terrData.heightmapHeight, terrData.heightmapWidth];
}
/// <summary>
/// Unity Start: opens a color+depth multi-source reader and allocates the
/// color texture plus raw data buffers. Does nothing when no sensor exists.
/// </summary>
void Start()
{
    _Sensor = KinectSensor.GetDefault();
    if (_Sensor != null)
    {
        _Reader = _Sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Depth);

        // Color stream: RGBA texture plus a raw byte buffer for frame copies.
        var colorFrameDesc = _Sensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Rgba);
        ColorWidth = colorFrameDesc.Width;
        ColorHeight = colorFrameDesc.Height;
        _ColorTexture = new Texture2D(colorFrameDesc.Width, colorFrameDesc.Height, TextureFormat.RGBA32, false);
        _ColorData = new byte[colorFrameDesc.BytesPerPixel * colorFrameDesc.LengthInPixels];

        // Depth stream: one ushort per pixel.
        var depthFrameDesc = _Sensor.DepthFrameSource.FrameDescription;
        _DepthData = new ushort[depthFrameDesc.LengthInPixels];

        if (!_Sensor.IsOpen)
        {
            _Sensor.Open();
        }
    }
}
/// <summary>
/// Window constructor: wires a color+depth multi-source reader, allocates the
/// display bitmaps, opens the sensor, and prepares the countdown timer.
/// </summary>
public MainWindow()
{
    this.kinectSensor = KinectSensor.GetDefault();

    // The separate color/depth readers below were replaced by one
    // multi-source reader.
    //this.colorFrameReader = this.kinectSensor.ColorFrameSource.OpenReader();
    //this.depthFrameReader = this.kinectSensor.DepthFrameSource.OpenReader();
    this.multiSourceFrameReader = this.kinectSensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Depth);
    this.multiSourceFrameReader.MultiSourceFrameArrived += this.MultiSourceFrameReader_MultiSourceFrameArrived;
    //this.colorFrameReader.FrameArrived += this.Reader_ColorFrameArrived;
    //this.depthFrameReader.FrameArrived += this.Reader_DepthFrameArrived;

    this.colorFrameDescription = this.kinectSensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);
    this.depthFrameDescription = this.kinectSensor.DepthFrameSource.FrameDescription;

    // One display byte per depth pixel.
    this.depthPixels = new byte[this.depthFrameDescription.Width * this.depthFrameDescription.Height];
    this.colorBitmap = new WriteableBitmap(colorFrameDescription.Width, colorFrameDescription.Height, 96.0, 96.0, PixelFormats.Bgr32, null);
    this.depthBitmap = new WriteableBitmap(depthFrameDescription.Width, depthFrameDescription.Height, 96.0, 96.0, PixelFormats.Gray8, null);

    this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;
    this.kinectSensor.Open();

    this.DataContext = this;
    this.record = false;

    this.InitializeComponent();

    // Countdown display driven by a 1-second timer.
    CountDown.Content = alarmCounter.ToString();
    myTimer.Tick += new EventHandler(TimerEventProcessor);
    myTimer.Interval = 1000;
}
/// <summary>
/// Window Loaded handler: hooks up the previously-created face and body frame
/// readers, then wires HD face tracking to the default sensor and opens it.
/// </summary>
private void MainWindow_Loaded(object sender, RoutedEventArgs e)
{
    for (int i = 0; i < this.bodyCount; i++)
    {
        if (this.faceFrameReaders[i] != null)
        {
            // wire handler for face frame arrival
            this.faceFrameReaders[i].FrameArrived += this.Reader_FaceFrameArrived;
        }
    }

    if (this.bodyFrameReader != null)
    {
        // wire handler for body frame arrival
        this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;
    }

    kinectSensor = KinectSensor.GetDefault();
    if (kinectSensor != null)
    {
        _bodySource = kinectSensor.BodyFrameSource;
        _bodyReader = _bodySource.OpenReader();
        _bodyReader.FrameArrived += BodyReader_FrameArrived;

        // High-definition face pipeline.
        _faceSource = new HighDefinitionFaceFrameSource(kinectSensor);
        _faceReader = _faceSource.OpenReader();
        _faceReader.FrameArrived += FaceReader_FrameArrived;

        _faceModel = new FaceModel();
        _faceAlignment = new FaceAlignment();

        kinectSensor.Open();
    }
}
/// <summary>
/// Window Loaded handler: initializes per-face coordinate arrays and the
/// per-body (tracking-id keyed) state tables, then starts the Kinect
/// multi-source reader. Does nothing sensor-related when no sensor exists.
/// </summary>
private void Window_Loaded(object sender, RoutedEventArgs e)
{
    _sensor = KinectSensor.GetDefault();

    // Face screen coordinates, up to 10 slots. C# numeric arrays are
    // zero-initialized, so the original explicit zeroing loop was removed.
    facex = new double[10];
    facey = new double[10];

    health = new Dictionary<ulong, int>();
    charge = new Dictionary<ulong, int>();
    atkState = new Dictionary<ulong, int>();

    if (_sensor != null)
    {
        // BUG FIX: the coordinate mapper was previously read BEFORE the null
        // check, which threw a NullReferenceException with no sensor attached.
        this.coordinateMapper = _sensor.CoordinateMapper;

        _sensor.Open();
        _reader = _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.Infrared | FrameSourceTypes.Body);
        _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;
    }
}