/// <summary>
/// Wires up the Kinect sensor, the color/body frame readers, and the initial
/// fruit state, then loads the XAML-defined UI.
/// </summary>
public MainWindow()
{
    // Acquire and start the default Kinect sensor.
    sensor = KinectSensor.GetDefault();
    sensor.Open();

    // Keep references to the color and body sources.
    colorFrameSource = sensor.ColorFrameSource;
    bodyFrameSource = sensor.BodyFrameSource;

    // Open one reader per source and subscribe to frame delivery.
    colorFrameReader = colorFrameSource.OpenReader();
    colorFrameReader.FrameArrived += colorFrameReader_FrameArrived;
    bodyFrameReader = bodyFrameSource.OpenReader();
    bodyFrameReader.FrameArrived += bodyFrameReader_FrameArrived;

    // Drawing surface for the overlay graphics.
    drawingGroup = new DrawingGroup();

    // Fruit starts at the bottom-left corner of the color frame,
    // moving right and upward (negative Y is up in screen space).
    fruitPoint = new Point(0, colorFrameSource.FrameDescription.Height);
    fruitVelocity = new Vector(15, -30);
    fruitSize = 70;

    // Randomness source for gameplay.
    randomGenerator = new Random();

    // Load the UI last, once all state is ready.
    InitializeComponent();
}
/// <summary>
/// Builds the view, hooks shutdown cleanup, and starts streaming color frames
/// from the shared Kinect model into a displayable bitmap.
/// </summary>
public KinectVizView()
{
    InitializeComponent();

    // Release resources when the dispatcher shuts down.
    Dispatcher.ShutdownStarted += Dispatcher_ShutdownStarted;

    // Color frames come from the app-wide Kinect model singleton.
    this.colorFrameSource = KinectModel.Instance.ColorFrameSource;

    // Open the reader and subscribe to frame arrival.
    this.colorFrameReader = this.colorFrameSource.OpenReader();
    this.colorFrameReader.FrameArrived += this.Reader_ColorFrameArrived;

    // Describe the frames in BGRA so we know what bitmap dimensions to allocate.
    FrameDescription bgraDescription = this.colorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);
    this.colorBitmap = new WriteableBitmap(bgraDescription.Width, bgraDescription.Height, 96.0, 96.0, PixelFormats.Bgr32, null);

    // Use this view object as its own view model (simple example).
    this.DataContext = this;
}
/// <summary>
/// Maps a point from color-camera pixel coordinates into an arbitrary
/// target region of the given size.
/// </summary>
/// <param name="p">Point in color-frame coordinates.</param>
/// <param name="size">Dimensions of the target region.</param>
/// <returns>The point scaled into the target region.</returns>
public Point ColorToInterface(Point p, Size size)
{
    FrameDescription description = sensor.ColorFrameSource.FrameDescription;
    Size colorExtent = new Size(description.Width, description.Height);
    return ScaleToSize(p, colorExtent, size);
}
/// <summary>
/// Initializes the Kinect sensor, one reader per frame source, and the
/// ball/avatar game state, then loads the XAML-defined UI.
/// </summary>
public MainWindow()
{
    // Get a reference to the Kinect sensor and turn it on.
    sensor = KinectSensor.GetDefault();
    sensor.Open();

    // Get a reference to the color and body sources.
    colorFrameSource = sensor.ColorFrameSource;
    bodyFrameSource = sensor.BodyFrameSource;

    // Open the readers for each of the sources.
    // BUG FIX: the original opened a second color reader and subscribed the
    // handler twice; the first reader stayed open and subscribed, so it was
    // leaked and every color frame was processed twice.
    colorFrameReader = sensor.ColorFrameSource.OpenReader();
    bodyFrameReader = sensor.BodyFrameSource.OpenReader();

    // Create event handlers for each of the readers.
    colorFrameReader.FrameArrived += colorFrameReader_FrameArrived;
    bodyFrameReader.FrameArrived += bodyFrameReader_FrameArrived;

    // Get ready to draw graphics.
    drawingGroup = new DrawingGroup();

    BallNum = 2;

    // Both balls launch from the bottom-left corner of the color frame
    // with slightly different velocities.
    BallPoints[0].X = 0;
    BallPoints[0].Y = colorFrameSource.FrameDescription.Height;
    BallVelocities[0].X = 40;
    BallVelocities[0].Y = 80;
    BallPoints[1].X = 0;
    BallPoints[1].Y = colorFrameSource.FrameDescription.Height;
    BallVelocities[1].X = 45;
    BallVelocities[1].Y = 79;
    BallSize = 70;
    headSize = 250;
    handSize = 125;

    // Initialize a random generator.
    randomGenerator = new Random();

    // Tell the UI to get ready to be controlled.
    InitializeComponent();
}
/// <summary>
/// Starts the Kinect color stream and prepares the bitmap the UI binds to.
/// </summary>
public MainWindow()
{
    kinect = KinectSensor.GetDefault();

    // Cache the native color-frame dimensions.
    ColorFrameSource source = kinect.ColorFrameSource;
    colorFrameDesc = source.FrameDescription;

    // Begin receiving color frames.
    ColorFrameReader reader = source.OpenReader();
    reader.FrameArrived += Color_FrameArrived;

    // NOTE(review): the bitmap is allocated as Gray8 even though this is the
    // color stream — presumably Color_FrameArrived converts frames to
    // grayscale before writing; confirm against that handler.
    colorBitmap = new WriteableBitmap(colorFrameDesc.Width, colorFrameDesc.Height, 96.0, 96.0, PixelFormats.Gray8, null);

    // The window is its own view model; open the sensor, then load the UI.
    DataContext = this;
    kinect.Open();
    InitializeComponent();
}
/// <summary>
/// Allocates a pixel buffer large enough to hold one full color frame
/// in 32-bit BGR layout.
/// </summary>
/// <param name="colorFrameSource">Source whose frame dimensions size the buffer.</param>
/// <returns>A zeroed byte array of LengthInPixels * bytes-per-Bgr32-pixel.</returns>
internal static byte[] GetNewPixelArray(this ColorFrameSource colorFrameSource)
{
    // Round BitsPerPixel up to whole bytes (32 bits -> 4 bytes).
    int bytesPerPixel = (PixelFormats.Bgr32.BitsPerPixel + 7) / 8;
    return new byte[colorFrameSource.FrameDescription.LengthInPixels * bytesPerPixel];
}
/// <summary>
/// Sets up the Kinect color, depth, body, and HD-face pipelines, the
/// background recording thread, and the teleprompter text on window load.
/// </summary>
private void MainWindow_Loaded(object sender, RoutedEventArgs e)
{
    // Output path for the raw binary recording.
    binaryFilePath = System.IO.Path.Combine(Environment.CurrentDirectory, "data.bin");

    this.kinectSensor = KinectSensor.GetDefault();
    if (this.kinectSensor != null)
    {
        this.kinectSensor.Open();

        #region ColorFrame
        // Color stream: frames are requested as BGRA and shown via a Bgr32 bitmap.
        this.colorFrameSource = this.kinectSensor.ColorFrameSource;
        this.colorFrameReader = this.colorFrameSource.OpenReader();
        this.colorFrameDescription = this.colorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);
        this.colorFrameReader.FrameArrived += colorFrameReader_FrameArrived;
        this.colorPixelData = new Byte[this.colorFrameDescription.LengthInPixels * 4]; // 4 bytes per BGRA pixel
        this.colorBitmap = new WriteableBitmap(this.colorFrameDescription.Width, this.colorFrameDescription.Height, 96.0, 96.0, PixelFormats.Bgr32, null);
        this.colorBitmapRect = new Int32Rect(0, 0, this.colorBitmap.PixelWidth, this.colorBitmap.PixelHeight);
        this.colorBitmapStride = this.colorFrameDescription.Width * 4;
        #endregion

        #region DepthFrame
        // Depth stream: 16-bit depth samples shown via a Gray16 bitmap.
        this.depthFrameSource = this.kinectSensor.DepthFrameSource;
        this.depthFrameReader = this.depthFrameSource.OpenReader();
        this.depthFrameDesription = this.kinectSensor.DepthFrameSource.FrameDescription;
        this.depthFrameReader.FrameArrived += depthReader_FrameArrived;
        this.depthBitmap = new WriteableBitmap(this.depthFrameDesription.Width, this.depthFrameDesription.Height, 96.0, 96.0, PixelFormats.Gray16, null);
        this.depthBitmapRect = new Int32Rect(0, 0, this.depthBitmap.PixelWidth, this.depthBitmap.PixelHeight);
        this.depthPixelData = new ushort[this.depthFrameDesription.LengthInPixels]; // LengthInPixels replaces width * height — more convenient
        this.depthStride = this.depthFrameDesription.Width * 2; // 2 bytes per Gray16 pixel
        #endregion

        #region FacePoints
        // Listen for body data.
        _bodySource = this.kinectSensor.BodyFrameSource;
        _bodyReader = _bodySource.OpenReader();
        _bodyReader.FrameArrived += BodyReader_FrameArrived;

        // Listen for HD face data.
        _faceSource = new HighDefinitionFaceFrameSource(this.kinectSensor);
        _faceReader = _faceSource.OpenReader();
        _faceReader.FrameArrived += FaceReader_FrameArrived;
        _faceModel = new FaceModel();
        _faceAlignment = new FaceAlignment();
        #endregion

        // Staging buffers sized for the fixed Kinect v2 stream dimensions:
        // 1920x1080 BGRA color, 512x424 depth; 1347 is presumably the HD face
        // vertex count — confirm against the face-frame handler.
        abstractColorFrame.buffer = new byte[1920 * 1080 * 4];
        abstractDepthFrame.buffer = new ushort[512 * 424];
        abstractFacePointsFrame.buffer = new CameraSpacePoint[1347];

        colorImage.Source = this.colorBitmap;
        depthImage.Source = this.depthBitmap;

        // Background writer that drains the frame queue into the binary file.
        queueSaver = new QueueSaver(binaryFilePath);
        mainWritingThread = new Thread(new ThreadStart(saveAll2File));
        mainWritingThread.Start();

        // Teleprompter text: one line shown per take.
        string txtPath = System.IO.Path.Combine(Environment.CurrentDirectory, "word.txt");
        if (File.Exists(txtPath))
        {
            wordReader = new StreamReader(txtPath, Encoding.Default);
        }
        else
        {
            if (MessageBox.Show("提词器文本文件找不到!") == MessageBoxResult.OK)
            {
                Application.Current.Shutdown();
            }
            // BUG FIX: Shutdown() does not abort this handler; without this
            // return, the code below dereferenced the null wordReader and
            // threw a NullReferenceException.
            return;
        }

        if (!wordReader.EndOfStream)
        {
            this.txtBlock_narrator.Text = wordReader.ReadLine();
        }
        else
        {
            this.txtBlock_narrator.Text = "采集结束。";
            this.btn_startRecording.IsEnabled = false;
        }
    }
}