static void AllFramesReady(Object sender, AllFramesReadyEventArgs e)
        {
            // Per-frame pipeline: locate the tracked head in the skeleton
            // stream and forward it to DepthFrameReady; when tracking is not
            // blocked, fall back to a fixed centre point (320,240); finally
            // hand the color frame to ImageFrameReady.
            // Side effects: writes the dp/cp/nearX/nearY fields.
            if (e != null)
            {
                // BUGFIX: wrap the skeleton frame in a using so it is disposed
                // even if a handler below throws (the original only called
                // Dispose() on the happy path).
                using (SkeletonFrame sFrame = e.OpenSkeletonFrame())
                {
                    if (sFrame != null)
                    {
                        // Only build the mapper when skeleton data actually
                        // arrived; the original allocated one per event
                        // regardless of whether it was needed.
                        CoordinateMapper cm = new CoordinateMapper(sensor);

                        Skeleton[] skeletons = new Skeleton[sFrame.SkeletonArrayLength];
                        sFrame.CopySkeletonDataTo(skeletons);
                        SkeletonPoint sLoc = new SkeletonPoint();
                        foreach (Skeleton s in skeletons)
                        {
                            if (s.TrackingState == SkeletonTrackingState.Tracked)
                            {
                                sLoc = s.Joints[JointType.Head].Position;
                                // NOTE(review): ownership of the two frames
                                // opened here passes to DepthFrameReady —
                                // confirm that it disposes them.
                                DepthFrameReady(sender, e.OpenDepthImageFrame(), e.OpenColorImageFrame(), sLoc);
                                DepthImagePoint dLoc = cm.MapSkeletonPointToDepthPoint(sLoc, DepthImageFormat.Resolution640x480Fps30);
                                Console.WriteLine("Head coordinates: " + dLoc.X + "," + dLoc.Y);
                            }
                        }
                    }
                }

                if (block == false)
                {
                    // No tracking lock: aim at the 640x480 image centre.
                    dp   = new DepthImagePoint();
                    cp   = new ColorImagePoint();
                    cp.X = 320;
                    cp.Y = 240;
                    dp.X = 320;
                    dp.Y = 240;
                    Console.WriteLine(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>" + cp.X + "    " + cp.Y);
                }
                nearX = dp.X;
                nearY = dp.Y;
                // NOTE(review): this frame's ownership passes to
                // ImageFrameReady — confirm it disposes the frame.
                ImageFrameReady(sender, e.OpenColorImageFrame());
            }
        }
        void kinect_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // Render the latest color frame, then run the gesture / clothes
            // logic against the first available skeleton.
            using (ColorImageFrame frame = e.OpenColorImageFrame())
            {
                if (frame == null)
                {
                    return;
                }

                var colorData = new byte[frame.PixelDataLength];
                frame.CopyPixelDataTo(colorData);

                // Bgr32 => 4 bytes per pixel per row.
                camera.Source = BitmapSource.Create(
                    frame.Width, frame.Height, 96, 96,
                    PixelFormats.Bgr32, null, colorData, frame.Width * 4);
            }

            cur_skeleton = GetFirstSkeleton(e);
            if (cur_skeleton == null)
            {
                return;
            }

            check_hand_gesture();
            if (cur_clothes != null)
            {
                adjust_clothes_position(cur_clothes);
            }
        }
        void _sensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // Capture the newest color and depth frames, copy the color data
            // into _bitmapBits (forcing the alpha channel opaque) and paint it
            // into the WriteableBitmap. gotColor/gotDepth gate the downstream
            // work so buffers that were not refilled this event are never used.
            bool gotColor = false;
            bool gotDepth = false;

            using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
            {
                if (colorFrame != null)
                {
                    Debug.Assert(colorFrame.Width == 640 && colorFrame.Height == 480, "This app only uses 640x480.");

                    // (Re)allocate buffers on the first frame or a size change.
                    if (_colorPixels.Length != colorFrame.PixelDataLength)
                    {
                        _colorPixels      = new byte[colorFrame.PixelDataLength];
                        _bitmap           = new WriteableBitmap(640, 480, 96.0, 96.0, PixelFormats.Bgr32, null);
                        _bitmapBits       = new byte[640 * 480 * 4];
                        this.Image.Source = _bitmap;
                    }

                    colorFrame.CopyPixelDataTo(_colorPixels);
                    gotColor = true;
                }
            }

            using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
            {
                if (depthFrame != null)
                {
                    Debug.Assert(depthFrame.Width == 640 && depthFrame.Height == 480, "This app only uses 640x480.");

                    if (_depthPixels.Length != depthFrame.PixelDataLength)
                    {
                        _depthPixels          = new short[depthFrame.PixelDataLength];
                        _mappedDepthLocations = new ColorImagePoint[depthFrame.PixelDataLength];
                    }

                    depthFrame.CopyPixelDataTo(_depthPixels);
                    gotDepth = true;
                }
            }

            // BUGFIX: the original ran the copy/map/draw steps unconditionally,
            // dereferencing _bitmapBits/_bitmap before the first color frame
            // allocated them and ignoring the gotColor/gotDepth flags entirely.
            if (gotColor)
            {
                // Put the color image into _bitmapBits, setting alpha to 255.
                for (int i = 0; i < _colorPixels.Length; i += 4)
                {
                    _bitmapBits[i + 3] = 255;
                    _bitmapBits[i + 2] = _colorPixels[i + 2];
                    _bitmapBits[i + 1] = _colorPixels[i + 1];
                    _bitmapBits[i]     = _colorPixels[i];
                }
            }

            if (gotDepth)
            {
                // Map each depth pixel to its color-space coordinate.
                // (The original also iterated the mapped points into unused
                // locals; that dead loop has been removed.)
                this._sensor.MapDepthFrameToColorFrame(DepthImageFormat.Resolution640x480Fps30, _depthPixels, ColorImageFormat.RgbResolution640x480Fps30, _mappedDepthLocations);
            }

            if (gotColor)
            {
                _bitmap.WritePixels(new Int32Rect(0, 0, _bitmap.PixelWidth, _bitmap.PixelHeight), _bitmapBits, _bitmap.PixelWidth * sizeof(int), 0);
            }
        }
        private void AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // Copy the color frame into the shared pixel buffer, refresh the
            // display bitmap, run motion detection, then notify subscribers.
            using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
            {
                if (colorFrame == null)
                {
                    return;
                }

                colorFrame.CopyPixelDataTo(ColorPixels);

                ColorBitmap.WritePixels(
                    new Int32Rect(0, 0, ColorBitmap.PixelWidth, ColorBitmap.PixelHeight),
                    ColorPixels,
                    ColorBitmap.PixelWidth * sizeof(int),
                    0);

                DetectMotion();
            }

            // BUGFIX: snapshot the delegate so an unsubscribe on another
            // thread between the null test and the call cannot NRE; reuse
            // EventArgs.Empty instead of allocating per frame.
            var handler = FrameUpdated;
            if (handler != null)
            {
                handler(this, EventArgs.Empty);
            }
        }
        private void KinectSensorOnAllFramesReady(object sender, AllFramesReadyEventArgs allFramesReadyEventArgs)
        {
            // Display the incoming color stream, rebuilding the pixel buffer
            // and WriteableBitmap whenever the stream format changes.
            using (var colorImageFrame = allFramesReadyEventArgs.OpenColorImageFrame())
            {
                if (colorImageFrame == null)
                {
                    return;
                }

                if (this.currentColorImageFormat != colorImageFrame.Format)
                {
                    // New format (or first frame): reallocate everything.
                    this.currentColorImageFormat  = colorImageFrame.Format;
                    this.colorImageData           = new byte[colorImageFrame.PixelDataLength];
                    this.colorImageWritableBitmap = new WriteableBitmap(
                        colorImageFrame.Width, colorImageFrame.Height, 96, 96, PixelFormats.Bgr32, null);
                    ColorImage.Source = this.colorImageWritableBitmap;
                }

                colorImageFrame.CopyPixelDataTo(this.colorImageData);

                var dirtyRect = new Int32Rect(0, 0, colorImageFrame.Width, colorImageFrame.Height);
                this.colorImageWritableBitmap.WritePixels(
                    dirtyRect,
                    this.colorImageData,
                    colorImageFrame.Width * Bgr32BytesPerPixel,
                    0);
            }
        }
// Beispiel #6
        void mykinect_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // Draw the depth and color frames; the depth pixels are passed
            // through RangeFilter() before the color image is painted.
            depthframe = e.OpenDepthImageFrame();
            colorframe = e.OpenColorImageFrame();

            if (depthframe != null && colorframe != null)
            {
                depthpixelData = new short[depthframe.PixelDataLength];
                depthframe.CopyPixelDataTo(depthpixelData);
                _DepthImageBitmap.WritePixels(_DepthImageBitmapRect, depthpixelData, _DepthImageStride, 0);

                colorpixelData = new byte[colorframe.PixelDataLength];
                colorframe.CopyPixelDataTo(colorpixelData);

                if (depthpixelData != null)
                {
                    RangeFilter();
                }

                _ColorImageBitmap.WritePixels(_ColorImageBitmapRect, colorpixelData, _ColorImageStride, 0);

                depthframe.Dispose();
                colorframe.Dispose();
            }
            else
            {
                // BUGFIX: the original disposed the frames only when BOTH were
                // present, leaking whichever frame arrived alone.
                if (depthframe != null)
                {
                    depthframe.Dispose();
                }
                if (colorframe != null)
                {
                    colorframe.Dispose();
                }
            }
        }
// Beispiel #7
        void kinect_AllFramesReady( object sender, AllFramesReadyEventArgs e )
        {
            // Show the color stream and overlay image2 on each tracked head.
            // BUGFIX: the original dereferenced OpenColorImageFrame() without
            // a null check (NRE on a dropped frame) and never disposed either
            // the color or the skeleton frame.
            using ( ColorImageFrame colorFrame = e.OpenColorImageFrame() ) {
                if ( colorFrame != null ) {
                    image1.Source = colorFrame.ToBitmapSource();
                }
            }

            // Fetch the skeleton frame.
            using ( SkeletonFrame skeletonFrame = e.OpenSkeletonFrame() ) {
                if ( skeletonFrame != null ) {
                    // Copy out the skeleton data.
                    Skeleton[] skeletonData = new Skeleton[skeletonFrame.SkeletonArrayLength];
                    skeletonFrame.CopySkeletonDataTo( skeletonData );

                    // Position image2 over each player's head.
                    foreach ( var skeleton in skeletonData ) {
                        var head = skeleton.Joints[JointType.Head];
                        if ( head.TrackingState == JointTrackingState.Tracked ) {
                            ColorImagePoint point = kinect.MapSkeletonPointToColor( head.Position, kinect.ColorStream.Format );
                            var x = image2.Width / 2;
                            var y = image2.Height / 2;

                            image2.Margin = new Thickness( point.X - x, point.Y - y, 0, 0 );
                            image2.Visibility = System.Windows.Visibility.Visible;
                        }
                    }
                }
            }
        }
        void _sensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // Push the latest color frame into the imageRGB control.
            using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
            {
                if (colorFrame == null)
                {
                    return;
                }

                int stride = colorFrame.Width * 4;   // Bgr32 row size
                var buffer = new byte[colorFrame.PixelDataLength];
                colorFrame.CopyPixelDataTo(buffer);

                imageRGB.Source = BitmapSource.Create(
                    colorFrame.Width, colorFrame.Height, 96, 96,
                    PixelFormats.Bgr32, null, buffer, stride);
            }
        }
// Beispiel #9
        /// <summary>
        /// Frame-update event for the RGB camera, depth camera and skeleton.
        /// Renders either a background-masked or an "optical camouflage" image.
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        void kinect_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            try {
                // Get the sensor that raised this event.
                KinectSensor kinect = sender as KinectSensor;
                if (kinect == null)
                {
                    return;
                }

                // The radio button selects between masking the background and
                // rendering the optical-camouflage effect.
                using (ColorImageFrame colorFrame = e.OpenColorImageFrame()) {
                    using (DepthImageFrame depthFrame = e.OpenDepthImageFrame()) {
                        if (colorFrame != null && depthFrame != null)
                        {
                            byte[] renderedBits = radioButtonOpticalCamouflage.IsChecked == true
                                ? OpticalCamouflage(kinect, colorFrame, depthFrame)
                                : BackgroundMask(kinect, colorFrame, depthFrame);

                            imageRgb.Source = BitmapSource.Create(colorFrame.Width, colorFrame.Height, 96, 96,
                                                                  PixelFormats.Bgr32, null, renderedBits, colorFrame.Width * colorFrame.BytesPerPixel);
                        }
                    }
                }
            }
            catch (Exception ex) {
                MessageBox.Show(ex.Message);
            }
        }
        void mykinect_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            #region Basic color and depth image processing
            depthframe = e.OpenDepthImageFrame();
            colorframe = e.OpenColorImageFrame();

            if (depthframe == null || colorframe == null)
            {
                // BUGFIX: the original returned here without disposing the
                // frame that DID arrive, leaking it until the GC ran.
                if (depthframe != null)
                {
                    depthframe.Dispose();
                }
                if (colorframe != null)
                {
                    colorframe.Dispose();
                }
                return;
            }

            depthpixelData = new short[depthframe.PixelDataLength];
            depthframe.CopyPixelDataTo(depthpixelData);
            _DepthImageBitmap.WritePixels(_DepthImageBitmapRect, depthpixelData, _DepthImageStride, 0);
            depthPixel = new DepthImagePixel[depthframe.PixelDataLength];
            depthframe.CopyDepthImagePixelDataTo(depthPixel);

            colorpixelData = new byte[colorframe.PixelDataLength];
            colorframe.CopyPixelDataTo(colorpixelData);
            #endregion

            if (depthpixelData != null)
            {
                PlayerFilter();
                Alarm();
            }

            _ColorImageBitmap.WritePixels(_ColorImageBitmapRect, colorpixelData, _ColorImageStride, 0);
            depthframe.Dispose();
            colorframe.Dispose();
        }
        void sensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // Show the raw color stream and a depth-colored visualisation
            // side by side; both frames are released by the using blocks.
            using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
            {
                if (colorFrame != null)
                {
                    // Raw Bgr32 bytes straight from the sensor.
                    var colorData = new byte[colorFrame.PixelDataLength];
                    colorFrame.CopyPixelDataTo(colorData);

                    // Bytes per row = width * 4 (Bgr32).
                    img_colorimage.Source = BitmapSource.Create(
                        colorFrame.Width, colorFrame.Height, 96, 96,
                        PixelFormats.Bgr32, null, colorData, colorFrame.Width * 4);
                }
            }

            using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
            {
                if (depthFrame != null)
                {
                    // Translate depth readings into display colors.
                    byte[] depthColors = GenerateColoredBytes(depthFrame);

                    img_depthimage.Source = BitmapSource.Create(
                        depthFrame.Width, depthFrame.Height, 96, 96,
                        PixelFormats.Bgr32, null, depthColors, depthFrame.Width * 4);
                }
            }
        }
        void _sensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // Copy the current color frame and hand it to the RGB viewer.
            using (ColorImageFrame frame = e.OpenColorImageFrame())
            {
                if (frame == null)
                {
                    return;
                }

                var data = new byte[frame.PixelDataLength];
                frame.CopyPixelDataTo(data);

                // Bgr32: four bytes per pixel per row.
                imageRGB.Source = BitmapSource.Create(
                    frame.Width,
                    frame.Height,
                    96,
                    96,
                    PixelFormats.Bgr32,
                    null,
                    data,
                    frame.Width * 4);
            }
        }
// Beispiel #13
        void sensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // Paint the camera image as the canvas background.
            using (var colorFrame = e.OpenColorImageFrame())
            {
                if (colorFrame == null)
                {
                    return;
                }

                imageCanvas.Background = new ImageBrush(colorFrame.ToBitmapSource());
            }

            // Draw the tracked skeletons and feed the right hand into the
            // gesture/posture detectors.
            using (var skeletonFrame = e.OpenSkeletonFrame())
            {
                if (skeletonFrame == null)
                {
                    return;
                }

                var tracked = skeletonFrame.GetSkeletons()
                                           .Where(s => s.TrackingState == SkeletonTrackingState.Tracked)
                                           .ToArray();
                skeletonDisplayManager.Draw(tracked, false);

                var firstSkeleton = tracked.FirstOrDefault();
                if (firstSkeleton == null)
                {
                    return;
                }

                var rightHand = firstSkeleton.Joints.First(j => j.JointType == JointType.HandRight);
                gestureDetector.Add(rightHand.Position, sensor);
                postureDetector.TrackPostures(firstSkeleton);
            }
        }
// Beispiel #14
        /// <summary>
        /// Frame-update event for the RGB camera, depth camera and skeleton.
        /// Draws the color image with its background masked out.
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        void kinect_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            try {
                // The sensor that raised this event.
                var kinect = sender as KinectSensor;
                if (kinect == null)
                {
                    return;
                }

                // Render the background-masked image.
                using (ColorImageFrame colorFrame = e.OpenColorImageFrame()) {
                    using (DepthImageFrame depthFrame = e.OpenDepthImageFrame()) {
                        if (colorFrame == null || depthFrame == null)
                        {
                            return;
                        }

                        byte[] maskedBits = BackgroundMask(kinect, colorFrame, depthFrame);
                        image.Source = BitmapSource.Create(colorFrame.Width, colorFrame.Height, 96, 96,
                                                           PixelFormats.Bgr32, null, maskedBits,
                                                           colorFrame.Width * colorFrame.BytesPerPixel);
                    }
                }
            }
            catch (Exception ex) {
                MessageBox.Show(ex.Message);
            }
        }
        void _sensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // Render the color stream into image2 and shrink image3 once the
            // tracked player is beyond the 500-unit threshold.
            using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
            {
                if (colorFrame == null)
                {
                    return;
                }

                // Pull the raw Bgr32 bytes out of the frame.
                var pixels = new byte[colorFrame.PixelDataLength];
                colorFrame.CopyPixelDataTo(pixels);

                image2.Source = BitmapSource.Create(
                    colorFrame.Width, colorFrame.Height, 96, 96,
                    PixelFormats.Bgr32, null, pixels, colorFrame.Width * 4);

                if (playerDepth > 500)
                {
                    // Render image3 at half size.
                    image3.RenderTransform = new ScaleTransform(.5, .5);
                }
            }

            // The depth frame is opened and immediately released; nothing is
            // done with its contents here.
            using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
            {
                if (depthFrame == null)
                {
                    return;
                }
            }
        }
// Beispiel #16
        // When the event handler captures data, show it in the ctrImage control.
        private void MiKinect_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // The frame is released each time new data is captured.
            using (ColorImageFrame frameImage = e.OpenColorImageFrame())
            {
                if (frameImage == null)
                {
                    return;
                }

                // Raw pixel data received from the Kinect.
                var colorData = new byte[frameImage.PixelDataLength];
                frameImage.CopyPixelDataTo(colorData);

                // Show it in the XAML image control. Arguments:
                // width, height, horizontal dpi, vertical dpi, pixel format,
                // bitmap palette, byte array holding the image, stride.
                ctrImg_mostrarImagenes.Source = BitmapSource.Create(
                    frameImage.Width,
                    frameImage.Height,
                    96,
                    96,
                    PixelFormats.Bgr32,
                    null,
                    colorData,
                    frameImage.Width * frameImage.BytesPerPixel);
            }
        }
        void mykinect_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // Draw the depth image, compute the user border from the depth
            // pixels, then paint the color image.
            depthframe = e.OpenDepthImageFrame();
            colorframe = e.OpenColorImageFrame();

            if (depthframe == null || colorframe == null)
            {
                // BUGFIX: the original returned here without disposing the
                // frame that DID arrive, leaking it until the GC ran.
                if (depthframe != null)
                {
                    depthframe.Dispose();
                }
                if (colorframe != null)
                {
                    colorframe.Dispose();
                }
                return;
            }

            depthpixelData = new short[depthframe.PixelDataLength];
            depthframe.CopyPixelDataTo(depthpixelData);
            _DepthImageBitmap.WritePixels(_DepthImageBitmapRect, depthpixelData, _DepthImageStride, 0);
            depthPixel = new DepthImagePixel[depthframe.PixelDataLength];
            depthframe.CopyDepthImagePixelDataTo(depthPixel);

            colorpixelData = new byte[colorframe.PixelDataLength];
            colorframe.CopyPixelDataTo(colorpixelData);

            if (depthpixelData != null)
            {
                UserBorderCaculation();
            }

            _ColorImageBitmap.WritePixels(_ColorImageBitmapRect, colorpixelData, _ColorImageStride, 0);

            depthframe.Dispose();
            colorframe.Dispose();
        }
// Beispiel #18
        private void KSensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // To get the Kinect image, a bitmap has to be created from the frame.
            using (var colorFrame = e.OpenColorImageFrame())
            {
                if (colorFrame != null)
                {
                    pbKinectStream.Image = CreateBitmapFromSensor(colorFrame);
                }
            }

            // Read the skeleton stream and drive the mouse with the right hand.
            using (var skeletonFrame = e.OpenSkeletonFrame())
            {
                if (skeletonFrame == null)
                {
                    return;
                }

                var skeletons = new Skeleton[skeletonFrame.SkeletonArrayLength];
                skeletonFrame.CopySkeletonDataTo(skeletons);

                var trackedSkeleton = skeletons.FirstOrDefault(s => s.TrackingState == SkeletonTrackingState.Tracked);
                if (trackedSkeleton == null)
                {
                    return;
                }

                // Right-hand position projected to color-image coordinates.
                // NOTE(review): the original passes InfraredResolution640x480Fps30
                // to a color-space mapping — confirm this format is intended.
                var position         = trackedSkeleton.Joints[JointType.HandRight].Position;
                var coordinateMapper = new CoordinateMapper(kSensor);
                var colorPoint       = coordinateMapper.MapSkeletonPointToColorPoint(position, ColorImageFormat.InfraredResolution640x480Fps30);

                this.lblPosition.Text = string.Format(" Hand Position X : {0}, Y : {1}", colorPoint.X, colorPoint.Y);

                // Bind the mouse cursor to the right-hand movement.
                Cursor.Position = new Point(colorPoint.X, colorPoint.Y);
                Cursor.Clip     = new Rectangle(this.Location, this.Size);
            }
        }
// Beispiel #19
        private void Kinect_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // Show the camera feed, then redraw every tracked skeleton.
            using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
            {
                if (colorFrame != null)
                {
                    camera.Source = ColorFrameConverter.CovertToBitmap(colorFrame, StreamDpiX, StreamDpiY);
                }
            }

            using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
            {
                if (skeletonFrame == null)
                {
                    return;
                }

                canvas.Children.Clear();

                _skeletons = new Skeleton[skeletonFrame.SkeletonArrayLength];
                skeletonFrame.CopySkeletonDataTo(_skeletons);

                foreach (var skeleton in _skeletons)
                {
                    if (skeleton.TrackingState == SkeletonTrackingState.Tracked)
                    {
                        DrawBonesAndJoints(skeleton);
                    }
                }

                // Drop the buffer again; the field only carries data within
                // this handler.
                _skeletons = null;
            }
        }
// Beispiel #20
        void kinect_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // Draw the RGB stream, then feed depth and skeleton data into the
            // interaction stream.
            using (var colorFrame = e.OpenColorImageFrame()) {
                if (colorFrame != null)
                {
                    int stride = colorFrame.Width * 4;   // Bgr32
                    var colorData = new byte[colorFrame.PixelDataLength];
                    colorFrame.CopyPixelDataTo(colorData);

                    ImageRgb.Source = BitmapSource.Create(colorFrame.Width, colorFrame.Height, 96, 96,
                                                          PixelFormats.Bgr32, null, colorData, stride);
                }
            }

            using (var depthFrame = e.OpenDepthImageFrame()) {
                if (depthFrame != null)
                {
                    // Hand the depth data to the stream.
                    // GetRawPixelData() is an extension method implemented in
                    // the interaction library.
                    stream.ProcessDepth(depthFrame.GetRawPixelData(), depthFrame.Timestamp);
                }
            }

            using (var skeletonFrame = e.OpenSkeletonFrame()) {
                if (skeletonFrame != null)
                {
                    var skeletons = new Skeleton[skeletonFrame.SkeletonArrayLength];
                    skeletonFrame.CopySkeletonDataTo(skeletons);

                    // Hand the skeleton data (plus accelerometer) to the stream.
                    stream.ProcessSkeleton(skeletons, kinect.AccelerometerGetCurrentReading(), skeletonFrame.Timestamp);
                }
            }
        }
        void sensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // Display the color image, then locate the first skeleton and
            // project it into camera space.
            using (ColorImageFrame frame = e.OpenColorImageFrame())
            {
                if (frame == null)
                {
                    return;
                }

                var pixelData = new byte[frame.PixelDataLength];
                frame.CopyPixelDataTo(pixelData);

                // Bgr32 => 4 bytes per pixel.
                imagecolor.Source = BitmapSource.Create(
                    frame.Width, frame.Height, 96, 96,
                    PixelFormats.Bgr32, null, pixelData, frame.Width * 4);

                Skeleton first = GetFirstSkeleton(e);
                if (first == null)
                {
                    return;
                }

                GetCameraPoint(first, e);
            }
        }
// Beispiel #22
        private void sensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // Copy the depth and color frames into the shared buffers,
            // optionally range-filter the color image against the depth data,
            // and refresh both viewer bitmaps.
            depthFrame = e.OpenDepthImageFrame();
            colorFrame = e.OpenColorImageFrame();

            try
            {
                if (depthFrame != null && colorFrame != null)
                {
                    depthFrame.CopyPixelDataTo(depthPixels);
                    colorFrame.CopyPixelDataTo(colorPixels);

                    // NOTE(review): convertImage is never consumed below —
                    // confirm whether this conversion is still needed.
                    Image <Bgr, Byte> convertImage = colorFrame.ToOpenCVImage <Bgr, Byte>();

                    depthBitmap.WritePixels(new Int32Rect(0, 0, sensor.DepthStream.FrameWidth, sensor.DepthStream.FrameHeight), depthPixels, depthBitmapStride, 0);

                    if (depthPixels != null && isCombineDepthToColor)
                    {
                        RangeFilter();
                    }

                    colorBitmap.WritePixels(new Int32Rect(0, 0, sensor.ColorStream.FrameWidth, sensor.ColorStream.FrameHeight), colorPixels, colorBitmapStride, 0);
                    colorImageViewer.Source = colorBitmap;
                    depthImageViewer.Source = depthBitmap;
                }
            }
            finally
            {
                // BUGFIX: the Dispose calls were commented out in the
                // original, leaking a frame pair on every event.
                if (depthFrame != null)
                {
                    depthFrame.Dispose();
                }
                if (colorFrame != null)
                {
                    colorFrame.Dispose();
                }
            }
        }
// Beispiel #23
        /*
         * /// <summary>
         * /// Converts rotation quaternion to Euler angles
         * /// And then maps them to a specified range of values to control the refresh rate
         * /// v1(v1.5-v1.8)ではQuaternion使えない??
         * /// </summary>
         * /// <param name="rotQuaternion">face rotation quaternion</param>
         * /// <param name="pitch">rotation about the X-axis</param>
         * /// <param name="yaw">rotation about the Y-axis</param>
         * /// <param name="roll">rotation about the Z-axis</param>
         * private static void ExtractFaceRotationInDegrees(double qx, double qy, double qz, double qw, out double pitch, out double yaw, out double roll)
         * {
         *  double x = qx;
         *  double y = qy;
         *  double z = qz;
         *  double w = qw;
         *
         *  double pitchD, yawD, rollD;
         *
         *  // convert face rotation quaternion to Euler angles in degrees
         *
         *  pitchD = Math.Atan2(2 * ((y * z) + (w * x)), (w * w) - (x * x) - (y * y) + (z * z)) / Math.PI * 180.0;
         *  yawD = Math.Asin(2 * ((w * y) - (x * z))) / Math.PI * 180.0;
         *  rollD = Math.Atan2(2 * ((x * y) + (w * z)), (w * w) + (x * x) - (y * y) - (z * z)) / Math.PI * 180.0;
         *
         *  // clamp the values to a multiple of the specified increment to control the refresh rate
         *  double increment = FaceRotationIncrementInDegrees;
         *  pitch = (double)(Math.Floor((pitchD + ((increment / 2.0) * (pitchD > 0 ? 1.0 : -1.0))) / increment) * increment);
         *  yaw = (double)(Math.Floor((yawD + ((increment / 2.0) * (yawD > 0 ? 1.0 : -1.0))) / increment) * increment);
         *  roll = (double)(Math.Floor((rollD + ((increment / 2.0) * (rollD > 0 ? 1.0 : -1.0))) / increment) * increment);
         * }
         */

        /// <summary>
        /// Event raised whenever the frames update (e carries the new data).
        /// This is the Kinect main per-frame loop.
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        void Kinect_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
            using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
            using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
            {
                // Show the RGB stream in the Image control.
                if (colorFrame != null)
                {
                    imageRgbCamera.Source = colorFrame.ToBitmapSource();
                }

                // Draw every skeleton currently being tracked.
                if (skeletonFrame != null)
                {
                    foreach (var trackedSkeleton in skeletonFrame.GetTrackedSkeletons())
                    {
                        ShowSkeleton(trackedSkeleton);
                    }
                }

                // Once RGB, depth and skeleton frames are all available, run
                // face recognition against each tracked skeleton.
                if (colorFrame != null && depthFrame != null && skeletonFrame != null)
                {
                    foreach (var trackedSkeleton in skeletonFrame.GetTrackedSkeletons())
                    {
                        FaceDataAcquisition(colorFrame, depthFrame, trackedSkeleton);
                    }
                }
            }
        }
        void kinect_AllFramesReady( object sender, AllFramesReadyEventArgs e )
        {
            // Display the RGB camera image.
            using ( var colorFrame = e.OpenColorImageFrame() )
            {
                if ( colorFrame != null )
                {
                    var colorPixels = new byte[colorFrame.PixelDataLength];
                    colorFrame.CopyPixelDataTo( colorPixels );

                    ImageRgb.Source = BitmapSource.Create( colorFrame.Width, colorFrame.Height, 96, 96,
                        PixelFormats.Bgr32, null, colorPixels, colorFrame.Width * 4 );
                }
            }

            // Feed the depth data to the interaction stream.
            // GetRawPixelData() is an extension method implemented by the
            // interaction library.
            using ( var depthFrame = e.OpenDepthImageFrame() )
            {
                if ( depthFrame != null )
                {
                    stream.ProcessDepth( depthFrame.GetRawPixelData(), depthFrame.Timestamp );
                }
            }

            // Feed the skeleton data (plus accelerometer reading) to the
            // interaction stream.
            using ( var skeletonFrame = e.OpenSkeletonFrame() )
            {
                if ( skeletonFrame != null )
                {
                    var skeletonData = new Skeleton[skeletonFrame.SkeletonArrayLength];
                    skeletonFrame.CopySkeletonDataTo( skeletonData );

                    stream.ProcessSkeleton( skeletonData, kinect.AccelerometerGetCurrentReading(), skeletonFrame.Timestamp );
                }
            }
        }
        private void runtime_AllFrameReady(object sender, AllFramesReadyEventArgs e)
        {
            // Push the latest depth frame into the background stream.
            using (var depth = e.OpenDepthImageFrame())
            {
                if (depth != null)
                {
                    this.backgroundstream.ProcessDepth(depth.GetRawPixelData(), depth.Timestamp);
                }
            }

            // Push the latest color frame into the background stream.
            using (var color = e.OpenColorImageFrame())
            {
                if (color != null)
                {
                    this.backgroundstream.ProcessColor(color.GetRawPixelData(), color.Timestamp);
                }
            }

            // Push the latest skeleton data into the background stream.
            using (var skelFrame = e.OpenSkeletonFrame())
            {
                if (skelFrame != null)
                {
                    skelFrame.CopySkeletonDataTo(this.skeletons);
                    this.backgroundstream.ProcessSkeleton(this.skeletons, skelFrame.Timestamp);
                }
            }

            // Update the skeleton selection after the new data is in.
            this.ChooseSkeleton();
        }
Beispiel #26
0
        void kinect_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // Show the RGB camera image.
            // FIX: the original called e.OpenColorImageFrame().ToBitmapSource()
            // inline — the frame (IDisposable) was never disposed, and
            // OpenColorImageFrame() can return null when no frame is ready.
            using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
            {
                if (colorFrame != null)
                {
                    image1.Source = colorFrame.ToBitmapSource();
                }
            }

            // Fetch the skeleton frame.
            // FIX: SkeletonFrame is IDisposable; the original leaked it.
            using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
            {
                if (skeletonFrame == null)
                {
                    return;
                }

                // Copy out the skeleton data.
                Skeleton[] skeletonData = new Skeleton[skeletonFrame.SkeletonArrayLength];
                skeletonFrame.CopySkeletonDataTo(skeletonData);

                // Overlay image2 centered on each player's tracked head.
                foreach (var skeleton in skeletonData)
                {
                    var head = skeleton.Joints[JointType.Head];
                    if (head.TrackingState == JointTrackingState.Tracked)
                    {
                        // Map the head from skeleton space into color-image space.
                        ColorImagePoint point = kinect.MapSkeletonPointToColor(head.Position, kinect.ColorStream.Format);
                        var x = image2.Width / 2;
                        var y = image2.Height / 2;

                        image2.Margin     = new Thickness(point.X - x, point.Y - y, 0, 0);
                        image2.Visibility = System.Windows.Visibility.Visible;
                    }
                }
            }
        }
Beispiel #27
0
        // Per-frame pipeline: buffer depth pixels, paint the color image onto
        // the canvas, then map the first tracked skeleton's joints into color
        // space and run gesture detection on them.
        void sensorAllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // PERF FIX: reuse the depth buffer between events instead of
            // allocating a large DepthImagePixel[] on every frame (~30 fps).
            int depthLength = sensor.DepthStream.FramePixelDataLength;
            if (depthImagePixels == null || depthImagePixels.Length != depthLength)
            {
                depthImagePixels = new DepthImagePixel[depthLength];
            }

            using (var frame = e.OpenDepthImageFrame())
            {
                if (frame == null)
                {
                    return;
                }

                frame.CopyDepthImagePixelDataTo(depthImagePixels);
            }

            using (var frame = e.OpenColorImageFrame())
            {
                if (frame == null)
                {
                    return;
                }

                // Paint the camera image behind the canvas contents.
                var bitmap = CreateBitmap(frame);
                VideoCanvas.Background = new ImageBrush(bitmap);
            }

            using (var frame = e.OpenSkeletonFrame())
            {
                if (frame == null)
                {
                    return;
                }

                var skeletons = new Skeleton[frame.SkeletonArrayLength];
                frame.CopySkeletonDataTo(skeletons);
                var skeleton = skeletons.FirstOrDefault(sk => sk.TrackingState == SkeletonTrackingState.Tracked);
                if (skeleton == null)
                {
                    return;
                }

                // Joint positions needed by the gesture detector.
                var rightHandPosition     = skeleton.Joints[JointType.HandRight].Position;
                var leftHandPosition      = skeleton.Joints[JointType.HandLeft].Position;
                var headPosition          = skeleton.Joints[JointType.Head].Position;
                var armsPosition          = skeleton.Joints[JointType.ShoulderCenter].Position;
                var shoulderLeftPosition  = skeleton.Joints[JointType.ShoulderLeft].Position;
                var shoulderRightPosition = skeleton.Joints[JointType.ShoulderRight].Position;
                var hipCenterPosition     = skeleton.Joints[JointType.HipCenter].Position;

                // PERF FIX: use the sensor's cached mapper — constructing a
                // new CoordinateMapper(sensor) every frame rebuilds its
                // internal mapping tables.
                var mapper = sensor.CoordinateMapper;

                var rightHandCoord     = mapper.MapSkeletonPointToColorPoint(rightHandPosition, ColorImageFormat.RgbResolution640x480Fps30);
                var headCoord          = mapper.MapSkeletonPointToColorPoint(headPosition, ColorImageFormat.RgbResolution640x480Fps30);
                var armsCenterCoord    = mapper.MapSkeletonPointToColorPoint(armsPosition, ColorImageFormat.RgbResolution640x480Fps30);
                var shoulderLeftCoord  = mapper.MapSkeletonPointToColorPoint(shoulderLeftPosition, ColorImageFormat.RgbResolution640x480Fps30);
                var shoulderRightCoord = mapper.MapSkeletonPointToColorPoint(shoulderRightPosition, ColorImageFormat.RgbResolution640x480Fps30);
                var leftHandCoord      = mapper.MapSkeletonPointToColorPoint(leftHandPosition, ColorImageFormat.RgbResolution640x480Fps30);
                var hipCenterCoord     = mapper.MapSkeletonPointToColorPoint(hipCenterPosition, ColorImageFormat.RgbResolution640x480Fps30);

                this.DetectGestures(headCoord, rightHandCoord, leftHandCoord, armsCenterCoord, shoulderLeftCoord, shoulderRightCoord, hipCenterCoord);
            }
        }
 // Draws the color image with joint markers, a distance bar and the
 // volume-control lines for the first tracked skeleton.
 private void AllFramesReady(object sender, AllFramesReadyEventArgs e)
 {
     using (ColorImageFrame colorImage = e.OpenColorImageFrame())
     using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
     {
         if (colorImage == null || skeletonFrame == null)
         {
             return;
         }

         colorImage.CopyPixelDataTo(bitmappixels);
         skeletonFrame.CopySkeletonDataTo(skeletons);
         bitmap.WritePixels(updateRect, bitmappixels, bitmap.PixelWidth * sizeof(int), 0);

         using (DrawingContext dc = drawingGroup.Open())
         {
             dc.DrawImage(bitmap, drawingRect);

             foreach (Skeleton skeleton in skeletons)
             {
                 if (skeleton.TrackingState != SkeletonTrackingState.Tracked)
                 {
                     continue;
                 }

                 // Mark every tracked joint with a green dot in depth space.
                 foreach (Joint joint in skeleton.Joints)
                 {
                     if (joint.TrackingState == JointTrackingState.Tracked)
                     {
                         var depthPoint = sensor.MapSkeletonPointToDepth(joint.Position, DepthImageFormat.Resolution640x480Fps30);
                         dc.DrawEllipse(Brushes.Green, null, new Point(depthPoint.X, depthPoint.Y), 15, 15);
                     }
                 }

                 // Distance bar, the two volume-control lines (raw and
                 // rotated by the current angle), and the angle readout.
                 dc.DrawRectangle(Brushes.Red, null, new Rect(0.0, 0.0, distance1.Distance, 50.0));
                 dc.DrawLine(new Pen(Brushes.Blue, 10), volume1.MiddlePoint, volume1.RightHandLocation);
                 var rotation = Matrix.Identity;
                 rotation.RotateAt(volume1.Angle, volume1.MiddlePoint.X, volume1.MiddlePoint.Y);
                 dc.DrawLine(new Pen(Brushes.Blue, 10), volume1.MiddlePoint, rotation.Transform(volume1.RightHandLocation));
                 dc.DrawText(new FormattedText(volume1.Angle.ToString(), CultureInfo.CurrentCulture, FlowDirection.LeftToRight, new Typeface("MS Gothic"), 150, Brushes.Blue), new Point());

                 // Only the first tracked skeleton is drawn.
                 break;
             }
         }
     }
 }
        // Fires when color, depth and skeleton frames are all ready for use.
        void sensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
            {
                if (colorFrame == null)
                {
                    return;
                }

                // Copy the raw pixels and show them in the video element.
                byte[] pixelData = new byte[colorFrame.PixelDataLength];
                colorFrame.CopyPixelDataTo(pixelData);

                // Row stride: 4 bytes per Bgr32 pixel.
                int rowStride = colorFrame.Width * 4;

                Vid.Source = BitmapSource.Create(colorFrame.Width, colorFrame.Height, 96, 96, PixelFormats.Bgr32, null, pixelData, rowStride);
            }

            // Find a skeleton to follow; nothing more to do when none is tracked.
            Skeleton me = null;
            getSkeleton(e, ref me);
            if (me == null)
            {
                return;
            }

            getCameraPoint(me, e);
        }
        void kinect_AllFramesReady( object sender, AllFramesReadyEventArgs e )
        {
            // Display the infrared image (16-bit grayscale).
            using ( ColorImageFrame colorFrame = e.OpenColorImageFrame() )
            {
                if ( colorFrame != null )
                {
                    // Grab the raw infrared pixel data.
                    byte[] infraredPixels = new byte[colorFrame.PixelDataLength];
                    colorFrame.CopyPixelDataTo( infraredPixels );

                    imageInfrared.Source = BitmapSource.Create( colorFrame.Width, colorFrame.Height,
                        96, 96, PixelFormats.Gray16, null, infraredPixels,
                        colorFrame.Width * colorFrame.BytesPerPixel );
                }
            }

            // Display the depth data.
            using ( DepthImageFrame depthFrame = e.OpenDepthImageFrame() )
            {
                if ( depthFrame != null )
                {
                    // Convert to a viewable grayscale image by inverting the
                    // bits of each depth sample.
                    short[] depthPixels = new short[depthFrame.PixelDataLength];
                    depthFrame.CopyPixelDataTo( depthPixels );

                    for ( int i = 0; i < depthPixels.Length; i++ )
                    {
                        depthPixels[i] = (short)~depthPixels[i];
                    }

                    imageDepth.Source = BitmapSource.Create( depthFrame.Width, depthFrame.Height,
                        96, 96, PixelFormats.Gray16, null, depthPixels,
                        depthFrame.Width * depthFrame.BytesPerPixel );
                }
            }
        }
Beispiel #31
0
 // Records every available frame type while a recording session is active.
 private void OnAllFramesReady(object sender, AllFramesReadyEventArgs e)
 {
     // Nothing to capture unless we are recording.
     if (!Recording)
     {
         return;
     }

     using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
     {
         if (skeletonFrame != null)
         {
             KinectSkeletonRecorder.Record(skeletonFrame);
         }
     }

     using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
     {
         if (colorFrame != null)
         {
             KinectColorRecorder.Record(colorFrame);
         }
     }

     using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
     {
         if (depthFrame != null)
         {
             KinectDepthRecorder.Record(depthFrame);
         }
     }
 }
Beispiel #32
0
        // Receives the update notification for all frame types.
        void kinect_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // Every frame type is IDisposable, so each is wrapped in a using.
            using (var rgbFrame = e.OpenColorImageFrame())
            {
                if (rgbFrame != null)
                {
                    imageRgbCamera.Source = rgbFrame.ToBitmapSource();
                }
            }

            using (var rangeFrame = e.OpenDepthImageFrame())
            {
                if (rangeFrame != null)
                {
                    imageDepthCamera.Source = rangeFrame.ToBitmapSource();
                }
            }

            using (var skelFrame = e.OpenSkeletonFrame())
            {
                if (skelFrame != null)
                {
                    // Draw the skeleton joint positions.
                    ShowSkeleton(skelFrame);
                }
            }
        }
        // Copies each color frame into a reusable WriteableBitmap for display,
        // rebuilding the buffers whenever the color format changes.
        private void KinectSensorOnAllFramesReady(object sender, AllFramesReadyEventArgs allFramesReadyEventArgs)
        {
            using (var colorImageFrame = allFramesReadyEventArgs.OpenColorImageFrame())
            {
                if (colorImageFrame == null)
                {
                    return;
                }

                // On a format change, resize the pixel buffer and replace the
                // bitmap shown by the ColorImage control.
                if (this.currentColorImageFormat != colorImageFrame.Format)
                {
                    this.currentColorImageFormat  = colorImageFrame.Format;
                    this.colorImageData           = new byte[colorImageFrame.PixelDataLength];
                    this.colorImageWritableBitmap = new WriteableBitmap(
                        colorImageFrame.Width, colorImageFrame.Height, 96, 96, PixelFormats.Bgr32, null);
                    ColorImage.Source = this.colorImageWritableBitmap;
                }

                // Blit this frame's pixels into the shared bitmap.
                colorImageFrame.CopyPixelDataTo(this.colorImageData);
                this.colorImageWritableBitmap.WritePixels(
                    new Int32Rect(0, 0, colorImageFrame.Width, colorImageFrame.Height),
                    this.colorImageData,
                    colorImageFrame.Width * Bgr32BytesPerPixel,
                    0);
            }
        }
Beispiel #34
0
 // Displays the color frame and draws the slider-selected rectangle on an
 // OpenCV (Emgu) copy of the image.
 private void sensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
 {
     using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
     {
         if (colorFrame == null)
         {
             return;
         }

         // Display the raw color frame.
         colorFrame.CopyPixelDataTo(this.colorPixels);
         this.colorBitmap.WritePixels(
             new Int32Rect(0, 0, this.colorBitmap.PixelWidth, this.colorBitmap.PixelHeight),
             this.colorPixels,
             this.colorBitmap.PixelWidth * sizeof(int),
             0);

         // Rectangle from the start/destination sliders; degenerate (w or h
         // zero) when the "destination" is before the "start".
         int sx = (int)this.sld_c1_sX.Value;
         int sy = (int)this.sld_c1_sY.Value;
         int dx = (int)this.sld_c1_dX.Value;
         int dy = (int)this.sld_c1_dY.Value;
         int w  = dx >= sx ? dx - sx : 0;
         int h  = dy >= sy ? dy - sy : 0;

         // Rectangle center.
         float cx = (float)sx + ((float)w) / 2;
         float cy = (float)sy + ((float)h) / 2;

         // LEAK FIX: Image<Bgr,Byte> and the intermediate System.Drawing.Bitmap
         // both wrap unmanaged memory; the original allocated them every frame
         // without disposing, leaking native memory at camera frame rate.
         using (System.Drawing.Bitmap frameBitmap = colorBitmap.ToBitmap())
         using (Image<Bgr, Byte> openCVImg = new Image<Bgr, byte>(frameBitmap))
         {
             box = new MCvBox2D(new PointF(cx, cy), new SizeF(new PointF((float)w, (float)h)), 0);
             openCVImg.Draw(box, new Bgr(System.Drawing.Color.Green), 4);
             // ToBitmapSource copies the pixels into a WPF BitmapSource, so
             // disposing the Emgu image afterwards is safe.
             this.cimg_cage4.Source = ImageHelpers.ToBitmapSource(openCVImg);
         }
     }
 }
        // Shows the color frame and forwards the first skeleton (when present)
        // to the camera-point computation.
        void sensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            using (ColorImageFrame frame = e.OpenColorImageFrame())
            {
                if (frame == null)
                {
                    return;
                }

                // Render the raw color pixels (Bgr32 -> 4 bytes per pixel).
                byte[] pixelData = new byte[frame.PixelDataLength];
                frame.CopyPixelDataTo(pixelData);
                imagecolor.Source = BitmapSource.Create(frame.Width, frame.Height, 96, 96, PixelFormats.Bgr32, null, pixelData, frame.Width * 4);

                // Only proceed when a skeleton is available.
                Skeleton first = GetFirstSkeleton(e);
                if (first != null)
                {
                    GetCameraPoint(first, e);
                }
            }
        }
Beispiel #36
0
        /// <summary>
        /// Event handler for the Kinect sensor's AllFramesReady event.
        /// Buffers the latest depth and color frames, maps the depth frame
        /// into color space, and repaints the color bitmap.
        /// (FIX: the old summary referred to the wrong event, DepthFrameReady.)
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void SensorAllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // In the middle of shutting down, so nothing to do.
            if (this.sensor == null)
            {
                return;
            }

            bool depthReceived = false;
            bool colorReceived = false;

            using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
            {
                if (depthFrame != null)
                {
                    // Copy the pixel data from the image to a temporary array.
                    depthFrame.CopyDepthImagePixelDataTo(this.depthPixels);
                    depthReceived = true;
                }
            }

            using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
            {
                if (colorFrame != null)
                {
                    // Copy the pixel data from the image to a temporary array.
                    colorFrame.CopyPixelDataTo(this.colorPixels);
                    colorReceived = true;
                }
            }

            // Do our processing outside of the using blocks so that frames are
            // returned to the Kinect runtime as soon as possible.
            if (depthReceived && colorReceived)
            {
                this.sensor.CoordinateMapper.MapDepthFrameToColorFrame(
                    DepthFormat,
                    this.depthPixels,
                    ColorFormat,
                    this.colorCoordinates);

                // Write the pixel data into our bitmap.
                // (FIX: removed the dead inner `if (true == colorReceived)` —
                // it was already guaranteed true on this branch.)
                this.colorBitmap.WritePixels(
                    new Int32Rect(0, 0, this.colorBitmap.PixelWidth, this.colorBitmap.PixelHeight),
                    this.colorPixels,
                    this.colorBitmap.PixelWidth * sizeof(int),
                    0);
            }
        }
Beispiel #37
0
        // Handler for the AllFramesReady event.
        // (Grabs the camera image and draws a mask over the face region.)
        private void AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            KinectSensor kinect = sender as KinectSensor;

            // Update the capture-timer readout.
            CapTimer.Text = ("" + capTimer);
            if (dataFlag == true)
            {
                // While capture is enabled, count up; at 300 ticks stop
                // capturing and reset to -90 (presumably a cooldown before the
                // counter reaches zero again — TODO confirm).
                capTimer++;
                if (capTimer >= 300)
                {
                    dataFlag = false;
                    capTimer = -90;
                }
            }
            using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
                // using (DepthImageFrame depthFrame = e.OpenDepthImageFrame()) /*TEST_DEPTHFRAME*/
                using (SkeletonFrame skelFrame = e.OpenSkeletonFrame())
                {
                    if (colorFrame != null
                        // && depthFrame != null /*TEST_DEPTHFRAME*/
                        && skelFrame != null
                        )
                    {
                        // Buffer the image (and optionally depth) plus skeleton data.
                        colorFrame.CopyPixelDataTo(pixelBuffer);
                        // depthFrame.CopyPixelDataTo(depthBuffer); /*TEST_DEPTHFRAME*/
                        skelFrame.CopySkeletonDataTo(skeletonBuffer);

                        // Record the instructor's head position from the skeleton.
                        getHeadPoints(skelFrame);
                        // Record the object's position (depth frame intentionally
                        // passed as null while TEST_DEPTHFRAME is disabled).
                        getBoxPoints(colorFrame, null /*TEST_DEPTHFRAME*/);
                    }
                }
            // Persist this frame's log data.
            writeLog();

            // Draw the output. NOTE(review): this opens the color frame a
            // second time within the same event rather than reusing the one
            // above — confirm whether that is intentional.
            using (ColorImageFrame imageFrame = e.OpenColorImageFrame())
            {
                if (imageFrame != null)
                {
                    fillBitmap(kinect, imageFrame);
                }
            }
        }
 // Re-entrancy guard: drop incoming events while a frame is still being processed.
 bool working = false; // Skip frames if we're still processing stuff.

 // Calibration pass. Walks the user through five on-screen codes (indices
 // 0..4): each time a touch is detected on the current code, the next one is
 // shown. After the last code, computes the calibration coefficients, derives
 // the calibrated screen rectangle from the first and last code positions,
 // swaps this handler for the normal one, and signals completion.
 public void CalibrationAllFramesReady(object sender, AllFramesReadyEventArgs e)
 {
     if (working) return;
     working = true;

     using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
     {
         if (colorFrame == null)
         {
             working = false;
             return;
         }

         using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
         {
             if (depthFrame == null)
             {
                 working = false;
                 return;
             }

             //byte[] pixels = new byte[colorFrame.PixelDataLength];
             //colorFrame.CopyPixelDataTo(pixels);
             //int stride = colorFrame.Width * 4;
             //debugImage.Source = BitmapSource.Create(colorFrame.Width, colorFrame.Height, 96, 96, PixelFormats.Bgr32, null, pixels, stride);
             //debugImage.Visibility = Visibility.Visible;

             // Index of the code the user touched; negative (per the >= 0
             // check below) when no touch was found this frame.
             //int code_num = find_code(colorFrame, depthFrame);
             int code_num = find_touch(colorFrame, depthFrame);
             if (code_num >= 0)
             {
                 // Make the next code visible.
                 if (code_num < 4)
                 {
                     codes[code_num].Visibility = Visibility.Hidden;
                     codes[code_num + 1].Visibility = Visibility.Visible;
                     next_code_num++;
                     // NOTE(review): Thread.Sleep blocks the Kinect event
                     // thread for 3 s; frames arriving meanwhile are dropped
                     // by the 'working' guard. Confirm the stall is intended.
                     Thread.Sleep(3000);
                 }
                 else
                 {
                     Thread.Sleep(3000);
                     // We are done. Calculate the coefficients.
                     sensor.AllFramesReady -= this.CalibrationAllFramesReady;
                     codes[4].Visibility = Visibility.Hidden;
                     kinectController.calibration_coefficients = get_calibration_coeffs();
                     
                     // Calibrated rectangle: first/last code centers inset by
                     // fixed fractions of the code size.
                     Point center_top_left = code_points[0];
                     Point center_bot_right = code_points[4];
                     kinectController.Calibrate((int)(center_top_left.X + 1.25*code_size.X), (int)(center_top_left.Y + 0.7*code_size.Y), (int)(center_bot_right.X - 1.25*code_size.X), (int)(center_bot_right.Y - 0.8*code_size.Y));
                     sensor.AllFramesReady += kinectController.SensorAllFramesReady;
                     CalibrationDidComplete();
                 }
             }
         }
     }

     working = false;
 }
Beispiel #39
0
        // Renders the color frame, then drives the Windows mouse cursor from
        // the first tracked skeleton's right hand; distances between joints
        // trigger click actions via the NatiteMethods P/Invoke wrappers.
        void Kinect_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // Blit the latest color frame into the shared bitmap.
            using (var Frame = e.OpenColorImageFrame())
            {
                if (Frame != null)
                {
                    Frame.CopyPixelDataTo(Pixels);
                    Bitmap.WritePixels(
                        new Int32Rect(0, 0, Bitmap.PixelWidth, Bitmap.PixelHeight),
                        Pixels, Bitmap.PixelWidth * sizeof(int), 0);
                }
            }

            using (var Frame = e.OpenSkeletonFrame())
            {
                if (Frame != null)
                {
                    var skeletons = new Skeleton[Frame.SkeletonArrayLength];
                    Frame.CopySkeletonDataTo(skeletons);
                    // Use the first tracked skeleton, if any.
                    var skel = (from s in skeletons where s.TrackingState == SkeletonTrackingState.Tracked select s).FirstOrDefault();
                    if (skel != null)
                    {
                        var left_hand = skel.Joints[JointType.HandLeft];
                        var rite_hand = skel.Joints[JointType.HandRight];
                        var shoulderRight = skel.Joints[JointType.ShoulderRight];
                        var fleft = skel.Joints[JointType.FootLeft];
                        var frite = skel.Joints[JointType.FootRight];

                        Joint head = skel.Joints[JointType.Head];

                        // Map the right hand from skeleton space to screen
                        // pixels (constants suggest a 1366x768 screen — TODO
                        // confirm).
                        Point mousePos = new Point((rite_hand.Position.X * 1300 + 683), (rite_hand.Position.Y * -1300 + 768));

                        // Left hand close to the head -> double left-click.
                        if (distance(head.Position, left_hand.Position) < 0.06f) {
                            NatiteMethods.sendMouseDoubleClick(mousePos);

                        // Hands touching -> left button down.
                        // NOTE(review): the original (Russian) comment said
                        // "right mouse button" but the call presses LEFT.
                        } else if(distance(left_hand.Position, rite_hand.Position) < 0.03f) {
                            NatiteMethods.mouseLeftButtonDown(mousePos);

                        // Left hand at the right shoulder -> right-click.
                        // NOTE(review): the original comment said "dragging"
                        // but the call sends a RIGHT click.
                        } else if(distance(shoulderRight.Position, left_hand.Position) < 0.03f) {
                            NatiteMethods.sendMouseRightclick(mousePos);
                        }
                        // BUG(review): compares fleft.Position.Y with itself,
                        // so this condition is always true; the body is fully
                        // commented out, so it currently has no effect.
                        if (fleft.Position.Y <= fleft.Position.Y + 0.5f) {

                            // Get the virtual key code for the character we want to press
                            //int key = 87;
                            //uint vkKey = NatiteMethods.VkKeyScan((char) key);

                            //NatiteMethods.keybd_event(vkKey, 0, 0, 0);
                            //NatiteMethods.keybd_event(vkKey, 0, 2, 0);
                        }

                        // Move the OS cursor to the mapped position every frame.
                        NatiteMethods.SetCursorPos((int) mousePos.X, (int) mousePos.Y);
                    }
                }
            }
        }
Beispiel #40
0
        // Per-frame pipeline: buffer color/depth/skeleton data, run gesture
        // recognition and face tracking for each tracked skeleton, then copy
        // the color pixels into a GDI+ Bitmap for display.
        private void SensorAllFrameReady(object sender, AllFramesReadyEventArgs e)
        {
            using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
            {
                if (colorFrame != null)
                {
                    colorFrame.CopyPixelDataTo(this.colorPixels);
                }
                else
                {
                    // No color frame: skip the entire update (depth, skeleton
                    // and display all depend on a fresh color buffer).
                    return;
                }
            }

            using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
            {
                if (depthFrame != null)
                {
                    depthFrame.CopyPixelDataTo(this.depthPixelsShort);
                }
            }

            using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
            {
                if (skeletonFrame != null)
                {
                    skeletonFrame.CopySkeletonDataTo(this.skeletonData);
                }
            }

            foreach (Skeleton skeleton in this.skeletonData)
            {
                if (skeleton == null)
                {
                    continue;
                }

                if (skeleton.TrackingState == SkeletonTrackingState.Tracked)
                {
                    // Feed the gesture recognizer, then run face tracking on
                    // the buffered color + depth data for this skeleton.
                    _gestureController.Update(skeleton);

                    faceFrame = faceTracker.Track(this.sensor.ColorStream.Format, this.colorPixels,
                                                  this.sensor.DepthStream.Format, this.depthPixelsShort, skeleton);
                }
            }

            // NOTE(review): a new Bitmap is allocated every frame and the
            // previous one is never disposed here — confirm UpdateColImage
            // takes ownership, otherwise this leaks GDI resources.
            colBitmap = new Bitmap(colorWidth, colorHeight);

            BitmapData colImageBitmapData = colBitmap.LockBits(new Rectangle(0, 0, colBitmap.Width, colBitmap.Height),
                                                               ImageLockMode.WriteOnly,
                                                               colBitmap.PixelFormat);
            IntPtr IptrColImage = colImageBitmapData.Scan0;

            // Copy the pixel bytes straight into the bitmap's locked memory.
            Marshal.Copy(colorPixels, 0, IptrColImage, colorPixels.Length);
            colBitmap.UnlockBits(colImageBitmapData);

            // Display the bitmap.
            UpdateColImage(colBitmap);
        }
Beispiel #41
0
 /// <summary>
 /// Per-frame handler: redraws the UI overlay, plots tracked joints, and maps
 /// skeleton gestures onto the DirectSound buffer (volume or EQ filter control,
 /// depending on <c>mode</c>).
 /// </summary>
 private void AllFramesReady(object sender, AllFramesReadyEventArgs e)
 {
     // Nothing to control until the sound buffer has been created.
     if (secondBuffer == null)
     {
         return;
     }

     using (ColorImageFrame colorImage = e.OpenColorImageFrame())
     using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
     {
         if (colorImage == null || skeletonFrame == null)
         {
             return;
         }

         skeletonFrame.CopySkeletonDataTo(skeletons);
         //colorImage.CopyPixelDataTo(bitmappixels);
         //bitmap.WritePixels(updateRect,bitmappixels,bitmap.PixelWidth*sizeof(int),0);
         using (DrawingContext drawingContext = drawingGroup.Open())
         {
             // Background and hit-test buttons.
             //drawingContext.DrawImage(bitmap,drawingRect);
             drawingContext.DrawRectangle(Brushes.Black, null, drawingRect);
             var redPen = new Pen(Brushes.Red, 5.0);
             drawingContext.DrawGeometry(button.IsHitting ? Brushes.White : null, redPen, button.Geometry);
             drawingContext.DrawGeometry(muteButton.IsHitting ? Brushes.White : null, redPen, muteButton.Geometry);
             //drawingContext.DrawGeometry(boostButton.IsHitting?Brushes.White:null,redPen,boostButton.Geometry);
             drawingContext.DrawGeometry(gateButton.IsHitting ? Brushes.White : null, redPen, gateButton.Geometry);
             // Play/stop indicator.
             drawingContext.DrawText(new FormattedText(secondBuffer.Status.Playing ? "■" : "▶", CultureInfo.CurrentCulture, FlowDirection.LeftToRight, new Typeface("メイリオ"), 44, Brushes.Red), new Point(0, 0));

             foreach (Skeleton skel in skeletons)
             {
                 if (skel.TrackingState != SkeletonTrackingState.Tracked)
                 {
                     continue;
                 }

                 // Plot every tracked joint as a green dot in depth-image coordinates.
                 foreach (Joint joint in skel.Joints)
                 {
                     if (joint.TrackingState == JointTrackingState.Tracked)
                     {
                         var depthPoint = sensor.CoordinateMapper.MapSkeletonPointToDepthPoint(joint.Position, DepthImageFormat.Resolution640x480Fps30);
                         drawingContext.DrawEllipse(Brushes.Green, null, new Point(depthPoint.X, depthPoint.Y), 10, 10);
                     }
                 }

                 if (mode == 1)
                 {
                     // Volume mode: joint distance (clamped at 250) maps linearly
                     // onto the DirectSound attenuation range starting at -10000.
                     drawingContext.DrawLine(new Pen(Brushes.DarkBlue, 20), distance.Joint1Location, distance.Joint2Location);
                     var vol = -10000 + (distance.Distance <= 250 ? distance.Distance : 250) * 35;
                     if (timer.IsEnabled) prevvolume = vol;
                     else secondBuffer.Volume = vol;
                 }
                 else if (mode == 2)
                 {
                     // Filter mode: the right-hand angle drives the parametric EQ gains.
                     var pen = new Pen(new SolidColorBrush(Color.FromArgb(0x7F, 0, 0, 0x8B)), 12.5);
                     drawingContext.DrawLine(pen, volume.MiddlePoint, volume.RightHandLocation);
                     var mat = Matrix.Identity;
                     mat.RotateAt(volume.Angle, volume.MiddlePoint.X, volume.MiddlePoint.Y);
                     drawingContext.DrawLine(pen, volume.MiddlePoint, mat.Transform(volume.RightHandLocation));
                     // BUG FIX: the original wrote "15/180*(-volume.Angle)" — integer
                     // division evaluates 15/180 (and 10/180) to 0, so every gain was
                     // permanently stuck at zero. Use float division instead.
                     settings[0].Gain = 15f / 180f * (-volume.Angle);
                     settings[1].Gain = 10f / 180f * (-volume.Angle);
                     settings[2].Gain = 15f / 180f * volume.Angle;
                     settings[3].Gain = 15f / 180f * volume.Angle;
                     settings[4].Gain = 15f / 180f * volume.Angle;
                     for (int i = 0; i < settings.Length; i++)
                     {
                         var effectInst = (ParamEqEffect)secondBuffer.GetEffects(i);
                         effectInst.AllParameters = settings[i];
                     }
                 }

                 drawingContext.DrawText(new FormattedText("ControlMode:" + (mode == 0 ? "None" : mode == 1 ? "Volume" : "Filter"), CultureInfo.CurrentCulture, FlowDirection.LeftToRight, new Typeface("メイリオ"), 40, Brushes.White), new Point(0, 400));
                 if (muteButton.IsHitting)
                 {
                     secondBuffer.Volume = -10000;
                 }

                 // Only the first tracked skeleton is used.
                 break;
             }
         }
     }
 }
        // Receives the update notification for all frame data.
        void kinect_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // The frame is IDisposable and can be null when the frame was skipped;
            // the original leaked the frame and would throw NullReferenceException
            // on a null frame.
            using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
            {
                if (colorFrame != null)
                {
                    imageRgbCamera.Source = colorFrame.ToBitmapSource();
                }
            }
            //imageDepthCamera.Source = e.OpenDepthImageFrame().ToBitmapSource();

            // Render the skeleton positions.
            ShowSkeleton(e);
        }
 /// <summary>
 /// Shows the RGB camera image, optionally post-processed by the grayscale
 /// depth filter when the checkbox is ticked.
 /// </summary>
 private void kinect_AllFramesReady(object sender, AllFramesReadyEventArgs e)
 {
     // Frames are IDisposable — the original leaked both on every event.
     using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
     {
         byte[] imagem = ObterImagemSensorRGB(colorFrame);

         // bool? == true covers both HasValue and Value in one check.
         if (chkEscalaCinza.IsChecked == true)
         {
             using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
             {
                 ReconhecerDistancia(depthFrame, imagem, 2000);
             }
         }

         if (imagem != null)
             imagemCamera.Source = BitmapSource.Create(kinect.ColorStream.FrameWidth, kinect.ColorStream.FrameHeight, 96, 96, PixelFormats.Bgr32, null, imagem, kinect.ColorStream.FrameBytesPerPixel * kinect.ColorStream.FrameWidth);
     }
 }
        /// <summary>
        /// Displays the color stream and colors the indicator ellipse red when the
        /// face tracker reports a mouth wider than a nose-scaled threshold
        /// (i.e. the mouth appears open), green otherwise.
        /// </summary>
        private void KinectSensorOnAllFramesReady(object sender, AllFramesReadyEventArgs allFramesReadyEventArgs)
        {
            using (var colorImageFrame = allFramesReadyEventArgs.OpenColorImageFrame())
            {
                if (colorImageFrame == null)
                {
                    return;
                }

                // (Re)allocate the pixel buffer and display bitmap whenever the
                // color stream format changes.
                var haveNewFormat = this.currentColorImageFormat != colorImageFrame.Format;
                if (haveNewFormat)
                {
                    this.currentColorImageFormat = colorImageFrame.Format;
                    this.colorImageData = new byte[colorImageFrame.PixelDataLength];
                    this.colorImageWritableBitmap = new WriteableBitmap(
                        colorImageFrame.Width, colorImageFrame.Height, 96, 96, PixelFormats.Bgr32, null);
                    ColorImage.Source = this.colorImageWritableBitmap;
                }

                // Copy the frame into the on-screen bitmap.
                colorImageFrame.CopyPixelDataTo(this.colorImageData);
                this.colorImageWritableBitmap.WritePixels(
                    new Int32Rect(0, 0, colorImageFrame.Width, colorImageFrame.Height),
                    this.colorImageData,
                    colorImageFrame.Width * Bgr32BytesPerPixel,
                    0);

                // Mouth-open detection: the threshold scales with the nose width so
                // it is roughly independent of the user's distance from the sensor.
                double mouthWidth = faceTrackingViewer.mouthWidth;
                double noseWidth = faceTrackingViewer.noseWidth;

                double threshold = noseWidth * modifyValue;

                tBMouthDistance.Text = mouthWidth.ToString();
                tbThreshold.Text = threshold.ToString();

                if (mouthWidth > threshold)
                {
                    elp.Fill = Brushes.Red;
                }
                else
                {
                    elp.Fill = Brushes.Green;
                }
            }
        }
Beispiel #45
0
        /// <summary>
        /// Dispatches the Kinect frames to the managers for the services that are
        /// currently enabled. Skeleton frames are always processed; color and
        /// depth frames only when the corresponding service (or a feature that
        /// needs them) is active. Each manager call is isolated so a failure in
        /// one frame type never prevents the others from being handled.
        /// </summary>
        /// <param name="e">Event data used to open the individual frames.</param>
        private void ManageAllFrame(AllFramesReadyEventArgs e)
        {
            if (!IsRunning)
            {
                return;
            }

            // SkeletonTracking Frame Manager
            using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
            {
                try
                {
                    ManageSkeletonFrame(skeletonFrame);
                }
                catch (Exception ex)
                {
                    // Just log the error
                    Console.Error.WriteLine("Error with skeleton frame : " + ex.Message + " _ " + ex.StackTrace);
                }
            }

            // Color Frame Manager
            if (PropertiesPluginKinect.Instance.EnableColorFrameService)
            {
                using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
                {
                    try
                    {
                        ManageColorFrame(colorFrame);
                    }
                    catch (Exception ex)
                    {
                        // Just log the error
                        Console.Error.WriteLine("Error with color frame : " + ex.Message + " _ " + ex.StackTrace);
                    }
                }
            }

            // Depth Frame Manager — also needed by pointing mode and gesture grip.
            bool depthNeeded = PropertiesPluginKinect.Instance.EnableDepthFrameService
                || PropertiesPluginKinect.Instance.KinectPointingModeEnabled
                || PropertiesPluginKinect.Instance.EnableGestureGrip;
            if (depthNeeded)
            {
                using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
                {
                    try
                    {
                        ManageDepthFrame(depthFrame);
                    }
                    catch (Exception ex)
                    {
                        // Just log the error
                        Console.Error.WriteLine("Error with depth frame : " + ex.Message + " _ " + ex.StackTrace);
                    }
                }
            }
        }
 // Receives the update notification for all frame data and renders the screen.
 void kinect_AllFramesReady(object sender, AllFramesReadyEventArgs e)
 {
     // Both frames are IDisposable, so stack the using declarations.
     using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
     using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
     {
         RenderScreen(colorFrame, depthFrame);
     }
 }
Beispiel #47
0
        // Copies the latest color frame into the display bitmap.
        // A null frame means this frame was dropped — nothing to draw.
        void NewSensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            using (var colorFrame = e.OpenColorImageFrame())
            {
                if (colorFrame != null)
                {
                    Coding4Fun.Kinect.Wpf.WriteableBitmapHelper.WritePixelsForColorImageFrame(colorFrame, _colorBitmap);
                }
            }
        }
Beispiel #48
0
        /// <summary>
        /// Copies the latest color frame into the display bitmap.
        /// </summary>
        void mySensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // Use a using block so the frame is disposed even if WritePixels
            // throws — the original only disposed on the happy path.
            using (ColorImageFrame c = e.OpenColorImageFrame())
            {
                if (c == null) return;

                c.CopyPixelDataTo(myColorArray);

                // NOTE(review): a stride of PixelWidth * 2 implies a 16-bit pixel
                // format; the usual Bgr32 color stream would need PixelWidth * 4.
                // Confirm against myBitmap's pixel format before changing.
                myBitmap.WritePixels(
                            new Int32Rect(0, 0, myBitmap.PixelWidth, myBitmap.PixelHeight),
                            myColorArray,
                            myBitmap.PixelWidth * 2,
                            0);
            }
        }
        /// <summary>
        /// Copies the latest color frame into the writable bitmap shown on screen,
        /// reallocating the buffer and bitmap whenever the color format changes.
        /// (Dead commented-out distraction/session code removed.)
        /// </summary>
        private void KinectSensorOnAllFramesReady(object sender, AllFramesReadyEventArgs allFramesReadyEventArgs)
        {
            using (var colorImageFrame = allFramesReadyEventArgs.OpenColorImageFrame())
            {
                if (colorImageFrame == null)
                {
                    return;
                }

                // Make a copy of the color frame for displaying.
                var haveNewFormat = this.currentColorImageFormat != colorImageFrame.Format;
                if (haveNewFormat)
                {
                    this.currentColorImageFormat = colorImageFrame.Format;
                    this.colorImageData = new byte[colorImageFrame.PixelDataLength];
                    this.colorImageWritableBitmap = new WriteableBitmap(
                        colorImageFrame.Width, colorImageFrame.Height, 96, 96, PixelFormats.Bgr32, null);
                    ColorImage.Source = this.colorImageWritableBitmap;
                }

                colorImageFrame.CopyPixelDataTo(this.colorImageData);
                this.colorImageWritableBitmap.WritePixels(
                    new Int32Rect(0, 0, colorImageFrame.Width, colorImageFrame.Height),
                    this.colorImageData,
                    colorImageFrame.Width * Bgr32BytesPerPixel,
                    0);
            }
        }
        /// <summary>
        /// Captures the latest color picture into <c>bmp</c> and draws the bones of
        /// every tracked skeleton on top of it.
        /// </summary>
        void FramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // Both frames and the Graphics object are IDisposable — the original
            // leaked all three (and allocated an unused pixel buffer).
            using (ColorImageFrame VFrame = e.OpenColorImageFrame())
            {
                if (VFrame == null) return;
                bmp = ImageToBitmap(VFrame);

                using (SkeletonFrame SFrame = e.OpenSkeletonFrame())
                {
                    if (SFrame == null) return;

                    Skeleton[] Skeletons = new Skeleton[SFrame.SkeletonArrayLength];
                    SFrame.CopySkeletonDataTo(Skeletons);

                    using (Graphics g = Graphics.FromImage(bmp))
                    {
                        foreach (Skeleton S in Skeletons)
                        {
                            if (S.TrackingState != SkeletonTrackingState.Tracked) continue;

                            //body
                            DrawBone(JointType.Head, JointType.ShoulderCenter, S, g);
                            DrawBone(JointType.ShoulderCenter, JointType.Spine, S, g);
                            DrawBone(JointType.Spine, JointType.HipCenter, S, g);
                            //left leg
                            DrawBone(JointType.HipCenter, JointType.HipLeft, S, g);
                            DrawBone(JointType.HipLeft, JointType.KneeLeft, S, g);
                            DrawBone(JointType.KneeLeft, JointType.AnkleLeft, S, g);
                            DrawBone(JointType.AnkleLeft, JointType.FootLeft, S, g);
                            //Right Leg
                            DrawBone(JointType.HipCenter, JointType.HipRight, S, g);
                            DrawBone(JointType.HipRight, JointType.KneeRight, S, g);
                            DrawBone(JointType.KneeRight, JointType.AnkleRight, S, g);
                            DrawBone(JointType.AnkleRight, JointType.FootRight, S, g);
                            //Left Arm
                            DrawBone(JointType.ShoulderCenter, JointType.ShoulderLeft, S, g);
                            DrawBone(JointType.ShoulderLeft, JointType.ElbowLeft, S, g);
                            DrawBone(JointType.ElbowLeft, JointType.WristLeft, S, g);
                            DrawBone(JointType.WristLeft, JointType.HandLeft, S, g);
                            //Right Arm
                            DrawBone(JointType.ShoulderCenter, JointType.ShoulderRight, S, g);
                            DrawBone(JointType.ShoulderRight, JointType.ElbowRight, S, g);
                            DrawBone(JointType.ElbowRight, JointType.WristRight, S, g);
                            DrawBone(JointType.WristRight, JointType.HandRight, S, g);
                        }
                    }
                }
            }
        }
        /// <summary>
        /// Paints the RGB image (after depth processing) as the canvas background
        /// and redraws the user's skeleton.
        /// </summary>
        private void Kinect_AllFramesReady(object sender, AllFramesReadyEventArgs allFrameEvent)
        {
            // All three frames are IDisposable — the original leaked every one of
            // them on each event.
            using (ColorImageFrame colorFrame = allFrameEvent.OpenColorImageFrame())
            using (DepthImageFrame depthFrame = allFrameEvent.OpenDepthImageFrame())
            {
                byte[] imagem = ObterImagemSensorRGB(colorFrame);

                FuncoesProfundidade(depthFrame, imagem, 2000);
                if (imagem != null)
                {
                     canvasKinect.Background = new ImageBrush(BitmapSource.Create(Kinect.ColorStream.FrameWidth,
                                   Kinect.ColorStream.FrameHeight,
                                   96, 96, PixelFormats.Bgr32, null,
                                   imagem, Kinect.ColorStream.FrameBytesPerPixel * Kinect.ColorStream.FrameWidth));
                }
            }

            canvasKinect.Children.Clear();
            using (SkeletonFrame skeletonFrame = allFrameEvent.OpenSkeletonFrame())
            {
                FuncoesEsqueletoUsuario(skeletonFrame);
            }
        }
        // Receives the update notification for all frame data and refreshes the
        // RGB and depth camera images.
        void kinect_AllFramesReady( object sender, AllFramesReadyEventArgs e )
        {
            // The frame is disposable, so wrap it in a using block.
            using (var colorFrame = e.OpenColorImageFrame())
            {
                if (colorFrame != null)
                {
                    imageRgbCamera.Source = colorFrame.ToBitmapSource();
                }
            }

            // The frame is disposable, so wrap it in a using block.
            using (var depthFrame = e.OpenDepthImageFrame())
            {
                if (depthFrame != null)
                {
                    imageDepthCamera.Source = depthFrame.ToBitmapSource();
                }
            }
        }
Beispiel #53
0
        // Copies the newest color frame into the display bitmap; the frame is
        // cleaned up automatically once the pixels have been copied.
        void NewSensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            using (var colorFrame = e.OpenColorImageFrame())
            {
                // A null frame means this one was lost — nothing to display.
                if (colorFrame == null) return;

                colorFrame.CopyPixelDataTo(_colorPixels);
                _colorBitmap.WritePixels(_imageSize, _colorPixels, _stride, 0);
            }
        }
        /// <summary>
        /// Paints the RGB image (optionally grayscale-filtered by depth) as the
        /// canvas background and redraws the user's skeleton.
        /// </summary>
        private void kinect_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            // Frames are IDisposable — the original leaked all of them.
            using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
            {
                byte[] imagem = ObterImagemSensorRGB(colorFrame);

                // bool? == true covers both HasValue and Value in one check.
                if (chkEscalaCinza.IsChecked == true)
                {
                    using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
                    {
                        ReconhecerDistancia(depthFrame, imagem, 2000);
                    }
                }

                if (imagem != null)
                    canvasKinect.Background = new ImageBrush(BitmapSource.Create(kinect.ColorStream.FrameWidth, kinect.ColorStream.FrameHeight,
                                        96, 96, PixelFormats.Bgr32, null, imagem,
                                        kinect.ColorStream.FrameWidth * kinect.ColorStream.FrameBytesPerPixel));
            }

            canvasKinect.Children.Clear();
            using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
            {
                DesenharEsqueletoUsuario(skeletonFrame);
            }
        }
Beispiel #55
0
        /*
        private void Window_Loaded(object sender, RoutedEventArgs e)
        {
          
        }
        */
        /// <summary>
        /// Renders the latest color frame into the image control.
        /// </summary>
        void sensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            using (ColorImageFrame cframe = e.OpenColorImageFrame())
            {
                if (cframe == null)
                    return;

                byte[] cbytes = new byte[cframe.PixelDataLength];
                cframe.CopyPixelDataTo(cbytes);

                // 4 bytes per pixel for Bgr32.
                int stride = cframe.Width * 4;

                // Use the frame's own dimensions instead of the hard-coded 640x480
                // so this keeps working if the color stream resolution changes.
                imgkinect.Source = BitmapImage.Create(cframe.Width, cframe.Height, 96, 96, PixelFormats.Bgr32, null, cbytes, stride);
            }
        }
        /// <summary>
        /// Handles the Kinect AllFramesReady event: opens the color, depth and
        /// skeleton frames, picks a skeleton to track, and raises FrameDataUpdated
        /// with all three. The color and depth frames are deliberately NOT in
        /// using blocks — they are handed to event subscribers inside FrameData
        /// first, and only disposed afterwards in the finally block.
        /// </summary>
        private void Sensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            ColorImageFrame colorFrame = null;
            DepthImageFrame depthFrame = null;
            Skeleton[] skeletonData;

            try
            {
                colorFrame = e.OpenColorImageFrame();
                depthFrame = e.OpenDepthImageFrame();

                // The skeleton frame is only needed to copy the data out, so it
                // can be disposed immediately via using.
                using (var skeletonFrame = e.OpenSkeletonFrame())
                {
                    // If any frame is missing, skip this event entirely
                    // (the finally block still releases the opened frames).
                    if (colorFrame == null || depthFrame == null || skeletonFrame == null)
                        return;

                    skeletonData = new Skeleton[skeletonFrame.SkeletonArrayLength];
                    skeletonFrame.CopySkeletonDataTo(skeletonData);
                }

                // Find a skeleton to track.
                // First see if our old one is good.
                // When a skeleton is in PositionOnly tracking state, don't pick a new one
                // as it may become fully tracked again.
                Skeleton skeletonOfInterest = skeletonData.FirstOrDefault(s => s.TrackingId == this.trackedSkeletonId && s.TrackingState != SkeletonTrackingState.NotTracked);

                if (skeletonOfInterest == null)
                {
                    // Old one wasn't around.  Find any skeleton that is being tracked and use it.
                    skeletonOfInterest = skeletonData.FirstOrDefault(s => s.TrackingState == SkeletonTrackingState.Tracked);

                    if (skeletonOfInterest != null)
                        this.trackedSkeletonId = skeletonOfInterest.TrackingId;
                }

                // Notify subscribers; skeletonOfInterest may be null when nobody
                // is currently tracked.
                if (this.FrameDataUpdated != null)
                    this.FrameDataUpdated(this, new FrameData(colorFrame, depthFrame, skeletonOfInterest));
            }
            finally
            {
                // Dispose the frames only after subscribers have consumed them.
                if (colorFrame != null)
                    colorFrame.Dispose();

                if (depthFrame != null)
                    depthFrame.Dispose();
            }
        }
        /// <summary>
        /// Renders the latest color frame into a reusable WriteableBitmap shown in
        /// the RGB image control.
        /// </summary>
        void Sensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
            {
                if (colorFrame == null)
                {
                    return;
                }

                byte[] pixels = new byte[colorFrame.PixelDataLength];
                colorFrame.CopyPixelDataTo(pixels);

                // Allocate the bitmap once and rewrite its pixels each frame; the
                // original created a brand-new WriteableBitmap on every event,
                // churning memory at frame rate for no benefit.
                if (wBitmap == null)
                {
                    wBitmap = new WriteableBitmap(colorFrame.Width,
                                                  colorFrame.Height,
                        // Standard DPI
                                                  96, 96,
                        // Current format for the ColorImageFormat
                                                  PixelFormats.Bgr32,
                        // BitmapPalette
                                                  null);
                }

                wBitmap.WritePixels(
                    // Represents the size of our image
                new Int32Rect(0, 0, colorFrame.Width, colorFrame.Height),
                    // Our image data
                pixels,
                    // How much bytes are there in a single row?
                colorFrame.Width * colorFrame.BytesPerPixel,
                    // Offset for the buffer, where does he need to start
                0);

                this.RGBImage.DisplayImage.Source = wBitmap;
            }
        }
        // Copies the latest color frame into the ImageSource bitmap, creating the
        // bitmap lazily on the first frame received.
        void KinectSensorAllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            using (var frame = e.OpenColorImageFrame())
            {
                // Frame was dropped — nothing to render.
                if (frame == null)
                    return;

                var pixels = new byte[frame.PixelDataLength];
                frame.CopyPixelDataTo(pixels);

                if (ImageSource == null)
                {
                    ImageSource = new WriteableBitmap(frame.Width, frame.Height, 96, 96,
                            PixelFormats.Bgr32, null);
                }

                // Bytes per row for the Bgr32 format.
                var rowStride = frame.Width * PixelFormats.Bgr32.BitsPerPixel / 8;
                var fullFrame = new Int32Rect(0, 0, frame.Width, frame.Height);
                ImageSource.WritePixels(fullFrame, pixels, rowStride, 0);
            }
        }
Beispiel #59
0
		/// <summary>
		/// Displays the latest color frame and, when requested, saves a timestamped
		/// JPEG snapshot of it.
		/// </summary>
		void RuntimeColorFrameReady(AllFramesReadyEventArgs e)
		{
			using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
			{
				if (colorFrame == null)
				{
					return;
				}

				// Convert once and reuse the bitmap for both display and saving;
				// the original called ToBitmap() twice, allocating a second bitmap
				// just for the snapshot.
				var bitmap = colorFrame.ToBitmap();
				// NOTE(review): the previously displayed bitmap is never disposed
				// when it is replaced here — consider disposing the old
				// ColorImage.Image to avoid GDI handle buildup. Confirm ownership first.
				ColorImage.Image = bitmap;

				if (_saveColorFrame)
				{
					_saveColorFrame = false;
					bitmap.Save(DateTime.Now.ToString("yyyyMMddHHmmss") + "_color.jpg", ImageFormat.Jpeg);
				}
			}
		}
        /// <summary>
        /// Processes the depth and color frames, then encodes a 320x240 PNG of the
        /// current color image as a Base64 string for web clients.
        /// </summary>
        public void SensorAllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            bool depthReceived = false;
            bool colorReceived = false;

            using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
            {
                if (null != depthFrame)
                {
                    // Copy the pixel data from the image to a temporary array
                    depthFrame.CopyDepthImagePixelDataTo(depthPixels);
                    depthReceived = true;
                }
            }

            using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
            {
                if (null != colorFrame)
                {
                    // Done by WSRKinectMacro
                    colorReceived = true;
                }
            }

            if (depthReceived)
            {
                HandleDepth();
            }

            if (colorReceived)
            {
                HandleColor();
            }

            // Downscale the color bitmap and publish it as a Base64 PNG.
            WSRKinect wsr = (WSRKinect)WSRConfig.GetInstance().GetWSRMicro();
            WriteableBitmap resize = colorBitmap.Resize(320, 240, WriteableBitmapExtensions.Interpolation.Bilinear);
            Bitmap image = wsr.GetColorPNG(resize, false);

            // Dispose both the stream and the bitmap — the original leaked the
            // MemoryStream on every frame.
            using (MemoryStream ms = new MemoryStream())
            {
                image.Save(ms, format);
                image.Dispose();
                byte[] imgByte = ms.ToArray();
                base64String = Convert.ToBase64String(imgByte);
            }
        }