/// <summary>
 /// Converts a depth-image coordinate plus its depth reading into a
 /// camera-space (world) coordinate using the precomputed lookup table.
 /// </summary>
 /// <param name="depthSpacePoint">Pixel position in the depth image.</param>
 /// <param name="depth">Depth value in millimeters.</param>
 /// <returns>The corresponding camera-space point, in meters.</returns>
 public CameraSpacePoint MapDepthPointToCameraSpace(DepthSpacePoint depthSpacePoint, ushort depth)
 {
     // Millimeters -> meters.
     float metres = depth / 1000f;

     // Row-major lookup into the depth-to-camera ray table.
     int tableIndex = (int)(this.depthWidth * depthSpacePoint.Y + depthSpacePoint.X);
     PointF unitRay = this.depthFrameToCameraSpaceTable[tableIndex];

     // Scale the table's unit ray by the measured distance.
     return new CameraSpacePoint
     {
         X = unitRay.X * metres,
         Y = unitRay.Y * metres,
         Z = metres
     };
 }
示例#2
0
 /// <summary>
 /// Initializes a new <c>CustomJoint</c> for the supplied <c>JointType</c>,
 /// with default-constructed positions and an untracked state.
 /// </summary>
 public CustomJoint(JointType type)
 {
     _jointType = type;
     _trackingState = TrackingState.NotTracked;
     _colorPosition = new ColorSpacePoint();
     _depthPosition = new DepthSpacePoint();
     _position = new CameraSpacePoint();
 }
 /// <summary>
 /// Converts a depth-image coordinate plus its depth reading into a
 /// color-image coordinate via the precomputed lookup table.
 /// </summary>
 /// <param name="depthSpacePoint">Pixel position in the depth image.</param>
 /// <param name="depth">Depth value in millimeters.</param>
 /// <returns>The corresponding color-image point.</returns>
 public ColorSpacePoint MapDepthPointToColorSpace(DepthSpacePoint depthSpacePoint, ushort depth)
 {
     // Row-major lookup into the depth-to-color table.
     int tableIndex = (int)(this.depthWidth * depthSpacePoint.Y + depthSpacePoint.X);
     PointF mapped = this.depthFrameToColorSpacfeTable[tableIndex];

     // X is shifted by a depth-dependent disparity term (this.D / depth);
     // presumably D encodes baseline * focal length — TODO confirm.
     return new ColorSpacePoint
     {
         X = mapped.X + (float)(this.D / depth),
         Y = mapped.Y
     };
 }
        /// <summary>
        /// Handles an arriving depth frame: copies the raw depth data, maps one sample
        /// depth pixel to color space via our own calibration, compares the result
        /// against the Kinect SDK's mapper, then converts back to depth space.
        /// </summary>
        void depthFrameReader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
        {
            using (var depthFrame = e.FrameReference.AcquireFrame())
            {
                if (depthFrame == null)
                    return;

                depthFrame.CopyFrameDataToIntPtr(depthImage.DataIntPtr, Kinect2Calibration.depthImageWidth * Kinect2Calibration.depthImageHeight * 2);

                // Sample depth-image coordinate to convert to color-image coordinates.
                int x = 100, y = 100;
                ushort depthImageValue = depthImage[x, y]; // depth image values are in mm

                if (depthImageValue == 0)
                {
                    Console.WriteLine("Sorry, depth value input coordinates is zero");
                    return;
                }

                // Convert mm to m, to match our calibration and the rest of the Kinect SDK.
                float depth = depthImageValue / 1000f;
                double colorX, colorY;
                calibration.DepthImageToColorImage(x, y, depth, out colorX, out colorY);

                //// when converting many points, it may be faster to precompute and pass in the distortion table:
                //var depthFrameToCameraSpaceTable = calibration.ComputeDepthFrameToCameraSpaceTable();
                //calibration.DepthImageToColorImage(x, y, depth, depthFrameToCameraSpaceTable, out colorX, out colorY);

                Console.WriteLine("our color coordinates: {0} {1}", colorX, colorY);

                // Compare against the Kinect SDK's built-in coordinate mapper.
                var depthSpacePoint = new DepthSpacePoint { X = x, Y = y };
                var colorSpacePoint = kinectSensor.CoordinateMapper.MapDepthPointToColorSpace(depthSpacePoint, depthImageValue);
                Console.WriteLine("SDK's color coordinates: {0} {1}", colorSpacePoint.X, colorSpacePoint.Y);

                // Round-trip: convert the color coordinate back into depth space.
                Matrix depthPoint;
                double depthX, depthY;
                calibration.ColorImageToDepthImage(colorX, colorY, depthImage, out depthPoint, out depthX, out depthY);

                //// when converting many points, it may be faster to precompute and pass in the distortion table:
                //var colorFrameToCameraSapceTable = calibration.ComputeColorFrameToCameraSpaceTable();
                //calibration.ColorImageToDepthImage((int)colorX, (int)colorY, depthImage, colorFrameToCameraSapceTable, out depthPoint, out depthX, out depthY);

                Console.WriteLine("convert back to depth: {0} {1}", depthX, depthY);
            }
        }
        /// <summary>
        /// Builds a finger from a depth-space candidate point, projecting it into
        /// color and camera space via the supplied coordinate mapper.
        /// </summary>
        /// <param name="point">Candidate point; X/Y are depth-image coordinates and Z is the depth value — assumed to be in mm, TODO confirm against DepthPointEx.</param>
        /// <param name="coordinateMapper">Kinect coordinate mapper used for the projections.</param>
        internal Finger(DepthPointEx point, CoordinateMapper coordinateMapper)
        {
            // Truncate the point's Z into the ushort depth the mapper expects.
            // (Fix: this local was previously computed but never used — the cast
            // was redundantly repeated at each call site below.)
            ushort depth = (ushort)point.Z;

            DepthPoint = new DepthSpacePoint
            {
                X = point.X,
                Y = point.Y
            };

            ColorPoint = coordinateMapper.MapDepthPointToColorSpace(DepthPoint, depth);

            CameraPoint = coordinateMapper.MapDepthPointToCameraSpace(DepthPoint, depth);
        }
        /// <summary>
        /// Samples the depth frame on a coarse 3-pixel lattice, assigning each sample a
        /// depth that cycles through the sensor's reliable range in fixed increments,
        /// and records the SDK's color- and camera-space mappings for every sample.
        /// </summary>
        /// <param name="sensor">The Kinect sensor whose coordinate mapper is exercised.</param>
        /// <returns>A record containing every sampled calibration point.</returns>
        private static CalibrationRecord Calibrate(KinectSensor sensor)
        {
            var frameDesc = sensor.DepthFrameSource.FrameDescription;
            int frameWidth = frameDesc.Width;
            int frameHeight = frameDesc.Height;

            ushort minDepth = sensor.DepthFrameSource.DepthMinReliableDistance;
            ushort maxDepth = sensor.DepthFrameSource.DepthMaxReliableDistance;

            var record = new CalibrationRecord();
            int sampleDepth = minDepth;
            const int depthIncrement = 777;
            if (depthIncrement >= maxDepth - minDepth || (maxDepth - minDepth) % depthIncrement == 0)
                throw new ArgumentException("Pick an increment which is less than, and not divisible by, maxDepth - minDepth");

            // Walk the depth image on a 3x3-pixel grid (0..width, 0..height).
            for (int col = 0; col < frameWidth; col += 3)
            {
                for (int row = 0; row < frameHeight; row += 3)
                {
                    var depthPoint = new DepthSpacePoint
                    {
                        X = col,
                        Y = row
                    };

                    // Ask the SDK how this (pixel, depth) pair lands in color and camera space.
                    ColorSpacePoint colorPoint = sensor.CoordinateMapper.MapDepthPointToColorSpace(depthPoint, (ushort)sampleDepth);
                    CameraSpacePoint bodyPoint = sensor.CoordinateMapper.MapDepthPointToCameraSpace(depthPoint, (ushort)sampleDepth);

                    record.AddCalibrationPoint(new CalibrationPoint()
                    {
                        DepthPoint = depthPoint,
                        Depth = (ushort)sampleDepth,
                        CameraPoint = bodyPoint,
                        ColorPoint = colorPoint
                    });

                    // Advance the sampled depth, wrapping back into [minDepth, maxDepth).
                    sampleDepth += depthIncrement;
                    if (sampleDepth >= maxDepth)
                        sampleDepth -= maxDepth - minDepth;
                }
            }

            return record;
        }
示例#7
0
        /// <summary>
        /// Multi-source frame handler: renders the infrared, depth, and color frames,
        /// and — when a screenshot or PPM dump is pending and the robot is stationary —
        /// captures paired color/depth data and advances the panorama rotation sequence.
        /// </summary>
        void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            // NOTE(review): AcquireFrame may return null; `reference` is dereferenced
            // below without a null check — confirm this cannot happen in practice.
            var reference = e.FrameReference.AcquireFrame();

            // Infrared
            using (var infraFrame = reference.InfraredFrameReference.AcquireFrame())
            {
                if (infraFrame != null)
                {
                    RenderInfraredPixels(infraFrame);
                }
            }
            // Color
            using (var colorFrame = reference.ColorFrameReference.AcquireFrame())
            {
                // Depth
                using (var depthFrame = reference.DepthFrameReference.AcquireFrame())
                {
                    if (colorFrame != null && depthFrame != null)
                    {
                        var _colorWidth = colorFrame.ColorFrameSource.FrameDescription.Width;
                        var _colorHeight = colorFrame.ColorFrameSource.FrameDescription.Height;
                        var _depthWidth = depthFrame.DepthFrameSource.FrameDescription.Width;
                        var _depthHeight = depthFrame.DepthFrameSource.FrameDescription.Height;

                        using (Microsoft.Kinect.KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
                        {
                            // verify data (buffer size and bitmap dimensions match the
                            // frame description) before rendering the depth frame
                            if (((this.depthFrameDescription.Width * this.depthFrameDescription.Height) == (depthBuffer.Size / this.depthFrameDescription.BytesPerPixel)) &&
                                (this.depthFrameDescription.Width == this.depthBitmap.PixelWidth) && (this.depthFrameDescription.Height == this.depthBitmap.PixelHeight))
                            {
                                // Note: In order to see the full range of depth (including the less reliable far field depth)
                                // we are setting maxDepth to the extreme potential depth threshold
                                ushort maxDepth = ushort.MaxValue;

                                // If you wish to filter by reliable depth distance, uncomment the following line:
                                //// maxDepth = depthFrame.DepthMaxReliableDistance

                                this.ProcessDepthFrameData(depthBuffer.UnderlyingBuffer, depthBuffer.Size, depthFrame.DepthMinReliableDistance, maxDepth);
                                this.RenderDepthPixels();
                            }
                        }

                        using (KinectBuffer colorBuffer = colorFrame.LockRawImageBuffer())
                        {
                            this.colorBitmap.Lock();
                            // verify data and write the new color frame data to the display bitmap
                            if ((_colorWidth == this.colorBitmap.PixelWidth) && (_colorHeight == this.colorBitmap.PixelHeight))
                            {
                                colorFrame.CopyConvertedFrameDataToIntPtr(
                                    this.colorBitmap.BackBuffer,
                                    (uint)(_colorWidth * _colorHeight * 4), // 4 bytes per BGRA pixel
                                    ColorImageFormat.Bgra);

                                this.colorBitmap.AddDirtyRect(new Int32Rect(0, 0, this.colorBitmap.PixelWidth, this.colorBitmap.PixelHeight));
                            }
                            this.colorBitmap.Unlock();
                        }

                        // Capture path: only when requested and the robot is standing
                        // still, so the captured color/depth pair is not motion-blurred.
                        if ((takeScreenshot || dumpPpms) && !robot.IsMoving())
                        {
                            ushort[] depths = new ushort[_depthHeight * _depthWidth];

                            // For every color pixel, compute its corresponding depth-space coordinate.
                            DepthSpacePoint[] mappedColor = new DepthSpacePoint[_colorHeight * _colorWidth];
                            depthFrame.CopyFrameDataToArray(depths);
                            cm.MapColorFrameToDepthSpace(depths, mappedColor);

                            byte[] colors = new byte[_colorHeight * _colorWidth * 4];
                            //this is the byte array that is converted into a ppm in the end, make it rgba form
                            colorFrame.CopyConvertedFrameDataToArray(colors, ColorImageFormat.Rgba);

                            // Stash the captured buffers for the save routine.
                            this.mappedColor = mappedColor;
                            this.depths = depths;
                            this.colors = colors;

                            if (takeScreenshot)
                            {
                                ScreenshotSaveFile();
                                // Keep capturing on subsequent frames while a panorama sweep is active.
                                takeScreenshot = capturePanorama || false;
                            } else if (dumpPpms)
                            {
                                ScreenshotSaveFile();
                                //DumpPpms();
                                dumpPpms = false;
                            }

                            // Kick off another rotation if capturing a panorama
                            if (capturePanorama)
                            {
                                if (numRotations < MAX_ROTATIONS)
                                {
                                    numRotations++;
                                    RotateCW();
                                    StopMoving(rotateTime);
                                    // Let the platform settle before the next capture.
                                    Thread.Sleep(STABILIZE_TIME);

                                } else
                                {
                                    // Panorama complete: reset capture state for the next one.
                                    this.capturePanorama = false;
                                    this.takeScreenshot = false;
                                    this.panoramaNum++;
                                    this.imageNum = 0;
                                    this.numRotations = 0;
                                }
                            }
                        }
                        
                        depthCamera.Source = this.depthBitmap;
                        colorCamera.Source = this.colorBitmap;
                        infraCamera.Source = this.infraBitmap;
                    }
                }
            }      
        }
示例#8
0
        /// <summary>
        /// Extracts a square depth window centered on palm position <paramref name="p"/>,
        /// keeps only pixels near the palm's own depth (stretching their contrast),
        /// optionally records the surviving pixels to a PCD point-cloud file, and runs
        /// Lucas-Kanade optical flow against the previous window for the same hand.
        /// </summary>
        /// <param name="depthFrameData">Pointer to the raw 16-bit depth frame.</param>
        /// <param name="frameSize">Half-width of the window; the loops cover [-frameSize, frameSize).</param>
        /// <param name="minDepth">Not referenced in this body — kept for the callers' signature.</param>
        /// <param name="maxDepth">Not referenced in this body — kept for the callers' signature.</param>
        /// <param name="p">Palm center in depth-image coordinates.</param>
        /// <param name="rec">True when a recording pass is active.</param>
        /// <param name="left">Selects the left (true) or right (false) hand's state and bitmaps.</param>
        private unsafe void ProcessDepthFrameData(IntPtr depthFrameData, int frameSize, ushort minDepth, ushort maxDepth, DepthSpacePoint p, bool rec, bool left)
        {
            ushort* frameData = (ushort*)depthFrameData; // depth frame data is a 16 bit value
            // Depth at the palm center: reference value for the near-palm band below.
            ushort initDepth = frameData[depthFrameDescription.Width * ((int)p.Y) + ((int)p.X)];

            if (rec && (bool)chk_recDepth.IsChecked)
            {
                string file = "";
                //FileCode: [left/right]_[gestureNumber]_[sequence]_[sequneceIndex]
                if (left)   file = String.Format("c:/temp/PCD/pcd/dd_left_{0:00}_{1:00}_{2:00}.pcd", gestureNumber, sequenceID, depthFrameIndexL++);
                else        file = String.Format("c:/temp/PCD/pcd/dd_right_{0:00}_{1:00}_{2:00}.pcd", gestureNumber, sequenceID, depthFrameIndexR++);

                // NOTE(review): pcdData is not wrapped in a using/try-finally; an
                // exception in the loop below would leak the file handle.
                pcdData = new StreamWriter(file, false);
            }

            int distanceFactor = 80; // half-width (mm, presumably) of the accepted band around the palm depth
            int index = 0;
            // assumes windowSize == 2 * frameSize — TODO confirm against callers (they pass windowSize / 2)
            currentFrame = new byte[windowSize * windowSize];

            for (int y = -frameSize; y < frameSize; y++)
            {
                for (int x = -frameSize; x < frameSize; x++)
                {
                    //Select index for smaller frame and get Depth value
                    int offset = (depthFrameDescription.Width * ((int)p.Y + y) + ((int)p.X + x));
                    ushort depth = frameData[offset];

                    // Keep only pixels within +/- distanceFactor of the palm depth; amplify
                    // their offset from the palm (x10) for contrast, zero out the rest.
                    bool isNearPalm = depth < initDepth + distanceFactor && depth > initDepth - distanceFactor;         
                    depth = isNearPalm ? (ushort)(depth + (depth - initDepth) * 10) : (ushort)0;
                    depthPixels[index] = currentFrame[index] = (byte)(depth / MapDepthToByte);
                    index++;

                    //  ==== Record DepthData for nextStep (Segmentation)
                    if ((bool)chk_recDepth.IsChecked && rec)
                    {
                        if (isNearPalm)
                        {
                            var point = Helper.depthToPCD(p.X + (float)x, p.Y + (float)y, depth);
                            // Force '.' decimal separators regardless of the current culture.
                            pcdData.WriteLine(String.Format("{0} {1} {2}", point.X.ToString().Replace(',', '.'), point.Y.ToString().Replace(',', '.'), point.Z.ToString().Replace(',', '.')));
                            pcdData.Flush();
                        }
                    }
                }
            }

            if ((bool)chk_recDepth.IsChecked && rec)
                pcdData.Close();

            //============== Opt Flow ========
            // Compare this frame's window against the previous one for the same hand.
            var thisPreviousFrame = left ? previousFrameL : previousFrameR;
            Image<Gray, byte> prevImg = new Image<Gray, byte>(arrayToBitmap(thisPreviousFrame, frameSize * 2, frameSize * 2));
            Image<Gray, byte> currentImg = new Image<Gray, byte>(arrayToBitmap(currentFrame, frameSize * 2, frameSize * 2));
            Image<Gray, float> flowX = new Image<Gray, float>(new System.Drawing.Size(frameSize * 2, frameSize * 2));
            Image<Gray, float> flowY = new Image<Gray, float>(new System.Drawing.Size(frameSize * 2, frameSize * 2));
            var winSize = new System.Drawing.Size(5, 5);

            try
            {                
                currentImg = currentImg.SmoothMedian(5);
                OpticalFlow.LK(prevImg, currentImg, winSize, flowX, flowY);
                // Collapse the summed X+Y flow bytes into a single mean gray level.
                var bytes = (flowX.Convert<Gray, byte>() + flowY.Convert<Gray, byte>()).Bytes;
                var flow = new Image<Gray,byte>(frameSize * 2, frameSize * 2, new Gray (bytes.Sum(e => e) / bytes.Length));

                if (left)
                {
                    previousFrameL = currentFrame;
                    this.flowBitmapLeft.WritePixels(new Int32Rect(0, 0, flow.Bitmap.Width, flow.Bitmap.Height), flow.Bytes, flow.Bitmap.Width, 0);
                }
                else
                {
                    previousFrameR = currentFrame;
                    this.flowBitmapRight.WritePixels(new Int32Rect(0, 0, flow.Bitmap.Width, flow.Bitmap.Height), flow.Bytes, flow.Bitmap.Width, 0);
                }
            }
            // NOTE(review): swallows every exception type; at minimum the exception
            // detail should be logged rather than a fixed message.
            catch { Console.WriteLine("Optical Flow Exception"); }
            //============== OF ========
        }
示例#9
0
        /// <summary>
        /// Wires up the default Kinect sensor: opens depth/color/body frame readers,
        /// allocates the bitmaps and buffers each pipeline renders into, and opens
        /// the sensor before initializing the WPF component tree.
        /// </summary>
        public KinectApp()
        {
            this.kinectSensor = KinectSensor.GetDefault();

            // Depth pipeline: reader + per-hand depth and optical-flow bitmaps
            // (all windowSize x windowSize, 8-bit grayscale).
            this.depthFrameDescription = this.kinectSensor.DepthFrameSource.FrameDescription;
            this.depthFrameReader = this.kinectSensor.DepthFrameSource.OpenReader();
            this.depthFrameReader.FrameArrived += depthFrameReader_FrameArrived;
            this.depthBitmapLeft = new WriteableBitmap(windowSize, windowSize, 96.0, 96.0, PixelFormats.Gray8, null);
            this.depthBitmapRight = new WriteableBitmap(windowSize, windowSize, 96.0, 96.0, PixelFormats.Gray8, null);
            this.flowBitmapLeft = new WriteableBitmap(windowSize, windowSize, 96.0, 96.0, PixelFormats.Gray8, null);
            this.flowBitmapRight = new WriteableBitmap(windowSize, windowSize, 96.0, 96.0, PixelFormats.Gray8, null);

            // Shared scratch buffer the depth handler renders into.
            this.depthPixels = new byte[windowSize * windowSize];

            // Color pipeline: BGRA frames rendered into a Bgr32 bitmap.
            this.colorFrameDescription = this.kinectSensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);
            this.colorFrameReader = this.kinectSensor.ColorFrameSource.OpenReader();
            this.colorFrameReader.FrameArrived += this.colorFrameReader_FrameArrived;     
            this.colorBitmap = new WriteableBitmap(colorFrameDescription.Width, colorFrameDescription.Height, 96.0, 96.0, PixelFormats.Bgr32, null);

            this.colorFrameData = new byte[colorFrameDescription.Width * colorFrameDescription.Height * BytesPerPixel];

            // Body pipeline: skeleton drawing surface.
            this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();
            this.bodyFrameReader.FrameArrived += bodyFrameReader_FrameArrived;
            
            this.bodyDrawingGroup = new DrawingGroup();
            this.imageSource = new DrawingImage(bodyDrawingGroup);

            // Hand-tracking state: positions start at origin / image center until real
            // body data arrives. (NOTE(review): "Postition" is a typo carried over from
            // the field declarations elsewhere in this class.)
            leftHandPostition = new CameraSpacePoint();
            rightHandPostition = new CameraSpacePoint();
            pl_old = new DepthSpacePoint() { X = depthFrameDescription.Width / 2, Y = depthFrameDescription.Height / 2 };
            pr_old = new DepthSpacePoint() { X = depthFrameDescription.Width / 2, Y = depthFrameDescription.Height / 2 };

            this.coordinateMapper = this.kinectSensor.CoordinateMapper;

            this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            this.kinectSensor.Open();

            this.DataContext = this;

            this.InitializeComponent();
        }
示例#10
0
 /// <summary>
 /// Calculates the angle and updates the arc according to the specified points.
 /// </summary>
 /// <param name="start">The starting point.</param>
 /// <param name="middle">The middle point.</param>
 /// <param name="end">The end point.</param>
 /// <param name="desiredRadius">The desired arc radius.</param>
 public void Update(DepthSpacePoint start, DepthSpacePoint middle, DepthSpacePoint end, double desiredRadius = 0)
 {
     // Delegate to the Vector3 overload after lifting the depth-space points to 3D.
     Update(start.ToVector3(), middle.ToVector3(), end.ToVector3(), desiredRadius);
 }
示例#11
0
        /// <summary>
        /// Depth-frame handler: decimates frames via depthFrameSelector/Threshold,
        /// locates each hand in depth space (falling back to the previous position when
        /// the window would cross the frame edge), extracts a square depth window per
        /// hand via ProcessDepthFrameData, and renders it into the per-hand bitmap.
        /// </summary>
        private void depthFrameReader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
        {
            using (DepthFrame depthFrame = e.FrameReference.AcquireFrame())
            {
                if (depthFrame != null)
                {
                    using (KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
                    {
                        // Sanity check: buffer size matches the advertised frame dimensions.
                        if ((this.depthFrameDescription.Width * this.depthFrameDescription.Height) == (depthBuffer.Size / this.depthFrameDescription.BytesPerPixel))
                        {
                            //Frame handler: recording is active while the button shows "Stop".
                            if (btn_record.Content.Equals("Stop"))
                            {
                                // NOTE(review): the depth FPS label is derived from the body
                                // FPS label's value — confirm this cross-read is intentional.
                                lbl_fpsDepth.Content = Convert.ToUInt64(lbl_fpsBody.Content) + 1;
                                depthFrameSelector++;
                            }
                            bool record = depthFrameSelector == depthFrameThreshold;
                            if (record) depthFrameSelector = 0;

                            // NOTE(review): the selector is incremented a second time here,
                            // unconditionally, and the early return drops every frame that
                            // does not hit the threshold exactly — looks like debug logic.
                            depthFrameSelector++;
                            if (depthFrameSelector != depthFrameThreshold) return;  //########### DELETE (only for testing)

                            int frame = windowSize / 2;
                            DepthSpacePoint pl = coordinateMapper.MapCameraPointToDepthSpace(leftHandPostition);
                            DepthSpacePoint pr = coordinateMapper.MapCameraPointToDepthSpace(rightHandPostition);

                            // === Left hand: reuse the previous position if the window would cross the frame edge.
                            if (pl.X <= frame || pl.X >= depthFrameDescription.Width - frame || pl.Y <= frame || pl.Y >= depthFrameDescription.Height - frame)
                                pl = pl_old;

                            ProcessDepthFrameData(depthBuffer.UnderlyingBuffer, frame, 0, ushort.MaxValue, pl, record, true);
                            this.depthBitmapLeft.WritePixels(
                                new Int32Rect(0, 0, this.depthBitmapLeft.PixelWidth, this.depthBitmapLeft.PixelHeight),
                                this.depthPixels,
                                this.depthBitmapLeft.PixelWidth,
                                0);
                            pl_old = pl;


                            // === Right hand: same edge fallback as above.
                            if (pr.X <= frame || pr.X >= depthFrameDescription.Width - frame || pr.Y <= frame || pr.Y >= depthFrameDescription.Height - frame)
                                pr = pr_old;

                            ProcessDepthFrameData(depthBuffer.UnderlyingBuffer, frame, 0, ushort.MaxValue, pr, record, false);
                            this.depthBitmapRight.WritePixels(
                                new Int32Rect(0, 0, this.depthBitmapRight.PixelWidth, this.depthBitmapRight.PixelHeight),
                                this.depthPixels,
                                this.depthBitmapRight.PixelWidth,
                                0);
                            pr_old = pr;
                        }                                           
                    }
                }
            }
        }
示例#12
0
        /// <summary>
        /// Asynchronous variant of the GPU-based background filter. Not implemented yet.
        /// </summary>
        /// <exception cref="NotImplementedException">Always thrown; the GPU path is not available.</exception>
        public Task<byte[]> FilterGPUAsync(byte[] bgra, ushort[] depth, DepthSpacePoint[] depthSpaceData,
            int nearThresh, int farThresh, int haloSize)
        {
            // Fix: the Task.Run delegation that used to follow the throw was
            // unreachable (compiler warning CS0162). Restore it once FilterGPU works:
            //   return Task.Run(() => FilterGPU(bgra, depth, depthSpaceData, nearThresh, farThresh, haloSize));
            throw new NotImplementedException();
        }
示例#13
0
        /// <summary>
        /// Depth-frame handler: locates each hand in depth space (falling back to the
        /// previous position when the window would cross the frame edge), extracts a
        /// square depth window per hand via ProcessDepthFrameData, renders it into the
        /// per-hand bitmap, and redraws the per-hand overlay drawing groups.
        /// </summary>
        private void depthFrameReader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
        {
            using (DepthFrame depthFrame = e.FrameReference.AcquireFrame())
            {
                if (depthFrame != null)
                {
                    using (KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
                    {
                        // Sanity check: buffer size matches the advertised frame dimensions.
                        if ((this.depthFrameDescription.Width * this.depthFrameDescription.Height) == (depthBuffer.Size / this.depthFrameDescription.BytesPerPixel))
                        {
                            int frame = windowSize / 2;
                            DepthSpacePoint pl = coordinateMapper.MapCameraPointToDepthSpace(leftHandPostition);
                            DepthSpacePoint pr = coordinateMapper.MapCameraPointToDepthSpace(rightHandPostition);

                            //Draw recordState: recording is active while the button shows "Stop".
                            bool record = btn_record.Content.Equals("Stop");
                            if (record)
                            {
                                // NOTE(review): the depth FPS label is derived from the body
                                // FPS label's value — confirm this cross-read is intentional.
                                lbl_fpsDepth.Content = Convert.ToUInt64(lbl_fpsBody.Content) + 1;
                            }

                            // === Left hand: reuse the previous position if the window would cross the frame edge.
                            if (pl.X <= frame || pl.X >= depthFrameDescription.Width - frame || pl.Y <= frame || pl.Y >= depthFrameDescription.Height - frame)
                                pl = pl_old;

                            ProcessDepthFrameData(depthBuffer.UnderlyingBuffer, frame, 0, ushort.MaxValue, pl, record, true);
                            this.depthBitmapLeft.WritePixels(
                                new Int32Rect(0, 0, this.depthBitmapLeft.PixelWidth, this.depthBitmapLeft.PixelHeight),
                                this.depthPixels,
                                this.depthBitmapLeft.PixelWidth,
                                0);

                            using (DrawingContext dc = leftHandDrawingGroup.Open())
                            {
                                dc.DrawImage(LeftHandSource, new Rect(0.0, 0.0, depthFrameDescription.Width, depthFrameDescription.Height));
                            }
                            pl_old = pl;


                            // === Right hand: same edge fallback as above.
                            if (pr.X <= frame || pr.X >= depthFrameDescription.Width - frame || pr.Y <= frame || pr.Y >= depthFrameDescription.Height - frame)
                                pr = pr_old;

                            ProcessDepthFrameData(depthBuffer.UnderlyingBuffer, frame, 0, ushort.MaxValue, pr, record, false);
                            this.depthBitmapRight.WritePixels(
                                new Int32Rect(0, 0, this.depthBitmapRight.PixelWidth, this.depthBitmapRight.PixelHeight),
                                this.depthPixels,
                                this.depthBitmapRight.PixelWidth,
                                0);

                            using (DrawingContext dc = rightHandDrawingGroup.Open())
                            {
                                dc.DrawImage(RightHandSource, new Rect(0.0, 0.0, depthFrameDescription.Width, depthFrameDescription.Height));
                            }
                            pr_old = pr;
                        }
                    }
                }
            }
        }
示例#14
0
        /// <summary>
        /// Calculates the angle between the specified points.
        /// </summary>
        /// <param name="center">The vertex of the angle.</param>
        /// <param name="start">The start of the angle.</param>
        /// <param name="end">The end of the angle.</param>
        /// <returns>The angle, in degrees.</returns>
        public static double Angle(this DepthSpacePoint center, DepthSpacePoint start, DepthSpacePoint end)
        {
            // Express both legs of the angle as vectors out of the vertex.
            Vector3D toStart = start.ToVector3() - center.ToVector3();
            Vector3D toEnd = end.ToVector3() - center.ToVector3();

            return Vector3D.AngleBetween(toStart, toEnd);
        }
 /// <summary>
 /// Updates the position of the cursor.
 /// </summary>
 /// <param name="point">The specified point in the depth space.</param>
 /// <param name="ratioX">The specified horizontal scale ratio.</param>
 /// <param name="ratioY">The specified vertical scale ratio.</param>
 public void Update(DepthSpacePoint point, double ratioX = 1.0, double ratioY = 1.0)
 {
     // Scale the depth-space coordinate, then delegate to the (x, y) overload.
     Update(point.X * ratioX, point.Y * ratioY);
 }
        private void RecordData(RectI faceRegion, FaceFrame faceFrame)
        {
            //record the R, G, B, IR values from the Face Region pixels
            using (var irFrame = faceFrame.InfraredFrameReference.AcquireFrame())
            {
                using (var depthFrame = faceFrame.DepthFrameReference.AcquireFrame())
                {
                    using (var colorFrame = faceFrame.ColorFrameReference.AcquireFrame())
                    {
                        if ((null == irFrame) || (null == colorFrame) || (null == depthFrame)) return;

                        DepthSpacePoint[] depthSpacePoints = new DepthSpacePoint[colorFrame.FrameDescription.Height * colorFrame.FrameDescription.Width];

                        FrameDescription depthFrameDescription = depthFrame.FrameDescription;

                        // Access the depth frame data directly via LockImageBuffer to avoid making a copy
                        using (KinectBuffer depthFrameData = depthFrame.LockImageBuffer())
                        {
                            this.m_CoordMapper.MapColorFrameToDepthSpaceUsingIntPtr(
                                depthFrameData.UnderlyingBuffer,
                                depthFrameData.Size,
                                depthSpacePoints);
                        }

                        //Get the pixels
                        m_colorBitmap.Lock();
                        unsafe
                        {
                            byte* pixels = (byte*)m_colorBitmap.BackBuffer;

                            var startPixel = faceRegion.Left * faceRegion.Top;
                            var endPixel = faceRegion.Right * faceRegion.Bottom;

                            float alpha = 0;
                            float red = 0;
                            float green = 0;
                            float blue = 0;
                            float ir = 0;

                            ushort[] irFrameData = new ushort[irFrame.FrameDescription.Height * irFrame.FrameDescription.Width];
                            irFrame.CopyFrameDataToArray(irFrameData);

                            //Now get the Red, Green, Blue Pixels for the region
                            for (int i = startPixel; i < endPixel; i += 4)
                            {
                                //var pixel = pixels[i];
                                int irIndex = (int)depthSpacePoints[i].X * (int)depthSpacePoints[i].Y;

                                blue += pixels[i]; // << 24;
                                green += pixels[i + 1]; // << 16;
                                red += pixels[i + 2];// << 8;
                                alpha += pixels[i + 3];
                                if (irIndex < irFrameData.Length)
                                    ir += irFrameData[irIndex];
                                else
                                    ir += 0;
                            }
                            float size = Math.Abs(startPixel - endPixel);

                            float avg_alpha = alpha / size;
                            float avg_red = red / size;
                            float avg_green = green / size;
                            float avg_blue = blue / size;
                            float avg_ir = ir / size;

                            double std_alpha = 0;
                            double std_red = 0;
                            double std_green = 0;
                            double std_blue = 0;
                            double std_ir = 0;

                            double var_alpha = 0;
                            double var_red = 0;
                            double var_green = 0;
                            double var_blue = 0;
                            double var_ir = 0;

                            //Now calculate standard deviation
                            for (int i = startPixel; i < endPixel; i += 4)
                            {
                                //var pixel = pixels[i];
                                var_blue = (double)(pixels[i] - avg_blue);
                                std_blue += Math.Pow(var_blue, 2.0);

                                var_green = (pixels[i + 1] - avg_green);
                                std_green += Math.Pow(var_green, 2);

                                var_red = (pixels[i + 2] - avg_red);
                                std_red += Math.Pow(var_red, 2);

                                var_alpha = (pixels[i + 3] - avg_alpha);
                                std_alpha += Math.Pow(var_alpha, 2);

                                int irIndex = (int)depthSpacePoints[i].X * (int)depthSpacePoints[i].Y;
                                if (irIndex < irFrameData.Length)
                                    var_ir = irFrameData[irIndex] - avg_ir;
                                else
                                    var_ir = avg_ir;

                                std_ir += Math.Pow(var_ir, 2);

                            }

                            std_alpha = Math.Sqrt(std_alpha / size);
                            std_red = Math.Sqrt(std_red / size);
                            std_green = Math.Sqrt(std_green / size);
                            std_blue = Math.Sqrt(std_blue / size);
                            std_ir = Math.Sqrt(std_ir / size);

                            double prime_alpha = 0;
                            double prime_red = 0;
                            double prime_green = 0;
                            double prime_blue = 0;
                            double prime_ir = 0;

                            //Now calculate standard deviation
                            for (int i = startPixel; i < endPixel; i += 4)
                            {
                                //var pixel = pixels[i];
                                var_blue = (double)(pixels[i] - avg_blue);
                                prime_blue += var_blue / std_blue;

                                var_green = (pixels[i + 1] - avg_green);
                                prime_green += var_green / std_green;

                                var_red = (pixels[i + 2] - avg_red);
                                prime_red += var_red / std_red;

                                var_alpha = (pixels[i + 3] - avg_alpha);
                                prime_alpha += var_alpha / std_alpha;

                                int irIndex = (int)depthSpacePoints[i].X * (int)depthSpacePoints[i].Y;
                                if (irIndex < irFrameData.Length)
                                    var_ir = irFrameData[irIndex] - avg_ir;
                                else
                                    var_ir = avg_ir;

                                prime_ir += var_ir / std_ir;
                            }

                            double norm_alpha = prime_alpha / size;
                            double norm_red = prime_red / size;
                            double norm_blue = prime_blue / size;
                            double norm_green = prime_green / size;
                            double norm_ir = prime_ir / size;

                            Plot(norm_red, norm_green, norm_blue, norm_ir);

                            jadeCalculation.Write(
                                m_secondsElapsed.ElapsedMilliseconds,
                                norm_alpha, norm_red, norm_green, norm_blue, norm_ir);

                        }
                        m_colorBitmap.Unlock();
                    }
                }
            }
        }
示例#17
0
        /// <summary>
        /// Update the joint position based on the referenced <c>IJoint</c>.
        /// </summary>
        /// <param name="joint">Source joint; must have the same <c>JointType</c> as this instance.</param>
        /// <exception cref="ArgumentException">Thrown when <paramref name="joint"/> has a different joint type.</exception>
        public virtual void Update(IJoint joint)
        {
            // Guard clause: updating from a joint of a different type is a caller error,
            // so throw the specific argument exception rather than a bare Exception.
            if (this.JointType != joint.JointType)
                throw new ArgumentException("Cannot Update with Joint of a different Type", nameof(joint));

            _position = joint.Position;
            // Depth/color positions are reset to defaults here, not re-mapped.
            // NOTE(review): assumes callers re-derive the 2D positions when needed — confirm.
            _depthPosition = new DepthSpacePoint();
            _colorPosition = new ColorSpacePoint();
            _trackingState = joint.TrackingState;
        }
示例#18
0
        /// <summary>
        /// Draws both hand markers on screen, one circle per hand,
        /// colored according to the hand's current state.
        /// </summary>
        private void showHands(DepthSpacePoint rightHand, DepthSpacePoint leftHand, HandState rightHandState,
            HandState leftHandState)
        {
            // Right hand, then left hand — brush selection is delegated to decideHandBrush.
            drawCircle(50, rightHand.X, rightHand.Y, decideHandBrush(rightHandState));
            drawCircle(50, leftHand.X, leftHand.Y, decideHandBrush(leftHandState));
        }
示例#19
0
        /// <summary>
        /// Maps every pixel of the depth frame into camera space, producing a
        /// point cloud covering the entire frame.
        /// </summary>
        /// <returns>The populated <c>FullPointCloud</c> array (Width * Height entries, row-major).</returns>
        public CameraSpacePoint[] GenerateFullPointCloud()
        {
            int width = DepthFrameDescription.Width;
            int height = DepthFrameDescription.Height;
            int frameSize = width * height;

            // CameraSpacePoint is a value type, so the new array is already
            // default-initialized; MapDepthPointsToCameraSpace fills every element below.
            FullPointCloud = new CameraSpacePoint[frameSize];

            // One DepthSpacePoint per pixel in row-major order (X = column, Y = row).
            DepthSpacePoint[] allDepthSpacePoints = new DepthSpacePoint[frameSize];
            for (int i = 0; i < height; ++i)
            {
                for (int j = 0; j < width; ++j)
                {
                    allDepthSpacePoints[i * width + j] = new DepthSpacePoint { X = j, Y = i };
                }
            }

            // Bulk copy of the raw depth values (replaces the original per-element loop).
            ushort[] depths = new ushort[frameSize];
            Array.Copy(depthArray, depths, frameSize);

            kinectSensor.CoordinateMapper.MapDepthPointsToCameraSpace(allDepthSpacePoints, depths, FullPointCloud);

            return FullPointCloud;
        }
示例#20
0
 /// <summary>
 /// Convenience overload: unpacks the <c>DepthSpacePoint</c> and forwards to the
 /// coordinate-based <c>transformToLocation</c> overload.
 /// </summary>
 public Location transformToLocation(DepthSpacePoint point, ushort[] depthData, bool isRelative = false)
 {
     float px = point.X;
     float py = point.Y;
     return transformToLocation(px, py, depthData, isRelative);
 }
 /// <summary>
 /// Depth-to-color mapping that also handles a scaled-down color image:
 /// maps at the original color resolution, then rescales to the given size.
 /// </summary>
 /// <param name="depthSpacePoint">Depth-image coordinate to map.</param>
 /// <param name="depth">Depth value at that coordinate.</param>
 /// <param name="colorWidth">Width of the (possibly scaled) color image.</param>
 /// <param name="colorHeight">Height of the (possibly scaled) color image.</param>
 /// <returns>The mapped point, rescaled to the requested color resolution.</returns>
 public ColorSpacePoint MapDepthPointToColorSpace(DepthSpacePoint depthSpacePoint, ushort depth, int colorWidth, int colorHeight)
 {
     // Map at the original color resolution first.
     ColorSpacePoint mapped = this.MapDepthPointToColorSpace(depthSpacePoint, depth);
     // Rescale both axes to the target resolution (same multiply-then-divide
     // order as before so the float result is identical).
     mapped.X = mapped.X * colorWidth / this.originalColorWidth;
     mapped.Y = mapped.Y * colorHeight / this.originalColorHeight;
     return mapped;
 }
示例#22
0
        /// <summary>
        /// GPU path of the background filter: packs the color/depth/mapping data into
        /// compute-shader buffers, dispatches the shader once over all 1920x1080 pixels,
        /// and reads the filtered BGRA frame back.
        /// </summary>
        /// <param name="bgra">Current color frame as flat BGRA bytes (indexed as 1920*1080*4 below).</param>
        /// <param name="depth">Raw ushort depth frame, widened to uint for the GPU.</param>
        /// <param name="depthSpaceData">Per-color-pixel depth-space mapping table.</param>
        /// <param name="nearThresh">Near depth threshold, forwarded to the shader via the constant buffer.</param>
        /// <param name="farThresh">Far depth threshold, forwarded to the shader via the constant buffer.</param>
        /// <param name="haloSize">Halo neighborhood size, forwarded to the shader and used to build the halo offsets.</param>
        /// <returns>Filtered BGRA bytes, or an empty array when no compute shader is loaded.</returns>
        public byte[] FilterGPU(byte[] bgra, ushort[] depth, DepthSpacePoint[] depthSpaceData,
            int nearThresh, int farThresh, int haloSize)
        {
            // No compiled shader available: bail out with an empty frame.
            if (computeShader == null)
            {
                return new byte[0];
            }

            // Initialize last frame with current color frame, if it was reset
            if (bLastFrameReset)
            {
                lastFramePixels = bgra;
                bLastFrameReset = false;
            }

            // -- Create halo array --
            // The halo is the set of integer offsets inside a diamond (|x| + |y| <= S),
            // with x spanning +-haloSize and y spanning +-haloSize/2.

            List<int2> halo = new List<int2>();

            int s = haloSize;
            int xd = s;
            int yd = s / 2;
            int S = (xd + yd) / 2;
            int x0 = -xd;
            int x1 = +xd;
            int y0 = -yd;
            int y1 = +yd;
            int actualHaloSize = 0;
            for (int y = y0; y < y1; ++y)
            {
                for (int x = x0; x < x1; ++x)
                {
                    if (Math.Abs(x) + Math.Abs(y) <= S)
                    {
                        halo.Add(new int2(x, y));
                        ++actualHaloSize;
                    }
                }
            }

            // --

            // -- Perform data transformations so the arrays can be passed to the GPU --

            // Repack the flat BGRA byte stream into one int4 per pixel.
            var bgraDataTransformed = new int4[1920 * 1080];
            for (int i = 0, j = 0; i < bgra.Length; i += 4, ++j)
            {
                bgraDataTransformed[j] = new int4(bgra[i], bgra[i + 1], bgra[i + 2], bgra[i + 3]);
            }

            // Same repacking for the previous frame's pixels.
            var lastFrameDataTransformed = new int4[1920 * 1080];
            for (int i = 0, j = 0; i < bgra.Length; i += 4, ++j)
            {
                lastFrameDataTransformed[j] = new int4(lastFramePixels[i], lastFramePixels[i + 1], lastFramePixels[i + 2], lastFramePixels[i + 3]);
            }

            // --

            //var sw = Stopwatch.StartNew();

            // Create a constant buffer to pass the filter configuration
            var cbuffer = GPGPUHelper.CreateConstantBuffer(device, new int[] { nearThresh, farThresh, haloSize });

            // -- Create GPULists using the immediate context and pass the data --

            GPUList<int4> bgraData = new GPUList<int4>(device.ImmediateContext);
            bgraData.AddRange(bgraDataTransformed);

            GPUList<uint> depthData = new GPUList<uint>(device.ImmediateContext);
            depthData.AddRange(depth.Select(d => (uint)d));

            GPUList<DepthSpacePoint> depthSpacePointData = new GPUList<DepthSpacePoint>(device.ImmediateContext, depthSpaceData);
            //depthSpacePointData.AddRange(depthSpaceData.Select(dsp => {

            //    if (dsp.X == float.NegativeInfinity || dsp.Y == -float.NegativeInfinity)
            //    {
            //        return new DepthSpacePoint() { X = -1, Y = -1 };
            //    }
            //    else
            //    {
            //        return dsp;
            //    }
            //}));

            GPUList<int4> lastFrameData = new GPUList<int4>(device.ImmediateContext);
            lastFrameData.AddRange(lastFrameDataTransformed);

            var resultArray = new int4[1920 * 1080];
            GPUList<int4> resultData = new GPUList<int4>(device.ImmediateContext, resultArray);

            GPUList<int2> haloData = new GPUList<int2>(device.ImmediateContext, halo);

            // --

            var sw = Stopwatch.StartNew();

            // Set the buffers and uavs
            // Slot numbers (0-5) must match the UAV register bindings in the shader.
            device.ImmediateContext.ComputeShader.Set(computeShader);
            device.ImmediateContext.ComputeShader.SetConstantBuffer(cbuffer, 0);
            device.ImmediateContext.ComputeShader.SetUnorderedAccessView(bgraData.UnorderedAccess, 0);
            device.ImmediateContext.ComputeShader.SetUnorderedAccessView(depthData.UnorderedAccess, 1);
            device.ImmediateContext.ComputeShader.SetUnorderedAccessView(depthSpacePointData.UnorderedAccess, 2);
            device.ImmediateContext.ComputeShader.SetUnorderedAccessView(lastFrameData.UnorderedAccess, 3);
            device.ImmediateContext.ComputeShader.SetUnorderedAccessView(resultData.UnorderedAccess, 4);
            device.ImmediateContext.ComputeShader.SetUnorderedAccessView(haloData.UnorderedAccess, 5);

            // Run the compute shader
            // 1920*1080 pixels split into groups of 256 threads.
            device.ImmediateContext.Dispatch(1920 * 1080 / 256, 1, 1);

            // Get result. This call blocks, until the result was calculated
            // because the MapSubresource call waits.
            var result = resultData.ToArray();

            sw.Stop();

            // -- Clean up --

            // Unbind all resources before disposing them.
            device.ImmediateContext.ComputeShader.SetConstantBuffer(null, 0);
            device.ImmediateContext.ComputeShader.SetUnorderedAccessView(null, 0);
            device.ImmediateContext.ComputeShader.SetUnorderedAccessView(null, 1);
            device.ImmediateContext.ComputeShader.SetUnorderedAccessView(null, 2);
            device.ImmediateContext.ComputeShader.SetUnorderedAccessView(null, 3);
            device.ImmediateContext.ComputeShader.SetUnorderedAccessView(null, 4);
            device.ImmediateContext.ComputeShader.SetUnorderedAccessView(null, 5);

            cbuffer.Dispose();
            bgraData.Dispose();
            depthData.Dispose();
            depthSpacePointData.Dispose();
            lastFrameData.Dispose();
            resultData.Dispose();
            haloData.Dispose();

            // --

            Debug.WriteLine($"Filtering took {sw.ElapsedMilliseconds} ms");

            // Unpack the int4 results back into a flat BGRA byte array.
            var resultBytes = new byte[1920 * 1080 * 4];

            for (int i = 0, j = 0; i < resultBytes.Length; i += 4, ++j)
            {
                resultBytes[i] = (byte)result[j].x;
                resultBytes[i+1] = (byte)result[j].y;
                resultBytes[i+2] = (byte)result[j].z;
                resultBytes[i+3] = (byte)result[j].a;
            }

            // Keep this frame as the reference for the next call's temporal filtering.
            lastFramePixels = resultBytes;

            return resultBytes;
        }
示例#23
0
 /// <summary>
 /// Dispatches the background filter to the CPU or GPU implementation
 /// according to the configured <c>_FilterMode</c>.
 /// </summary>
 public byte[] Filter(byte[] bgra, ushort[] depth, DepthSpacePoint[] depthSpaceData,
     int nearThresh, int farThresh, int haloSize)
 {
     if (_FilterMode == FilterMode.CPU)
     {
         return FilterCPU(bgra, depth, depthSpaceData, nearThresh, farThresh, haloSize);
     }

     if (_FilterMode == FilterMode.GPU)
     {
         return FilterGPU(bgra, depth, depthSpaceData, nearThresh, farThresh, haloSize);
     }

     // Unknown mode: return an empty frame rather than throwing.
     return new byte[0];
 }
        /// <summary>
        /// Adds a filled circle to the canvas, centered on the given depth-space point.
        /// </summary>
        /// <param name="point">Center of the circle in canvas coordinates.</param>
        /// <param name="brush">Fill brush.</param>
        /// <param name="radius">Diameter used for both width and height of the ellipse.</param>
        private void DrawEllipse(DepthSpacePoint point, Brush brush, double radius)
        {
            var marker = new Ellipse
            {
                Width = radius,
                Height = radius,
                Fill = brush
            };

            canvas.Children.Add(marker);

            // Offset by half the size so the ellipse is centered on the point.
            double half = radius / 2.0;
            Canvas.SetLeft(marker, point.X - half);
            Canvas.SetTop(marker, point.Y - half);
        }
示例#25
0
        /// <summary>
        /// Builds a color-resolution image in which only pixels belonging to a detected
        /// body keep their color (everything else stays zero), then displays it.
        /// </summary>
        private async Task DrawColorCoodinate()
        {
            // Output buffer at the color image resolution (zero-initialized = black/transparent).
            var colorImageBuffer = new byte[colorFrameDesc.LengthInPixels *
                                            colorFrameDesc.BytesPerPixel];

            // The coordinate mapping is expensive, so run it off the UI thread.
            // Task.Run is the recommended replacement for Task.Factory.StartNew
            // for simple thread-pool offloading.
            await Task.Run( () =>
            {
                // For every color pixel, get the corresponding depth-space coordinate.
                var depthSpace = new DepthSpacePoint[colorFrameDesc.LengthInPixels];
                kinect.CoordinateMapper.MapColorFrameToDepthSpace( depthBuffer, depthSpace );

                // Process the pixels in parallel.
                Parallel.For( 0, colorFrameDesc.LengthInPixels, i =>
                {
                    // Skip color pixels whose mapping falls outside the depth frame.
                    int depthX = (int)depthSpace[i].X;
                    int depthY = (int)depthSpace[i].Y;
                    if ( (depthX < 0) || (depthFrameDesc.Width <= depthX) ||
                             (depthY < 0) || (depthFrameDesc.Height <= depthY) ) {
                        return;
                    }

                    // Linear index into the depth / body-index frames.
                    int depthIndex = (depthY * depthFrameDesc.Width) + depthX;
                    int bodyIndex = bodyIndexBuffer[depthIndex];

                    // 255 in the body-index frame means "no body"; leave the pixel black.
                    if ( bodyIndex == 255 ) {
                        return;
                    }

                    // Copy the BGR channels for pixels that belong to a body.
                    int colorImageIndex = (int)(i * colorFrameDesc.BytesPerPixel);
                    colorImageBuffer[colorImageIndex + 0] = colorBuffer[colorImageIndex + 0];
                    colorImageBuffer[colorImageIndex + 1] = colorBuffer[colorImageIndex + 1];
                    colorImageBuffer[colorImageIndex + 2] = colorBuffer[colorImageIndex + 2];
                } );
            } );

            // Push the pixel buffer into a WriteableBitmap and display it.
            var colorBitmap = new WriteableBitmap( colorFrameDesc.Width,
                                                   colorFrameDesc.Height );
            var stream = colorBitmap.PixelBuffer.AsStream();
            stream.Write( colorImageBuffer, 0, colorImageBuffer.Length );
            colorBitmap.Invalidate();
            ImageColor.Source = colorBitmap;
        }
示例#26
0
        // Note: In order to see the full range of depth (including the less reliable far field depth) we are setting maxDepth (ushort.MaxValue) to the extreme potential depth threshold
        // If you wish to filter by reliable depth distance, use:  depthFrame.DepthMaxReliableDistance
        /// <summary>
        /// Extracts a square window around the hand-palm position <paramref name="p"/> from
        /// the raw depth frame, optionally records nearby points to a PCD file (every 16th
        /// recorded frame), and fills <c>depthPixels</c> with a contrast-boosted visualization.
        /// </summary>
        /// <param name="depthFrameData">Pointer to the raw 16-bit depth frame.</param>
        /// <param name="frameSize">Half-size of the square window around the hand.</param>
        /// <param name="minDepth">Currently unused (kept for interface compatibility).</param>
        /// <param name="maxDepth">Currently unused (kept for interface compatibility).</param>
        /// <param name="p">Hand-palm position in depth-space coordinates.</param>
        /// <param name="rec">True while recording is active.</param>
        /// <param name="left">True when recording the left hand (selects the output file name).</param>
        private unsafe void ProcessDepthFrameData(IntPtr depthFrameData, int frameSize, ushort minDepth, ushort maxDepth, DepthSpacePoint p, bool rec, bool left)
        {
            string file = "";
            if (depthFrameSelector == 15 && rec)
            {
                if (left)
                    file = String.Format("c:/temp/SLRS/hands/depthDataLeft_{0}_{1}_{2}.txt", gestureWord[gestureNumber], sequenceID, depthFrameIndexL++);
                else
                    file = String.Format("c:/temp/SLRS/hands/depthDataRight_{0}_{1}_{2}.txt", gestureWord[gestureNumber], sequenceID, depthFrameIndexR++);

                depthData = new StreamWriter(file, true);
                Helper.writePCDHeader(depthData);
            }

            ushort* frameData = (ushort*)depthFrameData; // depth frame data is a 16 bit value
            // Depth at the palm position: reference for the near/far tolerance band.
            ushort initDepth = frameData[depthFrameDescription.Width * ((int)p.Y) + ((int)p.X)];
            byte initPos = (byte)(initDepth / MapDepthToByte);

            int cloudPointCount = 0;
            int factor = 80;    // tolerance band (depth units) around the palm depth
            int index = 0;
            for (int y = -frameSize; y < frameSize; y++)
            {
                for (int x = -frameSize; x < frameSize; x++)
                {
                    // Select index for smaller frame and get Depth value.
                    int i = (depthFrameDescription.Width * ((int)p.Y + y) + ((int)p.X + x));
                    ushort depth = frameData[i];

                    // if this depth is near to the initpoint (handpalm) --> record and adapt depth map for visualization
                    if (depth < initDepth + factor && depth > initDepth - factor)
                    {
                        if (depthFrameSelector == 15 && rec)
                        {
                            var point = Helper.depthToPCD(frameSize, p.X + x, p.Y + y, depth);
                            depthData.WriteLine(String.Format("{0} {1} {2}", point.X, point.Y, point.Z));
                            cloudPointCount++;
                        }

                        // Exaggerate the relative depth x10 for visualization contrast.
                        depth += (ushort)((depth - initDepth) * 10);
                    }
                    else
                        depth = 0;

                    this.depthPixels[index++] = (byte)(depth / MapDepthToByte);
                }
            }

            if (depthFrameSelector == 15 && rec)
            {
                depthData.Flush();
                depthData.Close();

                // Patch the PCD header: the WIDTH and POINTS lines must hold the actual
                // number of points written above. (The original FileStream-based code did
                // not compile — it mixed StreamReader/FileStream APIs and used undeclared
                // variables — so the file is rewritten line-by-line instead.)
                string count = cloudPointCount.ToString();
                string[] lines = File.ReadAllLines(file);
                for (int l = 0; l < lines.Length; l++)
                {
                    if (lines[l].Contains("WIDTH"))
                        lines[l] = "WIDTH " + count;
                    else if (lines[l].Contains("POINTS"))
                    {
                        lines[l] = "POINTS " + count;
                        break;
                    }
                }
                File.WriteAllLines(file, lines);

                depthFrameSelector = 0;
            }

            if (rec) depthFrameSelector++;
        }
示例#27
0
        /// <summary>
        /// Projects a skeleton joint into depth-image coordinates, optionally undoing
        /// the skeleton transformation first.
        /// </summary>
        /// <param name="joint">The joint whose position is projected.</param>
        /// <param name="undoTransform">When true, the inverse of <c>skeletonTransformation</c> is applied before mapping.</param>
        /// <returns>The joint position in depth-space coordinates.</returns>
        public Point MapJointToDepth(KinectBase.Joint joint, bool undoTransform)
        {
            //TODO: Update this so it takes a joint array instead of a single joint (this is supposed to be more efficient for the Kinect 2)
            Point3D position = joint.Position;

            if (undoTransform)
            {
                // Invert a copy of the transformation matrix and map the point back.
                Matrix3D inverse = skeletonTransformation;
                inverse.Invert();
                position = inverse.Transform(position);
            }

            // The Kinect coordinate mapper works on arrays, so wrap the single point.
            var skelPoints = new CameraSpacePoint[1];
            skelPoints[0].X = (float)position.X;
            skelPoints[0].Y = (float)position.Y;
            skelPoints[0].Z = (float)position.Z;

            var depthPoints = new DepthSpacePoint[1];
            kinect.CoordinateMapper.MapCameraPointsToDepthSpace(skelPoints, depthPoints);

            // Convert back to the base Point type.
            return new Point(depthPoints[0].X, depthPoints[0].Y);
        }
        // --- color
        /// <summary>
        /// Maps the color frame into depth space and produces a color-resolution byte image:
        /// either an 8-bit grayscale depth visualization (body pixels forced to white), or a
        /// 16-bit depth + body-index encoding, depending on <c>Context.Use8bitDepth</c>.
        /// The result is stored in <c>multiFrame.ColorMappedBytes</c>.
        /// </summary>
        private void ProcessColorMapping(MultiFrame multiFrame)
        {
            // --- color mapping from kinect
            long ticks1 = DateTime.Now.Ticks;
            DepthSpacePoint[] colorMappedToDepthPoints = new DepthSpacePoint[colorWidth * colorHeight];
            coordMapper.MapColorFrameToDepthSpace(multiFrame.DepthData, colorMappedToDepthPoints);            
            Utils.UpdateTimer("(ColorMappingCoord)", ticks1);

            // --- mapped colorAsDepth -> depth
            long ticks2 = DateTime.Now.Ticks;
            // One BGRA pixel per color pixel; zero-initialized (black) by default.
            byte[] colorMappedBytes = new byte[colorWidth * colorHeight * 4];

            unsafe
            {
                fixed (byte* fixedColorMapped = colorMappedBytes)
                fixed (ushort* fixedDepthData = multiFrame.DepthData)
                fixed (byte* fixedBodyIndexData = multiFrame.BodyIndexData)
                {
                    byte* ptrColorMapped = fixedColorMapped;
                    ushort* ptrDepthData = fixedDepthData;
                    byte* ptrBodyIndexData = fixedBodyIndexData;

                    // 8 bit
                    if (Context.Use8bitDepth)
                    {
                        for (int i = 0; i < colorPixelSize; i++)                                               
                        {
                            // checking infinity before adding + 0.5f is about 5x faster (!!)
                            float xTmp = colorMappedToDepthPoints[i].X;
                            float yTmp = colorMappedToDepthPoints[i].Y;

                            // Infinity marks "no depth mapping"; -1 fails the bounds check below.
                            int x = float.IsInfinity(xTmp) ? -1 : (int)(xTmp + 0.5f);
                            int y = float.IsInfinity(yTmp) ? -1 : (int)(yTmp + 0.5f);

                            if (x >= 0 && x < depthWidth && y >= 0 && y < depthHeight)
                            {
                                int idx = x + y * depthWidth;
                                // Body pixels (body index 0-5) become white; others get scaled depth.
                                byte val = (ptrBodyIndexData[idx] < 6 ? byte.MaxValue : (byte)(ptrDepthData[idx] / depthToByte));
                                *ptrColorMapped++ = val;
                                *ptrColorMapped++ = val;
                                *ptrColorMapped++ = val;
                                *ptrColorMapped++ = 255;            // alpha
                            }
                            else
                            {
                                ptrColorMapped += 4;                // 0 is default
                            }
                        }
                    }
                    // full depth (13 bit)
                    else
                    {
                        for (int i = 0; i < colorPixelSize; i++)
                        {
                            // checking infinity before adding + 0.5f is about 5x faster (!!)
                            float xTmp = colorMappedToDepthPoints[i].X;
                            float yTmp = colorMappedToDepthPoints[i].Y;

                            int x = float.IsInfinity(xTmp) ? -1 : (int)(xTmp + 0.5f);
                            int y = float.IsInfinity(yTmp) ? -1 : (int)(yTmp + 0.5f);

                            if (x >= 0 && x < depthWidth && y >= 0 && y < depthHeight)
                            {
                                int idx = x + y * depthWidth;
                                ushort val = ptrDepthData[idx];
                                // Split the 16-bit depth across two channels, body index in the third.
                                *ptrColorMapped++ = (byte)(val % 256);
                                *ptrColorMapped++ = (byte)(val / 256);
                                *ptrColorMapped++ = ptrBodyIndexData[idx];
                                *ptrColorMapped++ = 255;            // alpha
                            }
                            else
                            {
                                ptrColorMapped += 4;                // 0 is default
                            }
                        }
                    }
                }
            }            
            multiFrame.ColorMappedBytes = colorMappedBytes;
            Utils.UpdateTimer("(ColorMappingBytes)", ticks2);
        }
示例#29
0
        /// <summary>
        /// Removes the background from the displayed color image by writing directly into
        /// the bitmap's back buffer: color pixels whose mapped depth value is below
        /// <c>DepthMax</c> are kept, all others (including unmapped pixels) are zeroed.
        /// </summary>
        /// <param name="colorMappedToDepthPoints">Per-color-pixel depth-space mapping (-inf marks "no mapping").</param>
        /// <param name="depthFrameData">Raw depth values indexed by depth-space coordinates.</param>
        /// <param name="frameDescription">Depth frame description (supplies width/height).</param>
        unsafe private void ShowMappedColorBackgroundRemoved(DepthSpacePoint[] colorMappedToDepthPoints, ushort[] depthFrameData, FrameDescription frameDescription)
        {
            fixed (DepthSpacePoint* colorMappedToDepthPointsPointer = colorMappedToDepthPoints)
            {
                // Get raw access to the WriteableBitmap's pixel buffer.
                IBufferByteAccess bitmapBackBufferByteAccess = (IBufferByteAccess)this.bitmap.PixelBuffer;

                byte* bitmapBackBufferBytes = null;
                bitmapBackBufferByteAccess.Buffer(out bitmapBackBufferBytes);

                // Treat the color data as 4-byte pixels
                uint* bitmapPixelsPointer = (uint*)bitmapBackBufferBytes;

                int depthWidth = frameDescription.Width;
                int depthHeight = frameDescription.Height;

                // Loop over each row and column of the color image
                // Zero out any pixels that don't correspond to a body index
                for (int colorIndex = 0; colorIndex < this.colorMappedToDepthPoints.Length; ++colorIndex)
                {
                    float colorMappedToDepthX = colorMappedToDepthPoints[colorIndex].X;
                    float colorMappedToDepthY = colorMappedToDepthPoints[colorIndex].Y;

                    // The sentinel value is -inf, -inf, meaning that no depth pixel corresponds to this color pixel.
                    if (!float.IsNegativeInfinity(colorMappedToDepthX) &&
                        !float.IsNegativeInfinity(colorMappedToDepthY))
                    {
                        // Make sure the depth pixel maps to a valid point in color space
                        // (round to nearest by adding 0.5 before truncation).
                        int depthX = (int)(colorMappedToDepthX + 0.5f);
                        int depthY = (int)(colorMappedToDepthY + 0.5f);

                        // If the point is not valid, there is no body index there.
                        if ((depthX >= 0) && (depthX < depthWidth) && (depthY >= 0) && (depthY < depthHeight))
                        {
                            int depthIndex = (depthY * depthWidth) + depthX;

                            // Pixel is close enough: keep its color (skip the zeroing below).
                            if (depthFrameData[depthIndex] < DepthMax)
                            {
                                continue;
                            }
                        }
                    }
                    // no matching depth. zero out the pixel.
                    bitmapPixelsPointer[colorIndex] = 0;
                }
            }
            this.bitmap.Invalidate();
            FrameDisplayImage.Source = this.bitmap;
        }
        /// <summary>
        /// Post-processes the mapped camera-space points into the <c>Points</c> grid:
        /// fills position, normalized depth, reliability flag and color for every depth pixel.
        /// </summary>
        private void PostProc()
        {
            // Nothing to do without depth tracking or mapped camera points.
            if (!TrackDepth || CameraPoints == null) return;
            // Lazily allocate the per-pixel grid (with fixed i/j coordinates) on first use.
            if (Points==null)
            {
                Points = new KinectPoint[DepthWidth, DepthHeight];
                for (int j=0; j<DepthHeight; ++j)
                {
                    for(int i=0; i<DepthWidth; ++i)
                    {
                        Points[i, j] = new KinectPoint();
                        Points[i, j].i = i;
                        Points[i, j].j = j;
                    }
                }
            }

            // Depth normalization range [mind, maxd]: depthN = (z - mind) / (maxd - mind).
            double mind = 0.8;
            double maxd = 5.0;
            double dmul = 1.0 / (maxd - mind);
            float mulcol = 1.0f / 255.0f;   // byte color channel -> [0,1] float
            int k = 0;                      // linear index into CameraPoints / ColorIndex
            DepthSpacePoint dp = new DepthSpacePoint();

            for (int j = 0; j < DepthHeight; ++j)
            {
                for (int i = 0; i < DepthWidth; ++i)
                {
                    if (!double.IsNaN(CameraPoints[k].Z) && !double.IsInfinity(CameraPoints[k].Z))
                    {
                        // Valid camera-space point: use it directly and mark it reliable.
                        Points[i, j].p = new OpenTK.Vector3d(CameraPoints[k].X, CameraPoints[k].Y, CameraPoints[k].Z);
                        Points[i, j].depthN = (Points[i, j].p.Z - mind) * dmul;
                        Points[i, j].isReliable = true;
                    }
                    else
                    {
                        // Invalid depth: synthesize a point by mapping this pixel at
                        // maxReliableDistance, and mark it unreliable.
                        // NOTE(review): maxReliableDistance is passed as the depth argument —
                        // confirm its units match what MapDepthPointToCameraSpace expects.
                        dp.X = i;
                        dp.Y = j;
                        CameraSpacePoint cp= coordinateMapper.MapDepthPointToCameraSpace(dp, maxReliableDistance);

                        Points[i, j].p = new OpenTK.Vector3d(cp.X, cp.Y, cp.Z);
                        Points[i, j].depthN = (Points[i, j].p.Z - mind) * dmul;
                        Points[i, j].isReliable = false;
                    }
                    if (TrackColor)
                    {
                        // KColors stores 4 bytes per color pixel in B, G, R order; normalize to [0,1].
                        Points[i, j].color.B = KColors[ColorIndex[k]*4]*mulcol;
                        Points[i, j].color.G = KColors[ColorIndex[k]*4+1] * mulcol;
                        Points[i, j].color.R = KColors[ColorIndex[k]*4+2] * mulcol;
                        Points[i, j].color.A = 1.0f;
                    }
                    else
                    {
                        // No color tracking: opaque white.
                        Points[i, j].color.R = 1.0f;
                        Points[i, j].color.G = 1.0f;
                        Points[i, j].color.B = 1.0f;
                        Points[i, j].color.A = 1.0f;
                    }

                    k++;
                }
            }
        }