unsafe void UpdateFusionFrame()
        {
            //Create a Fusion depth float frame from the Kinect depth image data
            reconstruction.DepthToDepthFloatFrame(depthImagePixels, depthImageFrame,
                                                  FusionDepthProcessor.DefaultMinimumDepth, FusionDepthProcessor.DefaultMaximumDepth, true);
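            //(the trailing 'true' is the mirrorDepth flag, which horizontally mirrors the resulting depth float frame)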

            //Smooth depthImageFrame and store the result in smoothDepthImageFrame
            reconstruction.SmoothDepthFloatFrame(depthImageFrame, smoothDepthImageFrame, SmoothingKernelWidth, SmoothingDistanceThreshold);

            //Compute each depth pixel's position in the color image so color can be sampled
            ColorSpacePoint[] points = new ColorSpacePoint[depthWidth * depthHeight];
            coordinateMapper.MapDepthFrameToColorSpace(depthImagePixels, points);
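            //Depth pixels that cannot be mapped come back as -Infinity coordinates; the bounds check below filters them out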

            //Store the color values in colorImageFrameBuffer
            int[] colorImageFrameBuffer = new int[depthWidth * depthHeight];
            fixed(byte *ptrColorImagePixels = colorImagePixels)
            {
                int *rawColorPixels = (int *)ptrColorImagePixels;

                Parallel.For(0, depthHeight,
                             y =>
                {
                    for (int x = 0; x < depthWidth; x++)
                    {
                        int index             = y * depthWidth + x;
                        ColorSpacePoint point = points[index];
                        int colorX            = (int)(Math.Ceiling(point.X));
                        int colorY            = (int)(Math.Ceiling(point.Y));
                        if ((colorX >= 0) && (colorX < colorWidth) && (colorY >= 0) && (colorY < colorHeight))
                        {
                            colorImageFrameBuffer[index] = rawColorPixels[colorY * colorWidth + colorX];
                        }
                    }
                });
            }

            //Copy colorImageFrameBuffer into the Fusion colorImageFrame
            colorImageFrame.CopyPixelDataFrom(colorImageFrameBuffer);

            //Refresh worldToCameraTransform so the current camera pose and orientation feed into the calculation
            worldToCameraTransform = reconstruction.GetCurrentWorldToCameraTransform();

            //Integrate the latest smoothDepthImageFrame and colorImageFrame into the Fusion Reconstruction object
            float alignmentEnergy = 0.0f;
            bool  ret             = reconstruction.ProcessFrame(smoothDepthImageFrame, colorImageFrame, FusionDepthProcessor.DefaultAlignIterationCount,
                                                                FusionDepthProcessor.DefaultIntegrationWeight, FusionDepthProcessor.DefaultColorIntegrationOfAllAngles,
                                                                out alignmentEnergy, worldToCameraTransform);
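            //ProcessFrame returns false when camera pose tracking fails for this frame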

            //If integration failed, count the error; once the counter reaches 100, start over from scratch
            if (!ret)
            {
                if (++errorCount >= 100)
                {
                    errorCount = 0;
                    reset();
                }
            }
            //Store the result
            result();
        }
    // Update is called once per frame
    void Update()
    {
        if (_Sensor == null || _ColorSourceManager == null || _DepthSourceManager == null)
        {
            return;
        }
        Mat colorMat = _ColorSourceManager.GetColorMat();

        ushort[] depthData = _DepthSourceManager.GetData();
        _colorTexture = _ColorSourceManager.GetColorTexture();
        if (_mapperMat == null)
        {
            _mapperMat = new Mat(_colorTexture.height, _colorTexture.width, CvType.CV_8UC3);
        }

        //Map between the depth and color coordinate spaces
        colorSpace = new ColorSpacePoint[depthData.Length];
        depthSpace = new DepthSpacePoint[_colorTexture.width * _colorTexture.height];
        _Mapper.MapDepthFrameToColorSpace(depthData, colorSpace);
        _Mapper.MapColorFrameToDepthSpace(depthData, depthSpace);
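        //Both directions are computed: colorSpace holds one entry per depth pixel, depthSpace one entry per color pixel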



        //_mapperMat = colorMat;
    }
Example #3
        private void CoordinateMapping()
        {
            // Build the color-space position for each depth pixel and the depth-to-camera-space correspondence
            mapper.MapDepthFrameToColorSpace(depthBuffer, depthColorPoints);
            mapper.MapDepthFrameToCameraSpace(depthBuffer, cameraPoints);

            Parallel.For(0, depthFrameDesc.LengthInPixels, i =>
            {
                PointCloud[i].Clear();

                // Position in the color buffer
                int colorX = (int)depthColorPoints[i].X;
                int colorY = (int)depthColorPoints[i].Y;
                if ((colorX < 0) || (colorFrameDesc.Width <= colorX) || (colorY < 0) || (colorFrameDesc.Height <= colorY))
                {
                    return;
                }

                PointCloud[i].X = cameraPoints[i].X;
                PointCloud[i].Y = cameraPoints[i].Y;
                PointCloud[i].Z = cameraPoints[i].Z;

                int colorIndex  = (int)(((colorY * colorFrameDesc.Width) + colorX) * colorFrameDesc.BytesPerPixel);
                PointCloud[i].B = colorBuffer[colorIndex + 0];
                PointCloud[i].G = colorBuffer[colorIndex + 1];
                PointCloud[i].R = colorBuffer[colorIndex + 2];
            });
        }
        /// <summary>
        /// Constructor that can be used only while connected to a Kinect
        /// </summary>
        /// <param name="originalCoordinateMapper"></param>
        /// <param name="depthWidth"></param>
        /// <param name="depthHeight"></param>
        public LocalCoordinateMapper(CoordinateMapper originalCoordinateMapper, int depthWidth, int depthHeight)
        {
            this.depthWidth  = depthWidth;
            this.depthHeight = depthHeight;
            this.depthFrameToCameraSpaceTable = originalCoordinateMapper.GetDepthFrameToCameraSpaceTable();

            int length = depthWidth * depthHeight;

            ushort[]          depthBuffer      = new ushort[length];
            ColorSpacePoint[] colorSpacePoints = new ColorSpacePoint[length];
            this.depthFrameToColorSpacfeTable = new PointF[length];

            int depth = 1500; // any nonzero value works
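            // The depth-to-color X offset (parallax) varies roughly as D/depth, so a constant
            // depth suffices here; the depth-dependent term is subtracted back out below.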

            for (int i = 0; i < length; i++)
            {
                depthBuffer[i] = (ushort)depth;
            }
            originalCoordinateMapper.MapDepthFrameToColorSpace(depthBuffer, colorSpacePoints);
            for (int i = 0; i < length; i++)
            {
                PointF tempP = new PointF();
                tempP.X = (float)(colorSpacePoints[i].X - D / depth);
                tempP.Y = colorSpacePoints[i].Y;
                this.depthFrameToColorSpacfeTable[i] = tempP;
            }
            Debug.WriteLine(1);
        }
Example #5
    private void RefreshData(ushort[] depthData, int colorWidth, int colorHeight)
    {
        var frameDesc = _Sensor.DepthFrameSource.FrameDescription;

        ColorSpacePoint[] colorSpace = new ColorSpacePoint[depthData.Length];
        _Mapper.MapDepthFrameToColorSpace(depthData, colorSpace);

        for (int y = 0; y < frameDesc.Height; y += _DownsampleSize)
        {
            for (int x = 0; x < frameDesc.Width; x += _DownsampleSize)
            {
                int indexX     = x / _DownsampleSize;
                int indexY     = y / _DownsampleSize;
                int smallIndex = (indexY * (frameDesc.Width / _DownsampleSize)) + indexX;

                double avg = GetAvg(depthData, x, y, frameDesc.Width, frameDesc.Height);

                avg = avg * _DepthScale;

                _Vertices[smallIndex].z = (float)avg;

                // Update UV mapping with CDRP
                var colorSpacePoint = colorSpace[(y * frameDesc.Width) + x];
                _UV[smallIndex] = new Vector2(colorSpacePoint.X / colorWidth, colorSpacePoint.Y / colorHeight);
            }
        }

        _Mesh.vertices  = _Vertices;
        _Mesh.uv        = _UV;
        _Mesh.triangles = _Triangles;
        _Mesh.RecalculateNormals();
    }
Example #6
    private List <ValidPoint> DepthToColor()
    {
        // Points to return
        List <ValidPoint> validPoints = new List <ValidPoint>();

        // Get Data
        mDepthData = mMultiSource.GetDepthData();

        // Map
        mMapper.MapDepthFrameToCameraSpace(mDepthData, mCameraSpacePoints);
        mMapper.MapDepthFrameToColorSpace(mDepthData, mColorSpacePoints);


        // Filter
        for (int i = 0; i < mDepthResolution.x; i++)
        {
            for (int j = 0; j < mDepthResolution.y / 8; j++)
            {
                // Sample index
                int sampleIndex = (j * mDepthResolution.x) + i;
                sampleIndex *= 8;

                // Cutoff tests
                if (mCameraSpacePoints[sampleIndex].X < mLeftCutOff)
                {
                    continue;
                }

                if (mCameraSpacePoints[sampleIndex].X > mRightCutOff)
                {
                    continue;
                }

                if (mCameraSpacePoints[sampleIndex].Y > mTopCutOff)
                {
                    continue;
                }

                if (mCameraSpacePoints[sampleIndex].Y < mBottomCutOff)
                {
                    continue;
                }

                // Create point
                ValidPoint newPoint = new ValidPoint(mColorSpacePoints[sampleIndex], mCameraSpacePoints[sampleIndex].Z);

                // Depth test
                if (mCameraSpacePoints[sampleIndex].Z >= mWallDepth)
                {
                    newPoint.mWithinWallDepth = true;
                }

                // Add
                validPoints.Add(newPoint);
            }
        }

        return(validPoints);
    }
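
The ValidPoint type these filtering examples construct is not defined on this page; a minimal sketch reconstructed from its usage (a two-argument constructor plus an mWithinWallDepth flag; the field names are guesses) could look like:

    public class ValidPoint
    {
        public ColorSpacePoint colorSpace; // hypothetical field name
        public float z;                    // hypothetical field name
        public bool mWithinWallDepth = false;

        public ValidPoint(ColorSpacePoint colorSpace, float z)
        {
            this.colorSpace = colorSpace;
            this.z          = z;
        }
    }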
Example #7
    private void RefreshData(ushort[] depthData, int colorWidth, int colorHeight)
    {
        var frameDesc = _Sensor.DepthFrameSource.FrameDescription;

        ColorSpacePoint[] colorSpace = new ColorSpacePoint[depthData.Length];
        _Mapper.MapDepthFrameToColorSpace(depthData, colorSpace);

        // Compute shader input parameters
        tri[] values = new tri[_TrianglesTemplate.Length / 3];

        for (int i = 0; i < _TrianglesTemplate.Length / 3; i++)
        {
            values[i].index1 = _TrianglesTemplate[i * 3 + 0];
            values[i].index2 = _TrianglesTemplate[i * 3 + 1];
            values[i].index3 = _TrianglesTemplate[i * 3 + 2];

            values[i].depth1 = (int)depthData[IndexTrans(values[i].index1, _DownsampleSize, frameDesc.Width, frameDesc.Height)] / 1000.0f;
            values[i].depth2 = (int)depthData[IndexTrans(values[i].index2, _DownsampleSize, frameDesc.Width, frameDesc.Height)] / 1000.0f;
            values[i].depth3 = (int)depthData[IndexTrans(values[i].index3, _DownsampleSize, frameDesc.Width, frameDesc.Height)] / 1000.0f;

            values[i].p1 = new Vector3(0, 0, 0);
            values[i].p2 = new Vector3(0, 0, 0);
            values[i].p3 = new Vector3(0, 0, 0);
        }

        // Run the compute shader
        _Buffer.SetData(values);
        Dispatch(frameDesc.Width, frameDesc.Height);

        // Update the mesh from the buffer data the shader returned
        tri[] result = new tri[_TrianglesTemplate.Length / 3];
        _Buffer.GetData(result);

        _Vertices = new Vector3[_TrianglesTemplate.Length];
        _UV       = new Vector2[_TrianglesTemplate.Length];

        for (int i = 0; i < result.Length; i++)
        {
            _Vertices[i * 3 + 0] = result[i].p1;
            _Vertices[i * 3 + 1] = result[i].p2;
            _Vertices[i * 3 + 2] = result[i].p3;

            var colorSpacePoint = colorSpace[IndexTrans(result[i].index1, _DownsampleSize, frameDesc.Width, frameDesc.Height)];
            _UV[i * 3 + 0]  = new Vector2(colorSpacePoint.X / colorWidth, colorSpacePoint.Y / colorHeight);
            colorSpacePoint = colorSpace[IndexTrans(result[i].index2, _DownsampleSize, frameDesc.Width, frameDesc.Height)];
            _UV[i * 3 + 1]  = new Vector2(colorSpacePoint.X / colorWidth, colorSpacePoint.Y / colorHeight);
            colorSpacePoint = colorSpace[IndexTrans(result[i].index3, _DownsampleSize, frameDesc.Width, frameDesc.Height)];
            _UV[i * 3 + 2]  = new Vector2(colorSpacePoint.X / colorWidth, colorSpacePoint.Y / colorHeight);

            _Triangles[i * 3 + 0] = result[i].index1;
            _Triangles[i * 3 + 1] = result[i].index2;
            _Triangles[i * 3 + 2] = result[i].index3;
        }

        _Mesh.vertices  = _Vertices;
        _Mesh.uv        = _UV;
        _Mesh.triangles = _Triangles;
        _Mesh.RecalculateNormals();
    }
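
The tri struct marshalled to the compute shader above is not shown; judging from the usage, its layout (which has to match the shader-side buffer stride) would be roughly:

    // Hypothetical reconstruction; field order must mirror the StructuredBuffer layout in the shader.
    struct tri
    {
        public int     index1, index2, index3;
        public float   depth1, depth2, depth3;
        public Vector3 p1, p2, p3;
    }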
    private List <ValidPoint> DepthToColour()
    {
        //Get Depth Data
        mDepthData = mMultiSource.GetDepthData();

        //Map Depth Data
        mMapper.MapDepthFrameToCameraSpace(mDepthData, mCameraSpacePoints);
        mMapper.MapDepthFrameToColorSpace(mDepthData, mColorSpacePoints);

        //Points to return
        List <ValidPoint> validPoints = new List <ValidPoint>();

        // Filter
        for (int i = 0; i < mDepthResolution.x / 8; i++)
        {
            //Divide by 8 to avoid index out of bounds
            for (int j = 0; j < mDepthResolution.y / 8; j++)
            {
                //downsample all of the points to help it run better
                //go through one dimensional array of camera points
                int sampleIndex = (j * mDepthResolution.x) + i;
                //Skip over 8 values between each point
                sampleIndex *= 8;

                //Cut off tests
                if (mCameraSpacePoints[sampleIndex].X < mLeftCutOff)
                {
                    continue;
                }
                if (mCameraSpacePoints[sampleIndex].X > mRightCutOff)
                {
                    continue;
                }
                if (mCameraSpacePoints[sampleIndex].Y > mTopCutOff)
                {
                    continue;
                }
                if (mCameraSpacePoints[sampleIndex].Y < mBottomCutOff)
                {
                    continue;
                }

                //Create a new valid point
                ValidPoint newPoint = new ValidPoint(mColorSpacePoints[sampleIndex], mCameraSpacePoints[sampleIndex].Z);

                //Depth Test
                if (mCameraSpacePoints[sampleIndex].Z >= mWallDepth)
                {
                    newPoint.mWithinWallDepth = true;
                }

                //Add our new point to the valid points
                validPoints.Add(newPoint);
            }
        }

        return(validPoints);
    }
Example #9
        /// <summary>
        /// creates color info for all DEPTH pixels (to later e.g. write ply file from it)
        /// </summary>
        /// <param name="myColorMetaData"></param>
        /// <param name="myDepthMetaData"></param>
        /// <param name="myCoordinateMapper"></param>
        /// <returns></returns>
        public static byte[] ColorInfoForDepth(ColorMetaData myColorMetaData, DepthMetaData myDepthMetaData, CoordinateMapper myCoordinateMapper)
        {
            //Test_GetCameraPoints(myDepthMetaData, myCoordinateMapper);

            byte[] mydisplayPixels = new byte[DepthMetaData.XDepthMaxKinect * DepthMetaData.YDepthMaxKinect * MetaDataBase.BYTES_PER_PIXEL];
            //mapped data
            int numberOfPoints  = 0;
            int notMappedPixels = 0;

            if (myColorMetaData != null)
            {
                ColorSpacePoint[] mycolorPointsInDepthSpace = new ColorSpacePoint[DepthMetaData.XDepthMaxKinect * DepthMetaData.YDepthMaxKinect];

                myCoordinateMapper.MapDepthFrameToColorSpace(myDepthMetaData.FrameData, mycolorPointsInDepthSpace);


                for (int x = 0; x < DepthMetaData.XDepthMaxKinect; ++x)
                {
                    for (int y = 0; y < DepthMetaData.YDepthMaxKinect; ++y)
                    {
                        int depthIndex = (y * DepthMetaData.XDepthMaxKinect) + x;

                        if (myDepthMetaData.FrameData[depthIndex] != 0)
                        {
                            ColorSpacePoint colorPoint = mycolorPointsInDepthSpace[depthIndex];


                            int xColor          = Convert.ToInt32(colorPoint.X);
                            int yColor          = Convert.ToInt32(colorPoint.Y);
                            int depthIndexColor = depthIndex * MetaDataBase.BYTES_PER_PIXEL;

                            if ((xColor >= 0) && (xColor < ColorMetaData.XColorMaxKinect) && (yColor >= 0) && (yColor < ColorMetaData.YColorMaxKinect))
                            {
                                int colorIndex = ((yColor * ColorMetaData.XColorMaxKinect) + xColor) * MetaDataBase.BYTES_PER_PIXEL;

                                mydisplayPixels[depthIndexColor + 0] = myColorMetaData.Pixels[colorIndex];
                                mydisplayPixels[depthIndexColor + 1] = myColorMetaData.Pixels[colorIndex + 1];
                                mydisplayPixels[depthIndexColor + 2] = myColorMetaData.Pixels[colorIndex + 2];
                                mydisplayPixels[depthIndexColor + 3] = 0xff;
                                numberOfPoints++;
                            }
                            else
                            {
                                notMappedPixels++;
                                mydisplayPixels[depthIndexColor + 0] = 255;
                                mydisplayPixels[depthIndexColor + 1] = 255;
                                mydisplayPixels[depthIndexColor + 2] = 255;
                                mydisplayPixels[depthIndexColor + 3] = 0xff;
                            }
                        }
                    }
                }
            }
            //System.Diagnostics.Debug.WriteLine("---------> Number of Depth points: " + numberOfPoints.ToString());


            return(mydisplayPixels);
        }
    private void RefreshData(ushort[] depthData, int colorWidth, int colorHeight)
    {
        var frameDesc = _Sensor.DepthFrameSource.FrameDescription;

        ColorSpacePoint[] colorSpace = new ColorSpacePoint[depthData.Length];
        _Mapper.MapDepthFrameToColorSpace(depthData, colorSpace);

        if (backgroundRemoval)
        {
            bodyIndexData = _MultiManager.GetBodyIndexData();
            for (int y = 0; y < frameDesc.Height; y++)
            {
                for (int x = 0; x < frameDesc.Width; x++)
                {
                    int  bodyIndex = y * frameDesc.Width + x;
                    byte player    = bodyIndexData [bodyIndex];
                    if (player == 0xFF)
                    {
                        depthData [bodyIndex] = 0;
                    }
                }
            }
        }

        // flip vertices
        //for(int y = 0; y < frameDesc.Height; y++)


        for (int y = 0; y < frameDesc.Height; y += _DownsampleSize)
        {
            for (int x = 0; x < frameDesc.Width; x += _DownsampleSize)
            {
                int    indexX     = x / _DownsampleSize;
                int    indexY     = y / _DownsampleSize;
                int    smallIndex = (indexY * (frameDesc.Width / _DownsampleSize)) + indexX;
                double avg        = GetAvg(depthData, x, y, frameDesc.Width, frameDesc.Height);

                avg = avg * _DepthScale;

                _Vertices[smallIndex].z = (float)avg;

                var colorSpacePoint = colorSpace[(y * frameDesc.Width) + x];

                // Update UV mapping with CDRP

                if (colorSpacePoint.X != -1.0f && colorSpacePoint.Y != -1.0f)
                {
                    _UV[smallIndex] = new Vector2(colorSpacePoint.X / colorWidth, colorSpacePoint.Y / colorHeight);
                }
                //_UV[smallIndex] = new Vector2(colorSpacePoint.X / colorWidth, colorSpacePoint.Y / colorHeight);
            }
        }
        _Mesh.vertices  = _Vertices;
        _Mesh.uv        = _UV;
        _Mesh.triangles = _Triangles;
        _Mesh.RecalculateNormals();
    }
    void Update()
    {
        if (_Reader != null)
        {
            var frame = _Reader.AcquireLatestFrame();
            if (frame != null)
            {
                var colorFrame = frame.ColorFrameReference.AcquireFrame();
                if (colorFrame != null)
                {
                    var depthFrame = frame.DepthFrameReference.AcquireFrame();
                    if (depthFrame != null)
                    {
                        // Populate arrays - Save all required data
                        colorFrame.CopyConvertedFrameDataToArray(_ColorData, ColorImageFormat.Rgba);
                        depthFrame.CopyFrameDataToArray(_DepthData);

                        // Map to color from depth frame
                        _Mapper.MapDepthFrameToColorSpace(_DepthData, _ColorPoints);

                        // Clip the depth frame
                        EvaluateDepthData();

                        // Clip the color frame
                        EvaluateColorData();

                        // Get chosen depth data from initial depth data.
                        DSPDepthData();

                        // Get chosen color data from initial color data.
                        DSPColorData();

                        // Apply Color Texture
                        _ColorTexture.LoadRawTextureData(_ColorData);
                        _ColorTexture.Apply();

                        // Apply Clipped Color Texture
                        _ClippedColorTexture.LoadRawTextureData(_ClippedColorData);
                        _ClippedColorTexture.Apply();

                        // Apply Chosen Color Texture
                        _ChosenColorTexture.LoadRawTextureData(_ChosenColorData);
                        _ChosenColorTexture.Apply();

                        depthFrame.Dispose();
                        depthFrame = null;
                    }

                    colorFrame.Dispose();
                    colorFrame = null;
                }

                frame = null;
            }
        }
    }
        /// <summary>
        /// Densely store the depth-to-color mapping as BLKD.
        ///
        /// Each depth pixel is written as two little-endian shorts (color X and Y)
        /// followed by the three camera-space floats.
        /// </summary>
        /// <param name="frame">KinectScanner.Reading.Frame</param>
        /// <param name="buffer">preallocated buffer that receives the mapping</param>
        /// <returns>The Blkd wrapping the buffer, paired with the frame's relative time.</returns>
        public Tuple <Blkd, TimeSpan> CaptureMappedFrame(LiveFrame frame, byte[] buffer)
        {
            DepthFrame       depthFrame = frame.NativeDepthFrame;
            CoordinateMapper mapper     = frame.NativeCoordinateMapper;

            if (buffer.Length != Frame.DEPTH_INFRARED_PIXELS * DEPTH_MAPPING_BYTES_PER_PIXEL)
            {
                throw new ArgumentException(string.Format("Buffer length is {0} but {1} is needed", buffer.LongLength, Frame.DEPTH_INFRARED_PIXELS * DEPTH_MAPPING_BYTES_PER_PIXEL));
            }

            depthFrame.CopyFrameDataToArray(_depthData);
            mapper.MapDepthFrameToColorSpace(_depthData, _colorPoints);
            mapper.MapDepthFrameToCameraSpace(_depthData, _cameraSpacePoints);

            Array.Clear(buffer, 0, buffer.Length);
            int count = 0;
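            // Each pixel record is 2 little-endian shorts (colorX, colorY) plus 3 floats
            // (camera X, Y, Z) = 16 bytes, presumably the value of DEPTH_MAPPING_BYTES_PER_PIXEL.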

            for (int i = 0; i < Frame.DEPTH_INFRARED_PIXELS; ++i)
            {
                ColorSpacePoint  colorPoint  = _colorPoints[i];
                CameraSpacePoint cameraPoint = _cameraSpacePoints[i];

                // make sure the depth pixel maps to a valid point in color space
                short colorX = (short)Math.Floor(colorPoint.X + 0.5);
                short colorY = (short)Math.Floor(colorPoint.Y + 0.5);

                if (colorX < 0 || colorX >= Frame.COLOR_WIDTH || colorY < 0 || colorY >= Frame.COLOR_HEIGHT)
                {
                    colorX = -1;
                    colorY = -1;
                }

                // Little endian === lowest order bytes at lower addresses
                buffer[count++] = (byte)(colorX >> 0);
                buffer[count++] = (byte)(colorX >> 8);

                buffer[count++] = (byte)(colorY >> 0);
                buffer[count++] = (byte)(colorY >> 8);

                float[] cameraPointValues = new float[] { cameraPoint.X, cameraPoint.Y, cameraPoint.Z };
                System.Buffer.BlockCopy(cameraPointValues, 0, buffer, count, 12);
                count += 12;
            }

            Blkd result = new Blkd
            {
                Width         = (UInt16)Frame.DEPTH_INFRARED_WIDTH,
                Height        = (UInt16)Frame.DEPTH_INFRARED_HEIGHT,
                BytesPerPixel = DEPTH_MAPPING_BYTES_PER_PIXEL,
                Version       = 2,
                Data          = buffer
            };

            return(new Tuple <Blkd, TimeSpan>(result, depthFrame.RelativeTime));
        }
    /// <summary>
    /// Update Kinect data on a worker thread
    /// </summary>
    private void UpdateKinectData()
    {
        bool _done = false;

        Chrono chrono = new Chrono();

        chrono.Start();

        double dt = 0.033;
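        // a 0.033 s period schedules acquisition at ~30 Hz, the Kinect v2 frame rate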

        double next_schedule = 0.0;

        Debug.Log("Kinect acquisition started");

        while (!_done)
        {
            double current_time = chrono.GetElapsedTime();

            if (current_time > next_schedule)
            {
                next_schedule += dt;

                // Get depth map
                UpdateDepthData();

                // Get color map
                UpdateColorData();

                // Convert depth map into point cloud
                _cameraSpace = new CameraSpacePoint[_depthData.Length];

                lock (_cameraSpace)
                {
                    _mapper.MapDepthFrameToCameraSpace(_depthData, _cameraSpace);
                }

                // Map depth map to color map
                _colorSpace = new ColorSpacePoint[_depthData.Length];

                lock (_colorSpace)
                {
                    _mapper.MapDepthFrameToColorSpace(_depthData, _colorSpace);
                }
            }

            lock (doneLock)
            {
                _done = done;
            }

            Thread.Sleep(0);
        }

        Debug.Log("Kinect acquisition stopped");
    }
Example #14
    private List <ValidPoint> DepthToColor()
    {
        List <ValidPoint> ValidPoints = new List <ValidPoint>();

        //Get depth
        mDepthData = mMultiSource.GetDepthData();

        mMapper.MapDepthFrameToCameraSpace(mDepthData, mCameraSapcePoints);
        mMapper.MapDepthFrameToColorSpace(mDepthData, mColorSpacePoints);

        //Filter
        for (int i = 0; i < mDepthResolution.x / 8; i++)
        {
            for (int j = 0; j < mDepthResolution.y / 8; j++)
            {
                int sampleIndex = (j * mDepthResolution.x) + i;
                sampleIndex *= 8;

                //CutoffTests
                if (mCameraSapcePoints[sampleIndex].X < mLeftCutOff)
                {
                    continue;
                }
                if (mCameraSapcePoints[sampleIndex].X > mRigthCutOff)
                {
                    continue;
                }
                if (mCameraSapcePoints[sampleIndex].Y > mTopCutOff)
                {
                    continue;
                }
                if (mCameraSapcePoints[sampleIndex].Y < mBottomCutOff)
                {
                    continue;
                }

                ValidPoint newPoint = new ValidPoint(mColorSpacePoints[sampleIndex], mCameraSapcePoints[sampleIndex].Z);

                if (mCameraSapcePoints[sampleIndex].Z >= mWallDepth)
                {
                    newPoint.mWithinWallDepth = true;
                }

                ValidPoints.Add(newPoint);
            }
        }
        return(ValidPoints);
    }
Example #15
    private void RefreshData()
    {
        var frameDesc = _Sensor.DepthFrameSource.FrameDescription;


        _Mapper.MapDepthFrameToCameraSpace(_Data, camSpace);
        _Mapper.MapDepthFrameToColorSpace(_Data, colorSpace);

        for (int y = 0; y < frameDesc.Height; y += _DownsampleSize)
        {
            for (int x = 0; x < frameDesc.Width; x += _DownsampleSize)
            {
                int indexX     = x / _DownsampleSize;
                int indexY     = y / _DownsampleSize;
                int realIndex  = y * frameDesc.Width + x;
                int smallIndex = (indexY * (frameDesc.Width / _DownsampleSize)) + indexX;

                //double avg = GetAvg(_Data, x, y, frameDesc.Width, frameDesc.Height);
                //avg = avg * _DepthScale + _Offset.z;


                //_Vertices[smallIndex].z = (float)avg;

                CameraSpacePoint p       = camSpace[realIndex];
                Vector3          targetP = (new Vector3(p.X, p.Y, p.Z) + _Offset) * _Scale;

                if (float.IsInfinity(_Vertices[smallIndex].z))
                {
                    _Vertices[smallIndex] = targetP;
                }
                else
                {
                    _Vertices[smallIndex] = _Vertices[smallIndex] + (targetP - _Vertices[smallIndex]) * motionSmooth;// _OldVertices[smallIndex] + (targetP - _OldVertices[smallIndex]) / motionSmooth ;
                }


                // Update UV mapping with CDRP
                var colorSpacePoint = colorSpace[(y * frameDesc.Width) + x];
                _UV[smallIndex] = new Vector2(colorSpacePoint.X * 1.0f / 1920, colorSpacePoint.Y * 1.0f / 1080);
            }
        }

        _Mesh.vertices  = _Vertices;
        _Mesh.uv        = _UV;
        _Mesh.triangles = _Triangles;
        _Mesh.RecalculateNormals();
    }
        private void DrawDepthCoordinate()
        {
            // Build the image data at the depth resolution
            var colorImageBuffer = new byte[depthFrameDesc.LengthInPixels *
                                            colorFrameDesc.BytesPerPixel];

            // Get the color-space point corresponding to every depth pixel
            var colorSpace = new ColorSpacePoint[depthFrameDesc.LengthInPixels];

            mapper.MapDepthFrameToColorSpace(depthBuffer, colorSpace);

            // Process in parallel
            Parallel.For(0, depthFrameDesc.LengthInPixels, i =>
            {
                int colorX = (int)colorSpace[i].X;
                int colorY = (int)colorSpace[i].Y;
                if ((colorX < 0) || (colorFrameDesc.Width <= colorX) ||
                    (colorY < 0) || (colorFrameDesc.Height <= colorY))
                {
                    return;
                }

                // Index into the color image
                int colorIndex = (colorY * colorFrameDesc.Width) + colorX;
                int bodyIndex  = bodyIndexBuffer[i];

                // Color only the pixels where a body was detected
                if (bodyIndex == 255)
                {
                    return;
                }

                // Set the color pixel
                int colorImageIndex  = (int)(i * colorFrameDesc.BytesPerPixel);
                int colorBufferIndex = (int)(colorIndex * colorFrameDesc.BytesPerPixel);
                colorImageBuffer[colorImageIndex + 0] = colorBuffer[colorBufferIndex + 0];
                colorImageBuffer[colorImageIndex + 1] = colorBuffer[colorBufferIndex + 1];
                colorImageBuffer[colorImageIndex + 2] = colorBuffer[colorBufferIndex + 2];
            });

            ImageColor.Source = BitmapSource.Create(
                depthFrameDesc.Width, depthFrameDesc.Height, 96, 96,
                PixelFormats.Bgr32, null, colorImageBuffer,
                (int)(depthFrameDesc.Width * colorFrameDesc.BytesPerPixel));
        }
Example #17
    unsafe void Update()
    {
        if (_Reader != null)
        {
            var frame = _Reader.AcquireLatestFrame();

            if (frame != null)
            {
                fixed(byte *pData = _Data)
                {
                    frame.CopyFrameDataToIntPtr(new System.IntPtr(pData), (uint)_Data.Length);
                }

                frame.CopyFrameDataToArray(_sData);
                _DepthTexture.LoadRawTextureData(_Data);
                ColorSpacePoint[] colorSpace = new ColorSpacePoint[_sData.Length];
                _Mapper.MapDepthFrameToColorSpace(_sData, colorSpace);
                Color32[] pixels = _UVTexture.GetPixels32();
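                // Pack ~16-bit UVs into the texture: r holds the coarse 8 bits of U and b its
                // fractional remainder; g and a do the same for V, so a shader can recombine
                // them as U = (r + b / 255.0) / 255.0.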
                for (int i = 0; i < colorSpace.Length; i++)
                {
                    pixels[i].r = (byte)((colorSpace[i].X / 1920) * 255);                   //ranges from 0 to 255
                    float remainderX = colorSpace[i].X / 1920 * 255 - pixels[i].r;
                    pixels[i].b = (byte)(remainderX * 255);
                    pixels[i].g = (byte)((colorSpace[i].Y / 1080) * 255);
                    float remainderY = colorSpace[i].Y / 1080 * 255 - pixels[i].g;
                    pixels[i].a = (byte)(remainderY * 255);
                }

                _UVTexture.SetPixels32(pixels);
                _UVTexture.Apply();
                _DepthTexture.Apply();


                frame.Dispose();


                frame = null;
            }
        }
    }
Example #18
        /// <summary>
        /// Converts the depth frame (plus optional color and body data) into a PointCloud
        /// (to later e.g. write a ply file from it)
        /// </summary>
        /// <param name="myColorMetaData"></param>
        /// <param name="myDepthMetaData"></param>
        /// <param name="myBodyMetaData"></param>
        /// <param name="myCoordinateMapper"></param>
        /// <returns></returns>
        public static PointCloud ToPointCloud(ColorMetaData myColorMetaData, DepthMetaData myDepthMetaData, BodyMetaData myBodyMetaData, CoordinateMapper myCoordinateMapper)
        {
            if (myColorMetaData == null && myDepthMetaData == null)
            {
                return(null);
            }

            ColorSpacePoint[]  mycolorPointsInDepthSpace = new ColorSpacePoint[DepthMetaData.XDepthMaxKinect * DepthMetaData.YDepthMaxKinect];
            CameraSpacePoint[] myRealWorldPoints         = new CameraSpacePoint[DepthMetaData.XDepthMaxKinect * DepthMetaData.YDepthMaxKinect];
            myCoordinateMapper.MapDepthFrameToCameraSpace(myDepthMetaData.FrameData, myRealWorldPoints);
            myCoordinateMapper.MapDepthFrameToColorSpace(myDepthMetaData.FrameData, mycolorPointsInDepthSpace);


            if (myColorMetaData != null)
            {
                return(PointCloudWithColorParallel(mycolorPointsInDepthSpace, myRealWorldPoints, myColorMetaData, myDepthMetaData, myBodyMetaData));
            }
            else
            {
                return(PointCloudBW(myRealWorldPoints, myDepthMetaData, myBodyMetaData));
            }
        }
Example #19
    private void UpdateData(ushort[] depthData, Texture2D colorData, int colorWidth, int colorHeight)
    {
        var frameDesc = _Sensor.DepthFrameSource.FrameDescription;

        ColorSpacePoint[]  colorSpace  = new ColorSpacePoint[depthData.Length];
        CameraSpacePoint[] cameraSpace = new CameraSpacePoint[depthData.Length];
        _Mapper.MapDepthFrameToColorSpace(depthData, colorSpace);
        _Mapper.MapDepthFrameToCameraSpace(depthData, cameraSpace);

        for (int y = 0; y < frameDesc.Height; y += _DownsampleSize)
        {
            for (int x = 0; x < frameDesc.Width; x += _DownsampleSize)
            {
                int indexX     = x / _DownsampleSize;
                int indexY     = y / _DownsampleSize;
                int offsetX    = x + offset;
                int offsetY    = y + offset;
                int smallIndex = (indexY * (frameDesc.Width / _DownsampleSize)) + indexX;
                int fullIndex  = (offsetY * frameDesc.Width) + offsetX;

                _Vertices[smallIndex].z = cameraSpace[fullIndex].Z - 1;

                if (cameraSpace[fullIndex].X > -1000 && cameraSpace[fullIndex].X < 1000)
                {
                    _Vertices[smallIndex].x = cameraSpace[fullIndex].X;
                }
                if (cameraSpace[fullIndex].Y > -1000 && cameraSpace[fullIndex].Y < 1000)
                {
                    _Vertices[smallIndex].y = cameraSpace[fullIndex].Y;
                }
                var colorSpacePoint = colorSpace[(y * frameDesc.Width) + x];
                _Colors[smallIndex] = colorData.GetPixel((int)colorSpacePoint.X, (int)colorSpacePoint.Y);
            }
        }

        _Mesh.vertices = _Vertices;
        _Mesh.colors   = _Colors;
        _Mesh.SetIndices(_Indices, MeshTopology.Points, 0);
    }
        void Update()
        {
            // Back to main menu
            if (Input.GetKeyDown(KeyCode.Escape))
            {
                UnityEngine.SceneManagement.SceneManager.LoadScene("MainMenu");
            }

            _multiManager = KinectSource.GetComponent <MultiSourceManager>();

            if (_sensor == null || KinectSource == null || _multiManager == null)
            {
                return;
            }

            var depthData = _multiManager.GetDepthData();

            ColorSpacePoint[] colorSpacePoints = new ColorSpacePoint[depthData.Length];
            _mapper.MapDepthFrameToColorSpace(depthData, colorSpacePoints);

            _kinectMesh.LoadDepthData(depthData, _multiManager.GetColorTexture(), colorSpacePoints, _multiManager.ColorWidth, _multiManager.ColorHeight);
        }
Example #21
    void updateMesh()
    {
        GetComponent <Renderer>().material.mainTexture = SourceManager.getColorTexture();

        ushort[] depthData = SourceManager.getDepthData();
        if (depthData != null)
        {
            int depthWidth  = SourceManager.getDepthWidth();
            int depthHeight = SourceManager.getDepthHeight();
            int colorWidth  = SourceManager.getColorWidth();
            int colorHeight = SourceManager.getColorHeight();

            ColorSpacePoint[] colorSpace = new ColorSpacePoint[depthData.Length];
            mapper.MapDepthFrameToColorSpace(depthData, colorSpace);

            for (int y = 0; y < depthHeight; y += DOWNSAMPLE)
            {
                for (int x = 0; x < depthWidth; x += DOWNSAMPLE)
                {
                    int index = (y / DOWNSAMPLE) * (depthWidth / DOWNSAMPLE) + (x / DOWNSAMPLE);

                    vertices[index].z  = getAvgDepth(depthData, x, y, depthWidth, depthHeight) * M_PER_MM;
                    vertices[index].x  = vertices[index].z * TAN_HALF_H_FOV * (2.0f * x / depthWidth - 1.0f);
                    vertices[index].y  = vertices[index].z * TAN_HALF_V_FOV * -(2.0f * y / depthHeight - 1.0f);
                    vertices[index].z -= MAX_DISTANCE;

                    ColorSpacePoint colorSpacePoint = colorSpace[y * depthWidth + x];
                    uv[index] = new Vector2(colorSpacePoint.X / colorWidth, colorSpacePoint.Y / colorHeight);
                }
            }
            transform.position = new Vector3(0f, 0f, MAX_DISTANCE); // move forward to avoid the near clipping plane

            mesh.vertices  = vertices;
            mesh.uv        = uv;
            mesh.triangles = triangles;
            mesh.RecalculateNormals();
        }
    }
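
The constants this example relies on are defined elsewhere; plausible values, assuming the Kinect v2 depth camera's 70.6° x 60° field of view and millimetre depth units, would be:

    const int   DOWNSAMPLE   = 4;      // hypothetical downsampling factor
    const float M_PER_MM     = 0.001f; // depth arrives in millimetres
    const float MAX_DISTANCE = 4.5f;   // metres; roughly the end of the v2 depth range
    static readonly float TAN_HALF_H_FOV = Mathf.Tan(70.6f * 0.5f * Mathf.Deg2Rad);
    static readonly float TAN_HALF_V_FOV = Mathf.Tan(60.0f * 0.5f * Mathf.Deg2Rad);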
    public override void saveNextFrame()
    {
        var entry      = Frames.Dequeue();
        var colorEntry = ColorFrames.Dequeue();
        var colorFrame = colorEntry.Resource;

        mapper.MapDepthFrameToColorSpace(entry.Resource, points);

        Array.Clear(output, 0, output.Length);
        for (var y = 0; y < Recorder.DEPTH_HEIGHT; y++)
        {
            for (var x = 0; x < Recorder.DEPTH_WIDTH; x++)
            {
                int depthIndex = x + (y * Recorder.DEPTH_WIDTH);
                var cPoint     = points[depthIndex];
                int colorX     = (int)Math.Floor(cPoint.X + 0.5);
                int colorY     = (int)Math.Floor(cPoint.Y + 0.5);

                if ((colorX >= 0) && (colorX < Recorder.COLOR_WIDTH) && (colorY >= 0) && (colorY < Recorder.COLOR_HEIGHT))
                {
                    int colorIndex   = ((colorY * Recorder.COLOR_WIDTH) + colorX) * 4;
                    int displayIndex = depthIndex * 4;

                    output[displayIndex + 0] = colorFrame[colorIndex];
                    output[displayIndex + 1] = colorFrame[colorIndex + 1];
                    output[displayIndex + 2] = colorFrame[colorIndex + 2];
                    output[displayIndex + 3] = 0xff;
                }
            }
        }

        var trackedFramePath = Path.Combine(CurrentDirectory, CurrentFrame.ToString() + ".uint8");

        File.WriteAllBytes(trackedFramePath, output);

        entry.Free();
        colorEntry.Free();
    }
    // Depending on the mode, a different script passes its data source into this function, which computes the mesh obtained on each Update.
    private void RefreshData(ushort[] depthData, int colorWidth, int colorHeight)
    {
        Debug.Log("W = " + colorWidth);
        Debug.Log("H = " + colorHeight);
        var frameDesc = _Sensor.DepthFrameSource.FrameDescription;

        ColorSpacePoint[] colorSpace = new ColorSpacePoint[depthData.Length];
        Debug.Log("Old " + depthData.Length);
        _Mapper.MapDepthFrameToColorSpace(depthData, colorSpace);
        Debug.Log("New " + depthData.Length);

        for (int y = 0; y < frameDesc.Height; y += _DownsampleSize)
        {
            for (int x = 0; x < frameDesc.Width; x += _DownsampleSize)
            {
                int indexX     = x / _DownsampleSize;                                     // mesh point index along X
                int indexY     = y / _DownsampleSize;                                     // mesh point index along Y
                int smallIndex = (indexY * (frameDesc.Width / _DownsampleSize)) + indexX; // linear index into the downsampled mesh

                double avg = GetAvg(depthData, x, y, frameDesc.Width, frameDesc.Height);  // average depth over each downsample block

                avg = avg * _DepthScale;                                                  // scale the depth down

                _Vertices[smallIndex].z = (float)avg;                                     // assign each mesh point its Z value

                // Update UV mapping with CDRP
                var colorSpacePoint = colorSpace[(y * frameDesc.Width) + x];

                _UV[smallIndex] = new Vector2(colorSpacePoint.X / colorWidth, colorSpacePoint.Y / colorHeight);
            }
        }

        _Mesh.vertices  = _Vertices;
        _Mesh.uv        = _UV;
        _Mesh.triangles = _Triangles;
        _Mesh.RecalculateNormals();
    }
Example #24
    void Update()
    {
        if (_Sensor == null)
        {
            return;
        }

        if (_MultiManager == null)
        {
            return;
        }


        ushort[] rawdata = _MultiManager.GetDepthData();

        _Mapper.MapDepthFrameToColorSpace(rawdata, _colorSpacePoints);
        _Mapper.MapDepthFrameToCameraSpace(rawdata, _cameraSpacePoints);

        for (int i = 0; i < _cameraSpacePoints.Length; i++)
        {
            vertices[i] = new Vector3(_cameraSpacePoints[i].X, _cameraSpacePoints[i].Y, _cameraSpacePoints[i].Z);
            Colors[i]   = Color.white;
        }
    }
    private void renderColorOnDepthThread()
    {
        while (runColorOnDepthThread)
        {
            if (newColorOnDepthFrame && recordColorOnDepthFrame)
            {
                newColorOnDepthFrame = false;

                mapper.MapDepthFrameToColorSpace(depthData, points);
                Array.Clear(output, 0, output.Length);
                for (var y = 0; y < DEPTH_HEIGHT; y++)
                {
                    for (var x = 0; x < DEPTH_WIDTH; x++)
                    {
                        int depthIndex = x + (y * DEPTH_WIDTH);
                        var cPoint     = points[depthIndex];
                        int colorX     = (int)Math.Floor(cPoint.X + 0.5);
                        int colorY     = (int)Math.Floor(cPoint.Y + 0.5);

                        if ((colorX >= 0) && (colorX < COLOR_WIDTH) && (colorY >= 0) && (colorY < COLOR_HEIGHT))
                        {
                            int ci           = ((colorY * COLOR_WIDTH) + colorX) * 4;
                            int displayIndex = depthIndex * 4;

                            output[displayIndex + 0] = colorData[ci];
                            output[displayIndex + 1] = colorData[ci + 1];
                            output[displayIndex + 2] = colorData[ci + 2];
                            output[displayIndex + 3] = 0xff;
                        }
                    }
                }
            }

            Thread.Sleep(1);
        }
    }
        /// <summary>
        /// Process the color and depth inputs, converting the color into the depth space
        /// </summary>
        private unsafe void MapColorToDepth()
        {
            mapper.MapDepthFrameToColorSpace(depthImagePixels, colorCoordinates);

            lock (rawDataLock)
            {
                // Fill in the visibility depth map.
                Array.Clear(depthVisibilityTestMap, 0, depthVisibilityTestMap.Length);
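                // The test map acts as a low-resolution z-buffer seen from the color camera:
                // it keeps the nearest depth value mapping to each downsampled color pixel,
                // so depth pixels occluded from the color camera can be rejected later.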
                fixed(ushort *ptrDepthVisibilityPixels = depthVisibilityTestMap, ptrDepthPixels = depthImagePixels)
                {
                    for (int index = 0; index < depthImagePixels.Length; ++index)
                    {
                        if (!float.IsInfinity(colorCoordinates[index].X) && !float.IsInfinity(colorCoordinates[index].Y))
                        {
                            int x = (int)(Math.Floor(colorCoordinates[index].X + 0.5f) / ColorDownsampleFactor);
                            int y = (int)(Math.Floor(colorCoordinates[index].Y + 0.5f) / ColorDownsampleFactor);

                            if ((x >= 0) && (x < depthVisibilityTestMapWidth) &&
                                (y >= 0) && (y < depthVisibilityTestMapHeight))
                            {
                                int depthVisibilityTestIndex = (y * depthVisibilityTestMapWidth) + x;
                                if ((ptrDepthVisibilityPixels[depthVisibilityTestIndex] == 0) ||
                                    (ptrDepthVisibilityPixels[depthVisibilityTestIndex] > ptrDepthPixels[index]))
                                {
                                    ptrDepthVisibilityPixels[depthVisibilityTestIndex] = ptrDepthPixels[index];
                                }
                            }
                        }
                    }
                }

                // Here we make use of unsafe code to just copy the whole pixel as an int for performance reasons, as we do
                // not need access to the individual rgba components.
                fixed(byte *ptrColorPixels = colorImagePixels)
                {
                    int *rawColorPixels = (int *)ptrColorPixels;

                    // Horizontal flip the color image as the standard depth image is flipped internally in Kinect Fusion
                    // to give a viewpoint as though from behind the Kinect looking forward by default.
                    Parallel.For(
                        0,
                        depthHeight,
                        y =>
                    {
                        int destIndex        = y * depthWidth;
                        int flippedDestIndex = destIndex + (depthWidth - 1);     // horizontally mirrored

                        for (int x = 0; x < depthWidth; ++x, ++destIndex, --flippedDestIndex)
                        {
                            // calculate index into depth array
                            int colorInDepthX            = (int)Math.Floor(colorCoordinates[destIndex].X + 0.5);
                            int colorInDepthY            = (int)Math.Floor(colorCoordinates[destIndex].Y + 0.5);
                            int depthVisibilityTestX     = (int)(colorInDepthX / ColorDownsampleFactor);
                            int depthVisibilityTestY     = (int)(colorInDepthY / ColorDownsampleFactor);
                            int depthVisibilityTestIndex = (depthVisibilityTestY * depthVisibilityTestMapWidth) + depthVisibilityTestX;

                            // make sure the depth pixel maps to a valid point in color space
                            if (colorInDepthX >= 0 && colorInDepthX < colorWidth && colorInDepthY >= 0 &&
                                colorInDepthY < colorHeight && depthImagePixels[destIndex] != 0)
                            {
                                ushort depthTestValue = depthVisibilityTestMap[depthVisibilityTestIndex];

                                if ((depthImagePixels[destIndex] - depthTestValue) < DepthVisibilityTestThreshold)
                                {
                                    // Calculate index into color array- this will perform a horizontal flip as well
                                    int sourceColorIndex = colorInDepthX + (colorInDepthY * colorWidth);

                                    // Copy color pixel
                                    resampledColorImagePixelsAlignedToDepth[flippedDestIndex] = rawColorPixels[sourceColorIndex];
                                }
                                else
                                {
                                    resampledColorImagePixelsAlignedToDepth[flippedDestIndex] = 0;
                                }
                            }
                            else
                            {
                                resampledColorImagePixelsAlignedToDepth[flippedDestIndex] = 0;
                            }
                        }
                    });
                }
            }

            resampledColorFrameDepthAligned.CopyPixelDataFrom(resampledColorImagePixelsAlignedToDepth);
        }
Example #27
        /// <summary>
        /// Converts a depth frame to the corresponding System.Windows.Media.Imaging.BitmapSource and removes the background (green-screen effect).
        /// </summary>
        /// <param name="depthFrame">The specified depth frame.</param>
        /// <param name="colorFrame">The specified color frame.</param>
        /// <param name="bodyIndexFrame">The specified body index frame.</param>
        /// <returns>The corresponding System.Windows.Media.Imaging.BitmapSource representation of image.</returns>
        public BitmapSource GreenScreen(ColorFrame colorFrame, DepthFrame depthFrame, BodyIndexFrame bodyIndexFrame)
        {
            int colorWidth  = colorFrame.FrameDescription.Width;
            int colorHeight = colorFrame.FrameDescription.Height;

            int depthWidth  = depthFrame.FrameDescription.Width;
            int depthHeight = depthFrame.FrameDescription.Height;

            int bodyIndexWidth  = bodyIndexFrame.FrameDescription.Width;
            int bodyIndexHeight = bodyIndexFrame.FrameDescription.Height;

            if (_displayPixels == null)
            {
                _depthData     = new ushort[depthWidth * depthHeight];
                _bodyData      = new byte[depthWidth * depthHeight];
                _colorData     = new byte[colorWidth * colorHeight * BYTES_PER_PIXEL];
                _displayPixels = new byte[depthWidth * depthHeight * BYTES_PER_PIXEL];
                _colorPoints   = new ColorSpacePoint[depthWidth * depthHeight];
                _bitmap        = new WriteableBitmap(depthWidth, depthHeight, DPI, DPI, FORMAT, null);
            }

            if (((depthWidth * depthHeight) == _depthData.Length) && ((colorWidth * colorHeight * BYTES_PER_PIXEL) == _colorData.Length) && ((bodyIndexWidth * bodyIndexHeight) == _bodyData.Length))
            {
                depthFrame.CopyFrameDataToArray(_depthData);

                if (colorFrame.RawColorImageFormat == ColorImageFormat.Bgra)
                {
                    colorFrame.CopyRawFrameDataToArray(_colorData);
                }
                else
                {
                    colorFrame.CopyConvertedFrameDataToArray(_colorData, ColorImageFormat.Bgra);
                }

                bodyIndexFrame.CopyFrameDataToArray(_bodyData);

                _coordinateMapper.MapDepthFrameToColorSpace(_depthData, _colorPoints); // map depth space to color space

                Array.Clear(_displayPixels, 0, _displayPixels.Length);

                for (int y = 0; y < depthHeight; ++y)
                {
                    for (int x = 0; x < depthWidth; ++x)
                    {
                        int depthIndex = (y * depthWidth) + x;

                        byte player = _bodyData[depthIndex];

                        if (player != 0xff)
                        {
                            ColorSpacePoint colorPoint = _colorPoints[depthIndex];

                            int colorX = (int)Math.Floor(colorPoint.X + 0.5);
                            int colorY = (int)Math.Floor(colorPoint.Y + 0.5);

                            if ((colorX >= 0) && (colorX < colorWidth) && (colorY >= 0) && (colorY < colorHeight))
                            {
                                int colorIndex   = ((colorY * colorWidth) + colorX) * BYTES_PER_PIXEL;
                                int displayIndex = depthIndex * BYTES_PER_PIXEL;

                                _displayPixels[displayIndex + 0] = _colorData[colorIndex];
                                _displayPixels[displayIndex + 1] = _colorData[colorIndex + 1];
                                _displayPixels[displayIndex + 2] = _colorData[colorIndex + 2];
                                _displayPixels[displayIndex + 3] = 0xff; // fully opaque alpha

                                // pixel layout: B G R A
                            }
                        }
                    }
                }

                _bitmap.Lock();

                Marshal.Copy(_displayPixels, 0, _bitmap.BackBuffer, _displayPixels.Length);
                _bitmap.AddDirtyRect(new Int32Rect(0, 0, depthWidth, depthHeight));

                _bitmap.Unlock();
            }

            return(_bitmap);
        }
    private void RefreshData(ushort[] depthData)
    {
        var frameDesc = _Sensor.DepthFrameSource.FrameDescription;

        ColorSpacePoint[] colorSpace = new ColorSpacePoint[depthData.Length];
        _Mapper.MapDepthFrameToColorSpace(depthData, colorSpace);
        Hashtable depthCount = new Hashtable();

        if (_flagDeskDepth)
        {
            int maxCount = 0;
            for (int i = 0; i < depthData.Length; i++)
            {
                if (depthCount.ContainsKey(depthData[i]))
                {
                    if ((int)depthData[i] != 0)
                    {
                        depthCount[depthData[i]] = (int)depthCount[depthData[i]] + 1;
                    }
                    if ((int)depthCount[depthData[i]] > maxCount)
                    {
                        maxCount     = (int)depthCount[depthData[i]];
                        _maxCountKey = depthData[i];
                    }
                }
                else
                {
                    depthCount.Add(depthData[i], 1);
                }
            }
            _flagDeskDepth = false;
            Debug.Log(_maxCountKey);
            //Debug.Log("Max" + maxCountKey);
        }
        //foreach (ushort key in depthCount.Keys)
        //{
        //    if((int)depthCount[key] > maxCount)
        //    {
        //        maxCount = (int)depthCount[key];
        //        maxCountKey = key;
        //    }
        //}
        //Debug.Log(maxCountKey);
        //Size the Mat that stores the depth image
        _Depth = new Mat(frameDesc.Height / _DownsampleSize, frameDesc.Width / _DownsampleSize, CvType.CV_8UC1);

        for (int y = 0; y < frameDesc.Height; y += _DownsampleSize)
        {
            for (int x = 0; x < frameDesc.Width; x += _DownsampleSize)
            {
                int indexX     = x / _DownsampleSize;
                int indexY     = y / _DownsampleSize;
                int smallIndex = (indexY * (frameDesc.Width / _DownsampleSize)) + indexX;

                double avg = GetAvg(depthData, x, y, frameDesc.Width, frameDesc.Height);

                //avg = avg * _DepthScale;

                if (indexX == 64 && indexY == 53 && _kinectDistance != null)
                {
                    _kinectDistance.text = "distance: " + avg;
                }
                if (x == 0 && y == 0)
                {
                    Distance_UpLeft = avg;
                }
                if (x == (frameDesc.Width - _DownsampleSize) && y == 0)
                {
                    Distance_UpRight = avg;
                }
                if (x == 0 && y == (frameDesc.Height - _DownsampleSize))
                {
                    Distance_DownLeft = avg;
                }
                if (x == (frameDesc.Width - _DownsampleSize) && y == (frameDesc.Height - _DownsampleSize))
                {
                    Distance_DonwRight = avg;
                }
                //distance 1000 mm, plus or minus 200 mm

                //avg = (avg > (double)(_maxCountKey + _binaryIndex)) ? 0 : 255;
                avg = 255 - (avg / 4000 * 255);
                avg = (avg == 255) ? 0 : avg;

                _Vertices[smallIndex].z = (float)avg;
                _Depth.put(y / _DownsampleSize, frameDesc.Width / _DownsampleSize - x / _DownsampleSize, avg);
                // Update UV mapping with CDRP
                //var colorSpacePoint = colorSpace[(y * frameDesc.Width) + x];
                // _UV[smallIndex] = new Vector2(colorSpacePoint.X / colorWidth, colorSpacePoint.Y / colorHeight);
            }
        }
    }
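For reference, here is a minimal sketch of how the downsampled _Depth Mat built above could be displayed in the scene. It assumes the OpenCV for Unity asset already used in this file; DepthPreview, targetRenderer and Show are illustrative names, and the exact namespaces vary between asset versions.

using OpenCVForUnity;   // Mat, Imgproc, Utils (location varies by asset version)
using UnityEngine;

public class DepthPreview : MonoBehaviour
{
    public Renderer targetRenderer;   // e.g. a quad that shows the depth image
    private Texture2D _texture;
    private Mat _rgba;

    // Call after RefreshData() has filled depthMat (CV_8UC1)
    public void Show(Mat depthMat)
    {
        if (_texture == null)
        {
            // matToTexture2D requires the texture size to match the Mat size
            _texture = new Texture2D(depthMat.cols(), depthMat.rows(), TextureFormat.RGBA32, false);
            _rgba    = new Mat();
            targetRenderer.material.mainTexture = _texture;
        }
        Imgproc.cvtColor(depthMat, _rgba, Imgproc.COLOR_GRAY2RGBA);
        Utils.matToTexture2D(_rgba, _texture);
    }
}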
    private void RefreshData(ushort[] depthData, int colorWidth, int colorHeight)
    {
        var frameDesc = _Sensor.DepthFrameSource.FrameDescription;

        ColorSpacePoint[]  colorSpace  = new ColorSpacePoint[depthData.Length];
        CameraSpacePoint[] cameraSpace = new CameraSpacePoint[depthData.Length];
        _Mapper.MapDepthFrameToColorSpace(depthData, colorSpace);
        _Mapper.MapDepthFrameToCameraSpace(depthData, cameraSpace);

        for (int y = 0; y < frameDesc.Height; y += _DownsampleSize)
        {
            for (int x = 0; x < frameDesc.Width; x += _DownsampleSize)
            {
                int indexX     = x / _DownsampleSize;
                int indexY     = y / _DownsampleSize;
                int smallIndex = (indexY * (frameDesc.Width / _DownsampleSize)) + indexX;
                int bigIndex   = y * frameDesc.Width + x;

                _Vertices[smallIndex].x = -cameraSpace[bigIndex].X;
                _Vertices[smallIndex].y = cameraSpace[bigIndex].Y;
                _Vertices[smallIndex].z = cameraSpace[bigIndex].Z;

                // Update UV mapping with CDRP
                var colorSpacePoint = colorSpace[bigIndex];
                _UV[smallIndex] = new Vector2(colorSpacePoint.X / colorWidth, colorSpacePoint.Y / colorHeight);
            }
        }

        _Mesh.vertices  = _Vertices;
        _Mesh.uv        = _UV;
        _Mesh.triangles = _Triangles;
        _Mesh.RecalculateNormals();

        bool sendMesh = false;

        lock (readyToSendLock)
        {
            sendMesh = readyToSendMesh;
        }

        if (sendMesh)
        {
            var depthWidth  = frameDesc.Width / _DownsampleSize;
            var depthHeight = frameDesc.Height / _DownsampleSize;
            var depthCount  = depthWidth * depthHeight;

            // Send the width and height of the depth data
            SendDataToClient(BitConverter.GetBytes(depthWidth));
            SendDataToClient(BitConverter.GetBytes(depthHeight));

            // Collect the point and UV data and send them as one message:
            // 5 floats per point (X, Y, Z, U, V)
            int    byteCount  = 5 * sizeof(float) * depthCount;
            byte[] dataToSend = new byte[byteCount];
            for (int i = 0; i < depthCount; i++)
            {
                Buffer.BlockCopy(BitConverter.GetBytes(_Vertices[i].x), 0, dataToSend, i * 5 * sizeof(float), sizeof(float));
                Buffer.BlockCopy(BitConverter.GetBytes(_Vertices[i].y), 0, dataToSend, (i * 5 + 1) * sizeof(float), sizeof(float));
                Buffer.BlockCopy(BitConverter.GetBytes(_Vertices[i].z), 0, dataToSend, (i * 5 + 2) * sizeof(float), sizeof(float));
                Buffer.BlockCopy(BitConverter.GetBytes(_UV[i].x), 0, dataToSend, (i * 5 + 3) * sizeof(float), sizeof(float));
                Buffer.BlockCopy(BitConverter.GetBytes(_UV[i].y), 0, dataToSend, (i * 5 + 4) * sizeof(float), sizeof(float));
            }

            SendDataToClient(dataToSend);

            // Compress the color data to JPG and send it as one message
            byte[] colorDataCopy = new byte[colorData.Length];
            byte[] jpgData;
            Buffer.BlockCopy(colorData, 0, colorDataCopy, 0, colorData.Length);
            // The JPG compression code below swaps the red and blue channels, so swap them here in advance
            for (int y = 0; y < colorHeight; y++)
            {
                for (int x = 0; x < colorWidth; x++)
                {
                    int index = y * colorWidth * 4 + x * 4;
                    // Swap the first byte (red) and the third byte (blue)
                    byte temp = colorDataCopy[index];
                    colorDataCopy[index]     = colorDataCopy[index + 2];
                    colorDataCopy[index + 2] = temp;
                }
            }

            try
            {
                // Source: https://stackoverflow.com/questions/11730373/byte-array-to-bitmap-image
                Bitmap     bitmap = new Bitmap(colorWidth, colorHeight, System.Drawing.Imaging.PixelFormat.Format32bppRgb);
                BitmapData bmData = bitmap.LockBits(new Rectangle(0, 0, colorWidth, colorHeight), ImageLockMode.ReadWrite, bitmap.PixelFormat);
                Marshal.Copy(colorDataCopy, 0, bmData.Scan0, colorWidth * colorHeight * 4);
                bitmap.UnlockBits(bmData);

                using (var ms = new MemoryStream())
                {
                    bitmap.Save(ms, ImageFormat.Jpeg);
                    jpgData = ms.ToArray();

                    textInGui.text = "JPG length: " + jpgData.Length;

                    // Send the length of the data first
                    SendDataToClient(BitConverter.GetBytes(jpgData.Length));
                    SendDataToClient(jpgData);
                }

                lock (readyToSendLock)
                {
                    readyToSendMesh = false;
                }
            } catch (Exception e)
            {
                textInGui.text = e.Message;
            }
        }
    }
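The wire format produced above is: two int32s (downsampled width and height), one message of 5 floats per point (X, Y, Z, U, V), then an int32 JPG length followed by the JPG bytes. A minimal sketch of the matching reader, assuming the data arrives as a plain byte stream (MeshWireFormat and ReadExactly are illustrative names; the actual framing used by SendDataToClient is not shown in this snippet):

using System;
using System.IO;

static class MeshWireFormat
{
    // Reads exactly count bytes (a TCP stream may return partial reads)
    static byte[] ReadExactly(Stream s, int count)
    {
        byte[] buf = new byte[count];
        int off = 0;
        while (off < count)
        {
            int n = s.Read(buf, off, count - off);
            if (n <= 0) throw new EndOfStreamException();
            off += n;
        }
        return buf;
    }

    public static void ReadFrame(Stream s, out float[] xyzuv, out byte[] jpg)
    {
        int width  = BitConverter.ToInt32(ReadExactly(s, 4), 0);
        int height = BitConverter.ToInt32(ReadExactly(s, 4), 0);
        int count  = width * height;

        // 5 floats per point, in the same order the sender wrote them
        byte[] raw = ReadExactly(s, count * 5 * sizeof(float));
        xyzuv = new float[count * 5];
        Buffer.BlockCopy(raw, 0, xyzuv, 0, raw.Length);

        // Length-prefixed JPG of the color frame
        int jpgLength = BitConverter.ToInt32(ReadExactly(s, 4), 0);
        jpg = ReadExactly(s, jpgLength);
    }
}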
        public BitmapSource GreenScreenCustom(ColorFrame colorFrame, DepthFrame depthFrame, BodyIndexFrame bodyIndexFrame)
        {
            // Working buffers and output bitmap
            ushort[]          _depthData        = null;
            byte[]            _bodyData         = null;
            byte[]            _colorData        = null;
            byte[]            _displayPixels    = null;
            ColorSpacePoint[] _colorPoints      = null;
            CoordinateMapper  _coordinateMapper = _sensor.CoordinateMapper;

            int         BYTES_PER_PIXEL = (PixelFormats.Bgra32.BitsPerPixel + 7) / 8;
            PixelFormat FORMAT          = PixelFormats.Bgra32;
            double      DPI             = 96.0;

            WriteableBitmap _bitmap = null;

            // Frame dimensions

            int colorWidth  = colorFrame.FrameDescription.Width;
            int colorHeight = colorFrame.FrameDescription.Height;

            int depthWidth  = depthFrame.FrameDescription.Width;
            int depthHeight = depthFrame.FrameDescription.Height;

            int bodyIndexWidth  = bodyIndexFrame.FrameDescription.Width;
            int bodyIndexHeight = bodyIndexFrame.FrameDescription.Height;

            // Allocate the working buffers (locals, so they are rebuilt on every call)
            _depthData     = new ushort[depthWidth * depthHeight];
            _bodyData      = new byte[depthWidth * depthHeight];
            _colorData     = new byte[colorWidth * colorHeight * BYTES_PER_PIXEL];
            _displayPixels = new byte[depthWidth * depthHeight * BYTES_PER_PIXEL];
            _colorPoints   = new ColorSpacePoint[depthWidth * depthHeight];
            _bitmap        = new WriteableBitmap(depthWidth, depthHeight, DPI, DPI, FORMAT, null);

            if (((depthWidth * depthHeight) == _depthData.Length) && ((colorWidth * colorHeight * BYTES_PER_PIXEL) == _colorData.Length) && ((bodyIndexWidth * bodyIndexHeight) == _bodyData.Length))
            {
                depthFrame.CopyFrameDataToArray(_depthData);

                if (colorFrame.RawColorImageFormat == ColorImageFormat.Bgra)
                {
                    colorFrame.CopyRawFrameDataToArray(_colorData);
                }
                else
                {
                    colorFrame.CopyConvertedFrameDataToArray(_colorData, ColorImageFormat.Bgra);
                }

                bodyIndexFrame.CopyFrameDataToArray(_bodyData);

                _coordinateMapper.MapDepthFrameToColorSpace(_depthData, _colorPoints);

                Array.Clear(_displayPixels, 0, _displayPixels.Length);
                Array.Clear(rarray_final, 0, rarray_final.Length);
                Array.Clear(garray_final, 0, garray_final.Length);
                Array.Clear(barray_final, 0, barray_final.Length);


                for (int y = 0; y < depthHeight; ++y)
                {
                    for (int x = 0; x < depthWidth; ++x)
                    {
                        int depthIndex = (y * depthWidth) + x;

                        byte player = _bodyData[depthIndex];

                        // 0xff marks a pixel that belongs to no tracked body
                        if (player != 0xff)
                        {
                            ColorSpacePoint colorPoint = _colorPoints[depthIndex];

                            int colorX = (int)Math.Floor(colorPoint.X + 0.5);
                            int colorY = (int)Math.Floor(colorPoint.Y + 0.5);

                            if ((colorX >= 0) && (colorX < colorWidth) && (colorY >= 0) && (colorY < colorHeight))
                            {
                                int colorIndex   = ((colorY * colorWidth) + colorX) * BYTES_PER_PIXEL;
                                int displayIndex = depthIndex * BYTES_PER_PIXEL;

                                _displayPixels[displayIndex + 0] = _colorData[colorIndex];
                                _displayPixels[displayIndex + 1] = _colorData[colorIndex + 1];
                                _displayPixels[displayIndex + 2] = _colorData[colorIndex + 2];
                                _displayPixels[displayIndex + 3] = 0xff;

                                barray_final[y, x] = _colorData[colorIndex];
                                garray_final[y, x] = _colorData[colorIndex + 1];
                                rarray_final[y, x] = _colorData[colorIndex + 2];

                                // For ROS: the same pixel with red and blue swapped (RGBA byte order)
                                _finaldisplayPixels[displayIndex + 0] = _colorData[colorIndex + 2];
                                _finaldisplayPixels[displayIndex + 1] = _colorData[colorIndex + 1];
                                _finaldisplayPixels[displayIndex + 2] = _colorData[colorIndex];
                                _finaldisplayPixels[displayIndex + 3] = 0xff;
                            }
                        }
                    }
                }

                _bitmap.Lock();

                Marshal.Copy(_displayPixels, 0, _bitmap.BackBuffer, _displayPixels.Length);
                _bitmap.AddDirtyRect(new Int32Rect(0, 0, depthWidth, depthHeight));

                _bitmap.Unlock();
            }

            return(_bitmap);
        }
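GreenScreenCustom expects one color, one depth and one body-index frame per call. A minimal sketch of driving it from a MultiSourceFrameReader; the reader setup and the camera Image control are assumptions, since the calling code is not part of this snippet:

// In the window constructor (illustrative):
//   _sensor = KinectSensor.GetDefault();
//   _reader = _sensor.OpenMultiSourceFrameReader(FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.BodyIndex);
//   _reader.MultiSourceFrameArrived += Reader_MultiSourceFrameArrived;

private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    MultiSourceFrame multiFrame = e.FrameReference.AcquireFrame();
    if (multiFrame == null) { return; }

    using (ColorFrame colorFrame = multiFrame.ColorFrameReference.AcquireFrame())
    using (DepthFrame depthFrame = multiFrame.DepthFrameReference.AcquireFrame())
    using (BodyIndexFrame bodyIndexFrame = multiFrame.BodyIndexFrameReference.AcquireFrame())
    {
        if (colorFrame == null || depthFrame == null || bodyIndexFrame == null) { return; }
        // camera is an assumed WPF Image control in the window
        camera.Source = GreenScreenCustom(colorFrame, depthFrame, bodyIndexFrame);
    }
}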
        /// <summary>
        /// Constructor that is only usable while a Kinect is connected
        /// </summary>
        /// <param name="originalCoordinateMapper"></param>
        /// <param name="depthWidth"></param>
        /// <param name="depthHeight"></param>
        public LocalCoordinateMapper(CoordinateMapper originalCoordinateMapper, int depthWidth, int depthHeight)
        {
            this.depthWidth = depthWidth;
            this.depthHeight = depthHeight;
            this.depthFrameToCameraSpaceTable = originalCoordinateMapper.GetDepthFrameToCameraSpaceTable();

            int length = depthWidth * depthHeight;
            ushort[] depthBuffer = new ushort[length];
            ColorSpacePoint[] colorSpacePoints = new ColorSpacePoint[length];            
            this.depthFrameToColorSpacfeTable = new PointF[length];

            int depth = 1500; // any value will do (the depth-dependent offset is removed below)
            for (int i = 0; i < length; i++)
            {
                depthBuffer[i] = (ushort)depth;
            }
            originalCoordinateMapper.MapDepthFrameToColorSpace(depthBuffer, colorSpacePoints);
            // Build a depth-independent table: subtracting D / depth removes the
            // depth-dependent part of the X offset between depth and color cameras
            for (int i = 0; i < length; i++)
            {
                PointF tempP = new PointF();
                tempP.X = (float)(colorSpacePoints[i].X - D / depth);
                tempP.Y = colorSpacePoints[i].Y;
                this.depthFrameToColorSpacfeTable[i] = tempP;
            }
        }
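Given the inverse-depth model implied by the "- D / depth" term above, the precomputed table can map a single depth pixel to color space for any measured depth without a live CoordinateMapper. A minimal sketch (MapDepthPointToColorSpace is an illustrative name, not part of the original class):

public PointF MapDepthPointToColorSpace(int depthX, int depthY, ushort depth)
{
    PointF basePoint = this.depthFrameToColorSpacfeTable[depthY * this.depthWidth + depthX];
    if (depth == 0)
    {
        return basePoint; // no depth data; return the uncorrected table entry
    }
    // Re-apply the depth-dependent X offset that was removed when the table was built
    PointF result = new PointF();
    result.X = (float)(basePoint.X + D / depth);
    result.Y = basePoint.Y;
    return result;
}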