/// <summary>
/// Diagnostic routine: maps the current depth frame into camera space (3D points)
/// and into depth space, then logs how many color pixels received a valid mapping.
/// The Kinect SDK marks unmappable pixels with -Infinity coordinates.
/// </summary>
/// <param name="myDepthMetaData">Holds the raw depth frame (FrameData) used as the mapping source.</param>
/// <param name="myCoordinateMapper">Kinect coordinate mapper performing both conversions.</param>
public static void Test_GetCameraPoints(DepthMetaData myDepthMetaData, CoordinateMapper myCoordinateMapper)
{
    int colorPixelCount = ColorMetaData.XColorMaxKinect * ColorMetaData.YColorMaxKinect;

    // --- Color pixel -> 3D camera-space point --------------------------------
    CameraSpacePoint[] cameraMappedPoints = new CameraSpacePoint[colorPixelCount];
    myCoordinateMapper.MapColorFrameToCameraSpace(myDepthMetaData.FrameData, cameraMappedPoints);

    List<CameraSpacePoint> myMappedPoints = new List<CameraSpacePoint>();
    for (int i = 0; i < cameraMappedPoints.Length; ++i)
    {
        // The point coordinates are float; test them directly with
        // float.IsNegativeInfinity instead of widening to double first.
        if (!float.IsNegativeInfinity(cameraMappedPoints[i].X) && !float.IsNegativeInfinity(cameraMappedPoints[i].Y))
        {
            myMappedPoints.Add(cameraMappedPoints[i]);
        }
    }
    // FIX: the previous message said "ColorSpacePoints" although these are camera-space points.
    System.Diagnostics.Debug.WriteLine("--> Number of CameraSpacePoints: " + myMappedPoints.Count.ToString());

    // --- Color pixel -> depth-space coordinate -------------------------------
    DepthSpacePoint[] colorMappedToDepthPoints = new DepthSpacePoint[colorPixelCount];
    myCoordinateMapper.MapColorFrameToDepthSpace(myDepthMetaData.FrameData, colorMappedToDepthPoints);

    List<DepthSpacePoint> myMappedPointsDepth = new List<DepthSpacePoint>();
    for (int i = 0; i < colorMappedToDepthPoints.Length; ++i)
    {
        if (!float.IsNegativeInfinity(colorMappedToDepthPoints[i].X) && !float.IsNegativeInfinity(colorMappedToDepthPoints[i].Y))
        {
            myMappedPointsDepth.Add(colorMappedToDepthPoints[i]);
        }
    }
    // FIX: the previous message said "ColorDepthPoints"; these are depth-space points.
    System.Diagnostics.Debug.WriteLine("--> Number of DepthSpacePoints: " + myMappedPointsDepth.Count.ToString());
}
/// <summary>
/// Polls the Kinect for the latest depth and color frames each update:
/// copies depth into frameData, maps it to 3D camera points, renders a
/// grayscale depth texture, then copies/uploads the color frame and maps
/// the color frame into camera space.
/// </summary>
void updateKinect()
{
    // FIX: also guard colorReader — it is dereferenced below and would
    // throw a NullReferenceException if only depthReader was checked.
    if (!kinectIsInit || !sensor.IsOpen || depthReader == null || colorReader == null)
    {
        Debug.Log("Kinect is not init or open");
        initKinect();
        return;
    }

    // FIX: use using-blocks so frames are disposed even if a copy/map call
    // throws; the previous manual Dispose() leaked the frame on exceptions.
    using (DepthFrame depthFrame = depthReader.AcquireLatestFrame())
    {
        if (depthFrame != null)
        {
            depthFrame.CopyFrameDataToArray(frameData);
            mapper.MapDepthFrameToCameraSpace(frameData, cameraPoints);

            // Compress the 16-bit depth values into a visible 8-bit gray level,
            // written identically to the R, G and B channels.
            for (int i = 0; i < frameData.Length; ++i)
            {
                byte shade = (byte)(frameData[i] * 1f / 20);
                depthTexData[3 * i + 0] = shade;
                depthTexData[3 * i + 1] = shade;
                depthTexData[3 * i + 2] = shade;
            }
            depthTex.LoadRawTextureData(depthTexData);
            depthTex.Apply();
        }
    }

    using (ColorFrame colorFrame = colorReader.AcquireLatestFrame())
    {
        if (colorFrame != null)
        {
            colorFrame.CopyConvertedFrameDataToArray(colorData, ColorImageFormat.Rgba);
            colorTex.LoadRawTextureData(colorData);
            colorTex.Apply();
            // Map every color pixel to a 3D point using the depth data read above.
            mapper.MapColorFrameToCameraSpace(frameData, colorPoints);
        }
    }
}
/// <summary>
/// Prepares the buffers to send: copies the displayed color and depth bitmaps
/// into byte arrays, acquires a fresh depth frame, maps every color pixel to a
/// 3D camera-space point, and serializes the point cloud into
/// depthPointsInColorCoordinate as packed floats (x, y, z per pixel).
/// </summary>
/// <returns>true when all buffers were filled; false when no depth frame was available.</returns>
public bool PrepareImageData()
{
    // Kinect v2 stream resolutions (color camera / depth camera).
    const int colorWidth = 1920, colorHeight = 1080;
    const int depthWidth = 512, depthHeight = 424;

    WriteableBitmap colorBitmap = this.ImageSource as WriteableBitmap;
    WriteableBitmap depthBitmap = this.DepthSource as WriteableBitmap;
    this.colorData = new Byte[colorBitmap.PixelHeight * colorBitmap.PixelWidth * bgr32BytesPerPixel];
    this.depthData = new Byte[depthBitmap.PixelHeight * depthBitmap.PixelWidth * gray8BytesPerPixel];
    colorBitmap.CopyPixels(this.colorData, colorBitmap.BackBufferStride, 0);
    depthBitmap.CopyPixels(this.depthData, depthBitmap.BackBufferStride, 0);

    float[] pointTemp = new float[colorWidth * colorHeight * 3]; // 3 dimensions (x, y, z)
    CameraSpacePoint[] cameraSpacePoints = new CameraSpacePoint[colorWidth * colorHeight];
    // FIX: renamed from "depthData" — the old local shadowed the byte[] field
    // this.depthData assigned just above, which was very easy to misread.
    ushort[] rawDepth = new ushort[depthWidth * depthHeight];

    Microsoft.Kinect.KinectSensor sensor = Microsoft.Kinect.KinectSensor.GetDefault();
    // FIX: dispose the reader and frame (previously leaked), and guard against
    // AcquireLatestFrame() returning null (it does when no frame is ready),
    // which previously crashed with a NullReferenceException.
    using (DepthFrameReader reader = sensor.DepthFrameSource.OpenReader())
    using (DepthFrame frame = reader.AcquireLatestFrame())
    {
        if (frame == null)
        {
            return false;
        }
        frame.CopyFrameDataToArray(rawDepth);
    }

    // Get 3D point coordinates for every color pixel.
    CoordinateMapper coordinateMapper = sensor.CoordinateMapper;
    coordinateMapper.MapColorFrameToCameraSpace(rawDepth, cameraSpacePoints);

    // Save the 3D point coordinates into the flat float array.
    for (int i = 0; i < colorWidth * colorHeight; ++i)
    {
        pointTemp[i * 3] = cameraSpacePoints[i].X;
        pointTemp[i * 3 + 1] = cameraSpacePoints[i].Y;
        pointTemp[i * 3 + 2] = cameraSpacePoints[i].Z;
    }

    depthPointsInColorCoordinate = new byte[colorWidth * colorHeight * 3 * sizeof(float)];
    Buffer.BlockCopy(pointTemp, 0, depthPointsInColorCoordinate, 0, depthPointsInColorCoordinate.Length);
    return true;
}
/// <summary>
/// Builds a 2D "laser scan" from a raw depth frame: for each of the 512 depth
/// columns it finds the nearest and farthest valid 3D points (by camera-space Z)
/// within a vertical band above the floor, and writes 6 floats per column into
/// scan2DArray (minZ, minX, minY, maxZ, maxX, maxY). Finally it maps the color
/// frame to camera space, reads the 3D point at the (target_x, target_y) color
/// pixel ("cone" target), appends it after the 512 column entries, and clears
/// the target coordinates.
/// NOTE(review): the max-branch lacks the depthArray[j] != 0u validity guard that
/// the min-branch has — presumably intentional (invalid points map to -Infinity Z
/// and never win the > max test), but confirm. If a column has no valid point,
/// min_index/max_index stay 0 and cameraSpace[0] is reported for that column.
/// </summary>
/// <param name="depthArray">Raw 512x424 depth frame (millimeter values; 0 = invalid pixel).</param>
private void calculateScanFromDepth(ushort[] depthArray)
{
    CoordinateMapper mapper = this.kinect.CoordinateMapper;
    //CameraIntrinsics intrinsics = mapper.GetDepthCameraIntrinsics();
    // One 3D point per depth pixel (512x424 grid, row-major).
    CameraSpacePoint[] cameraSpace = new CameraSpacePoint[512 * 424];
    mapper.MapDepthFrameToCameraSpace(depthArray, cameraSpace);
    // Console.WriteLine(cameraSpace[512 * 250].X);
    int i = 0, j = 0;
    float min = float.MaxValue, max = float.MinValue;
    int min_index = 0;
    int max_index = 0;
    // float yaw_angle = 0;
    float y = 0;
    // Vertical offset added to camera-space Y to express heights relative to the floor.
    float floor_offset = .425f;
    // Outer loop: one pass per depth-image column.
    for (i = 0; i < 512; i++)
    {
        min = float.MaxValue;
        max = float.MinValue;
        min_index = 0;
        max_index = 0;
        // yaw_angle = 125.0f - (70f / 512f) * i;
        // Inner loop: walk down column i (stride 512 = one image row per step).
        for (j = i; j < 512 * 423 + i; j += 512)
        {
            // float dist =(float) Math.Sqrt(Math.Pow(cameraSpace[j].Y, 2) + Math.Pow(cameraSpace[j].X, 2) + Math.Pow(cameraSpace[j].Z, 2));
            // Distance is taken as forward depth (Z), not Euclidean range.
            float dist = cameraSpace[j].Z;
            // Nearest point: only consider pixels with a real depth reading (non-zero).
            if (dist < min && depthArray[j] != 0u)
            {
                //min = depthArray[j];
                // min_angle = 30f - j/512 * (60f / 424f);
                // y = depthArray[j] * (float)Math.Sin(min_angle * Math.PI / 180) + floor_offset;
                y = cameraSpace[j].Y + floor_offset;
                // Accept only points between 5 cm and 2.4 m above the floor.
                if (y > 0.050 && y < 2.400)
                {
                    min = cameraSpace[j].Z;
                    min_index = j;
                }
            }
            // Farthest point: same idea with a slightly tighter height band (2.0 m).
            if (dist > max)
            {
                // max = depthArray[j];
                // max_angle = 30f - j/512 * (60f / 424f);
                y = cameraSpace[j].Y + floor_offset;// depthArray[j] * (float)Math.Sin(max_angle * Math.PI / 180) + floor_offset;
                if (y > .050 && y < 2.000)
                {
                    max = cameraSpace[j].Z;
                    max_index = j;
                }
            }
        }
        // Six floats per column: nearest (Z, X, Y) then farthest (Z, X, Y).
        scan2DArray[6 * i] = min;
        scan2DArray[6 * i + 1] = cameraSpace[min_index].X;
        scan2DArray[6 * i + 2] = cameraSpace[min_index].Y;
        scan2DArray[6 * i + 3] = max;
        scan2DArray[6 * i + 4] = cameraSpace[max_index].X;
        scan2DArray[6 * i + 5] = cameraSpace[max_index].Y;
    }
    // Re-map at color resolution to look up the 3D point under the target color pixel.
    cameraSpace = new CameraSpacePoint[1920 * 1080];
    mapper.MapColorFrameToCameraSpace(depthArray, cameraSpace);
    CameraSpacePoint p = cameraSpace[target_y * 1920 + target_x];
    Debug.Print("Cone (x,y,z) is (" + p.X.ToString() + ", " + p.Y.ToString() + ", " + p.Z.ToString() + ")\n");
    // Target point is appended right after the 512 column entries.
    scan2DArray[6 * 512] = p.X;
    scan2DArray[6 * 512 + 1] = p.Y;
    scan2DArray[6 * 512 + 2] = p.Z;
    // Reset the target so a stale coordinate is not reused next frame.
    target_y = 0;
    target_z = 0;
    target_x = 0;
}
/// <summary>
/// Composites a "green screen" frame: builds a foreground mask from depth
/// (pixels closer than _depthThresholdInMeters), optionally smooths it with a
/// 7x7 box blur, then per color pixel blends the background buffer toward the
/// live color buffer proportionally to the mask value. In high-quality mode the
/// output is also flipped horizontally. Returns the shared green-screen buffer,
/// or null if the mapper or input buffers are not ready yet.
/// </summary>
/// <param name="highQuality">true = blur the mask and mirror the output; false = raw mask, no flip.</param>
/// <returns>The composited BGRA-style buffer, or null when prerequisites are missing.</returns>
private byte[] ProcessGreenScreenFrame(bool highQuality)
{
    byte[] returnValue = null;
    if (_coordinateMapper != null && _rawDepthPixels != null && _cameraSpacePoints != null)
    {
        // One camera-space point per color pixel; Z = -Infinity marks unmappable pixels.
        _coordinateMapper.MapColorFrameToCameraSpace(_rawDepthPixels, _cameraSpacePoints);
        Box colorResolution = _frameResolutions[SourceType.COLOR];
        byte[] colorBuffer = _displayableBuffers[SourceType.COLOR];
        byte[] rawDepthMask = new byte[colorResolution.Area];
        byte[] cleanedDepthMask = null;
        byte[] greenScreenBuffer = _displayableBuffers[SourceType.GREEN_SCREEN];
        byte[] backgroundBuffer = _displayableBuffers[SourceType.BACKGROUND];
        int outputBytesPerPixel = _outputPixelFormat.BitsPerPixel / 8;
        // Mask values at or below this count as fully background (suppresses blur halo noise).
        int maskMinValue = 50;
        int outIndex;
        // Build the raw mask from depth data: 255 = foreground, 0 = background.
        int colorPixelIndex = 0;
        foreach (CameraSpacePoint thisPoint in _cameraSpacePoints)
        {
            float depthValue = thisPoint.Z;
            if (!float.IsNegativeInfinity(depthValue) && depthValue <= _depthThresholdInMeters)
            {
                rawDepthMask[colorPixelIndex] = 255;
            }
            colorPixelIndex++;
        }
        if (highQuality)
        {
            // Clean up the mask (soften hard depth edges with a 7x7 box blur).
            // TODO: Use a fast native library to do this
            cleanedDepthMask = PerformConvolution(rawDepthMask, colorResolution, _boxBlur_7_by_7);
        }
        else
        {
            cleanedDepthMask = rawDepthMask;
        }
        // Blend each color pixel: background where mask = 0, live color where mask = 255.
        for (colorPixelIndex = 0; colorPixelIndex < colorResolution.Area; ++colorPixelIndex)
        {
            int colorBufferIndex = outputBytesPerPixel * colorPixelIndex;
            if (highQuality)
            {
                // Flip the image horizontally: write to the mirrored column of the same row.
                int inCol = colorPixelIndex % colorResolution.Width;
                int inRow = colorPixelIndex / colorResolution.Width;
                int flippedCol = (colorResolution.Width - 1) - inCol;
                outIndex = outputBytesPerPixel * ((colorResolution.Width * inRow) + flippedCol);
            }
            else
            {
                outIndex = colorBufferIndex;
            }
            byte thisMaskValue = cleanedDepthMask[colorPixelIndex];
            // Rescale mask from (maskMinValue..255] to (0..1]; anything at or below the floor is 0.
            float normalizedMaskValue = thisMaskValue > maskMinValue ? ((float)thisMaskValue - (float)maskMinValue) / (255f - (float)maskMinValue) : 0f;
            // Blend each color channel (all but the last byte, which is alpha).
            for (int i = 0; i < outputBytesPerPixel - 1; ++i)
            {
                byte backgroundPixelValue = backgroundBuffer[colorBufferIndex + i];
                byte foregroundPixelValue = colorBuffer[colorBufferIndex + i];
                byte minValue = Math.Min(backgroundPixelValue, foregroundPixelValue);
                byte maxValue = Math.Max(backgroundPixelValue, foregroundPixelValue);
                float range = maxValue - minValue;
                // Move from background toward foreground by the normalized mask fraction.
                float direction = backgroundPixelValue > foregroundPixelValue ? -1f : 1f;
                byte outputValue = (byte)((float)backgroundPixelValue + (direction * (range * normalizedMaskValue)));
                greenScreenBuffer[outIndex + i] = outputValue;
            }
            // Output pixel is always fully opaque.
            greenScreenBuffer[outIndex + (outputBytesPerPixel - 1)] = 255;
        }
        returnValue = greenScreenBuffer;
    }
    return(returnValue);
}