/// <summary>
/// Calculates the 3-D point cloud using the native SDK functions.
/// </summary>
/// <returns>3-D point coordinates in meters.</returns>
/// <remarks>Format is [height, width, coordinate] with coordinate in {0,1,2} for x, y and z.</remarks>
private Point3fCameraImage Calc3DCoordinates()
{
    CameraSpacePoint[] cameraSpaceBuffer = new CameraSpacePoint[depthHeight * depthWidth];
    Point3fCameraImage pointCloud = new Point3fCameraImage(depthWidth, depthHeight);

    lock (this.depthFrameData)
    {
        // The SDK mapper is guarded separately so other camera operations stay serialized.
        lock (cameraLock)
        {
            coordinateMapper.MapDepthFrameToCameraSpace(depthFrameData, cameraSpaceBuffer);
        }

        for (int row = 0; row < depthHeight; row++)
        {
            for (int col = 0; col < depthWidth; col++)
            {
                // Mirror the image horizontally and flip x/y signs to match the
                // coordinate convention used elsewhere in this driver.
                int mirroredIndex = row * depthWidth + depthWidthMinusOne - col;
                CameraSpacePoint p = cameraSpaceBuffer[mirroredIndex];
                pointCloud[row, col] = new Point3f(-p.X, -p.Y, p.Z);
            }
        }

        pointCloud.TimeStamp = timestampDepth;
    }

    return pointCloud;
}
/// <summary>
/// Calculates the 3D point cloud from received data.
/// </summary>
/// <returns>point cloud</returns>
private Point3fCameraImage Calc3D()
{
    Point3fCameraImage result;

    lock (cameraLock)
    {
        result = new Point3fCameraImage(imageData.Width, imageData.Height);
        result.TimeStamp = (long)imageData.TimeStamp;

        // Camera intrinsics (principal point, focal lengths), radial distortion
        // coefficients, and focal-to-ray-crossing distance from the device.
        float cx = imageData.CX;
        float cy = imageData.CY;
        float fx = imageData.FX;
        float fy = imageData.FY;
        float k1 = imageData.K1;
        float k2 = imageData.K2;
        float f2rc = imageData.F2RC;

        FloatCameraImage distances = CalcDistance();

        for (int row = 0; row < imageData.Height; ++row)
        {
            for (int col = 0; col < imageData.Width; ++col)
            {
                // Radial distance in integer millimeters (truncated).
                int depthMm = (int)(distances[row, col] * 1000);

                // we map from image coordinates with origin top left and x horizontal (right) and y vertical
                // (downwards) to camera coordinates with origin in center and x to the left and y upwards (seen
                // from the sensor position)
                double rayX = (cx - col) / fx;
                double rayY = (cy - row) / fy;

                // correct the camera distortion
                double radiusSq = rayX * rayX + rayY * rayY;
                double radiusQuad = radiusSq * radiusSq;
                double distortionScale = 1 + k1 * radiusSq + k2 * radiusQuad;
                double distortedX = rayX * distortionScale;
                double distortedY = rayY * distortionScale;

                // Scale the unit ray by the measured radial distance, then shift z
                // by the focal-to-ray-crossing offset.
                double rayNorm = Math.Sqrt(distortedX * distortedX + distortedY * distortedY + 1.0);
                double camX = distortedX * depthMm / rayNorm;
                double camY = distortedY * depthMm / rayNorm;
                double camZ = depthMm / rayNorm - f2rc;

                // Millimeters back to meters.
                camX /= 1000;
                camY /= 1000;
                camZ /= 1000;

                // create point and save it
                result[row, col] = new Point3f((float)-camX, (float)-camY, (float)camZ);
            }
        }
    }

    return result;
}
/// <summary>
/// Device-specific implementation of Update.
/// Updates data buffers of all active channels with data of current frame.
/// </summary>
/// <remarks>This method is implicitely called by <see cref="Camera.Update"/> inside a camera lock.</remarks>
/// <seealso cref="Camera.Update"/>
protected override void UpdateImpl()
{
    if (!IsConnected)
    {
        return;
    }

    lock (updateLock)
    {
        // Publish the receiver thread's buffers only when a complete frame is ready.
        bool frameComplete = frameAvailable
            && latestAmplitudeImage != null
            && latestDistanceImage != null
            && latestPoint3fImage != null
            && latestZImage != null
            && latestRawConfidenceImage != null;

        if (!frameComplete)
        {
            return;
        }

        amplitudeImage = latestAmplitudeImage;
        distanceImage = latestDistanceImage;
        point3fImage = latestPoint3fImage;
        zImage = latestZImage;
        rawConfidenceImage = latestRawConfidenceImage;

        // Mark the frame as consumed so the same data is not published twice.
        frameAvailable = false;
    }
}
/// <summary>
/// Background worker loop: fetches frames from the remote camera, pushes a 2-D preview
/// bitmap to the UI thread, and rebuilds the rendered 3-D triangle mesh, until the
/// worker's cancellation is requested.
/// </summary>
private void backgroundWorkerGetFrames_DoWork(object sender, DoWorkEventArgs e)
{
    while (!backgroundWorkerGetFrames.CancellationPending)
    {
        // capture a new frame
        try
        {
            //cam.Invoke("StartStreams", null);
            cam.Update();
            //cam.Invoke("StopStreams", null);
        }
        catch (Exception ex)
        {
            // Best-effort recovery: any update failure is treated as a lost connection
            // and the client is recreated from scratch.
            // NOTE(review): endpoint and channel names are hard-coded here — consider
            // reusing the original connection parameters instead.
            GC.KeepAlive(ex);
            cam = new CameraClient("192.168.1.72", 8081, 8082, "MetriCam2.Cameras.Kinect2", "MetriCam2.Cameras.Kinect2");
            cam.Connect();
            //cam.Invoke("StopStreams", null);
        }

        // Build the point cloud either directly from a 3-D channel or by projecting a
        // distance/z image through the projective transformation.
        Point3fImage p3Image = null;
        if (channel3DName == ChannelNames.Point3DImage)
        {
            Point3fCameraImage image3D = (Point3fCameraImage)cam.CalcChannel(ChannelNames.Point3DImage);
            p3Image = new Point3fImage(ref image3D);
        }
        else if (channel3DName == ChannelNames.Distance || channel3DName == ChannelNames.ZImage)
        {
            FloatCameraImage image3D = (FloatCameraImage)cam.CalcChannel(channel3DName);
            p3Image = new Point3fImage(new FloatImage(ref image3D), projectiveTransformation, channel3DName == ChannelNames.ZImage);
        }
        // NOTE(review): if channel3DName matches none of the cases above, p3Image stays
        // null and the mask construction below throws — confirm channel3DName is always
        // one of the three handled channels.

        FloatImage ir = ConvertToFloatImage(cam.CalcChannel(channel2DName).ToFloatCameraImage());
        Bitmap bitmap = ir.ToBitmap();
        //secondBitmap = zImage.ToBitmap();
        // set the picturebox-bitmap in the main thread to avoid concurrency issues (a few helper methods required, easier/nicer solutions welcome).
        this.InvokeSetBmp(bitmap);

        MaskImage mask = new MaskImage(p3Image.Width, p3Image.Height);
        ir = ir.Normalize();
        for (int y = 0; y < mask.Height; y++)
        {
            for (int x = 0; x < mask.Width; x++)
            {
                // Keep only points inside a plausible working volume (|x|,|y| < 99, 0 < z < 99).
                Point3f p = p3Image[y, x];
                // Fixed: the Y range check used the non-short-circuit '&' operator;
                // all comparisons now use '&&' consistently.
                if (p.X > -99f && p.X < 99f && p.Y > -99f && p.Y < 99f && p.Z > 0 && p.Z < 99f)
                {
                    mask[y, x] = 0xff;
                }
            }
        }

        p3Image.EliminateFlyingPixels(5, 0.005f, 0.2f);
        p3Image.Mask = mask;

        // Rebuild or update the triangle mesh and trigger a repaint of the 3-D panel.
        TriangleIndexList til = new TriangleIndexList(p3Image, false, true);
        if (renderTil == null)
        {
            renderTil = new Metri3D.Objects.RenderTriangleIndexList(til, Color.White);
            panel3D.AddRenderObject(renderTil);
        }
        else
        {
            renderTil.UpdateData(til, Color.White);
        }
        panel3D.Invalidate();
    }
}
/// <summary>
/// Receiver loop: reads framed image data from the camera socket, parses the image
/// planes (amplitude, distance, x/y/z, confidence) and publishes them into the
/// "latest*" buffers for <see cref="UpdateImpl"/> to pick up.
/// </summary>
/// <remarks>
/// Runs until the worker's cancellation is requested. Each frame consists of a 16-byte
/// header whose bytes 5..13 carry the ASCII-encoded payload size, followed by the payload.
/// </remarks>
private void UpdateWorker_DoWork(object sender, DoWorkEventArgs e)
{
    while (!updateWorker.CancellationPending)
    {
        lock (updateLock)
        {
            // Read the fixed-size frame header.
            byte[] buffer = new byte[16];
            Receive(clientSocket, buffer, 0, 16, 300000);
            // Fixed: the header is plain ASCII ("0000star" prefix), so decode it with
            // Encoding.ASCII instead of the machine/locale-dependent Encoding.Default.
            var str = Encoding.ASCII.GetString(buffer, 5, 9);
            int frameSize;
            if (!Int32.TryParse(str, out frameSize) || frameSize <= 0)
            {
                // Fixed: the TryParse result was previously ignored; a malformed header
                // produced a zero-length payload and an unhandled end-of-stream error in
                // the parser below. Skip the frame and resynchronize on the next header.
                continue;
            }
            byte[] frameBuffer = new byte[frameSize];
            Receive(clientSocket, frameBuffer, 0, frameSize, 300000);

            FloatCameraImage localAmplitudeImage = new FloatCameraImage(width, height);
            FloatCameraImage localDistanceImage = new FloatCameraImage(width, height);
            FloatCameraImage localXImage = new FloatCameraImage(width, height);
            FloatCameraImage localYImage = new FloatCameraImage(width, height);
            FloatCameraImage localZImage = new FloatCameraImage(width, height);
            Point3fCameraImage localPoint3fImage = new Point3fCameraImage(width, height);
            ByteCameraImage localRawConfidenceImage = new ByteCameraImage(width, height);

            // All units in mm, thus we need to divide by 1000 to obtain meters
            float factor = 1 / 1000.0f;

            // Response shall start with ASCII string "0000star"
            using (MemoryStream stream = new MemoryStream(frameBuffer))
            {
                using (BinaryReader reader = new BinaryReader(stream))
                {
                    // After the start sequence of 8 bytes, the images follow in chunks
                    // Each image chunk contains 36 bytes header including information such as image dimensions and pixel format.
                    // We do not need to parse the header of each chunk as it is should be the same for every frame (except for a timestamp)
                    reader.BaseStream.Position = 44;

                    // Amplitude plane: raw unsigned 16-bit values, no unit conversion.
                    for (int y = 0; y < height; y++)
                    {
                        for (int x = 0; x < width; x++)
                        {
                            localAmplitudeImage[y, x] = (float)reader.ReadUInt16();
                        }
                    }

                    // Distance plane: unsigned 16-bit millimeters, converted to meters.
                    reader.BaseStream.Position += 36;
                    for (int y = 0; y < height; y++)
                    {
                        for (int x = 0; x < width; x++)
                        {
                            localDistanceImage[y, x] = (float)reader.ReadUInt16();
                            localDistanceImage[y, x] *= factor;
                        }
                    }

                    // X plane: signed 16-bit millimeters, converted to meters.
                    reader.BaseStream.Position += 36;
                    for (int y = 0; y < height; y++)
                    {
                        for (int x = 0; x < width; x++)
                        {
                            localXImage[y, x] = (float)reader.ReadInt16();
                            localXImage[y, x] *= factor;
                        }
                    }

                    // Y plane: signed 16-bit millimeters, converted to meters.
                    reader.BaseStream.Position += 36;
                    for (int y = 0; y < height; y++)
                    {
                        for (int x = 0; x < width; x++)
                        {
                            localYImage[y, x] = (float)reader.ReadInt16();
                            localYImage[y, x] *= factor;
                        }
                    }

                    // Z plane: signed 16-bit millimeters, converted to meters.
                    reader.BaseStream.Position += 36;
                    for (int y = 0; y < height; y++)
                    {
                        for (int x = 0; x < width; x++)
                        {
                            localZImage[y, x] = (float)reader.ReadInt16();
                            localZImage[y, x] *= factor;
                        }
                    }

                    // Confidence plane: one raw byte per pixel.
                    reader.BaseStream.Position += 36;
                    for (int y = 0; y < height; y++)
                    {
                        for (int x = 0; x < width; x++)
                        {
                            localRawConfidenceImage[y, x] = reader.ReadByte();
                        }
                    }
                }

                // Combine the x/y/z planes into the point cloud.
                for (int y = 0; y < height; y++)
                {
                    for (int x = 0; x < width; x++)
                    {
                        localPoint3fImage[y, x] = new Point3f(localXImage[y, x], localYImage[y, x], localZImage[y, x]);
                    }
                }
            }

            // Publish the complete frame; UpdateImpl swaps these in under the same lock.
            latestAmplitudeImage = localAmplitudeImage;
            latestDistanceImage = localDistanceImage;
            latestPoint3fImage = localPoint3fImage;
            latestZImage = localZImage;
            latestRawConfidenceImage = localRawConfidenceImage;
            frameAvailable = true;
        }
    }
}