/// <summary>
/// Calculates the distance data for the current frame.
/// </summary>
/// <returns>Distance map with dimensions Width and Height.</returns>
protected virtual FloatCameraImage CalcDistances()
{
    CalcDistanceMap();
    FloatCameraImage distances = new FloatCameraImage(depthWidth, depthHeight);
    lock (this.depthFrameData)
    {
        lock (cameraLock)
        {
            for (int row = 0; row < depthHeight; row++)
            {
                int rowOffset = row * depthWidth;
                for (int col = 0; col < depthWidth; col++)
                {
                    // The raw depth frame is horizontally mirrored; flip it back while copying.
                    int mirroredCol = depthWidthMinusOne - col;
                    // Raw value times the per-pixel distance-map factor; 0.001 converts to meters (raw presumably in mm — TODO confirm).
                    distances[row, col] = depthFrameData[rowOffset + mirroredCol] * distanceMap[row, mirroredCol] * 0.001f;
                }
            }
        }
        distances.TimeStamp = timestampDepth;
    }
    return distances;
}
/// <summary>
/// Extracts one channel of the raw 4-byte-per-pixel image buffer as a float image scaled to [0, 1].
/// </summary>
/// <param name="channelNr">Byte offset of the channel inside each 4-byte pixel.</param>
/// <returns>Float image of the selected channel, values in [0, 1].</returns>
private FloatCameraImage CalcChannelFloat(int channelNr)
{
    FloatCameraImage channelImage = new FloatCameraImage((int)Width, (int)Height);
    channelImage.FrameNumber = FrameNumber;
    const float scale = 1.0f / 255.0f; // map byte range [0, 255] onto [0, 1]
    unsafe
    {
        // Start at the requested channel; each pixel occupies 4 bytes.
        byte *src = (byte *)imagePtr + channelNr;
        for (int row = 0; row < Height; row++)
        {
            for (int col = 0; col < Width; col++)
            {
                channelImage[row, col] = (*src) * scale;
                src += 4; // advance one pixel
            }
        }
    }
    return channelImage;
}
/// <summary>Computes (image) data for the fake Noise channel.</summary>
/// <returns>Noisy image data.</returns>
/// <seealso cref="Camera.CalcChannel"/>
private FloatCameraImage CalcDummyNoiseChannel()
{
    // TODO: Add channel data (here: noise)
    return new FloatCameraImage(__noiseImageWidth, __noiseImageHeight);
}
/// <summary>
/// Device-specific implementation of Update.
/// Updates data buffers of all active channels with data of current frame.
/// </summary>
/// <remarks>This method is implicitely called by <see cref="Camera.Update"/> inside a camera lock.</remarks>
/// <seealso cref="Camera.Update"/>
protected override unsafe void UpdateImpl()
{
    bool synced = true;
    /* Wait until a frame is ready: Synchronized or Asynchronous */
    pxcmStatus status;
    status = pp.AcquireFrame(synced);
    if (status < pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        log.Error(Name + ": error" + status.ToString());
        // Bug fix: bail out on a failed AcquireFrame. Previously execution fell through
        // and QuerySample() was called on an invalid frame. This also matches the
        // error handling of the other UpdateImpl implementation in this project.
        return;
    }
    /* Display images */
    sample = pp.QuerySample();
    // Depth channel: 32-bit float pixels, copied verbatim into our image buffer.
    PXCMImage.ImageData depthData;
    sample.depth.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_DEPTH_F32, out depthData);
    depthImage = new FloatCameraImage(sample.depth.info.width, sample.depth.info.height);
    memcpy(new IntPtr(depthImage.Data), depthData.planes[0], new UIntPtr((uint)sample.depth.info.width * (uint)sample.depth.info.height * (uint)sizeof(float)));
    sample.depth.ReleaseAccess(depthData);
    // IR channel: 8-bit (Y8) pixels, copied verbatim.
    PXCMImage.ImageData irData;
    sample.ir.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_Y8, out irData);
    irImage = new ByteCameraImage(sample.ir.info.width, sample.ir.info.height);
    memcpy(new IntPtr(irImage.Data), irData.planes[0], new UIntPtr((uint)sample.ir.info.width * (uint)sample.ir.info.height * (uint)sizeof(byte)));
    sample.ir.ReleaseAccess(irData);
    pp.ReleaseFrame();
}
/// <summary>
/// Minimal MetriCam 2 sample: creates a Kinect2 camera, connects, grabs one frame,
/// reads the color and distance channels, and disconnects.
/// </summary>
static void Main(string[] args)
{
    Console.WriteLine("------------------------------------------");
    Console.WriteLine("MetriCam 2 Minimal Sample.");
    Console.WriteLine("Get MetriCam 2 at http://www.metricam.net/");
    Console.WriteLine("------------------------------------------");
    // Create camera object
    Camera camera;
    try
    {
        camera = new Kinect2();
    }
    catch (Exception e)
    {
        // Bug fix: the message previously claimed a "PrimeSense" camera although a Kinect2 is created.
        Console.WriteLine(Environment.NewLine + "Error: Could not create a Kinect2 camera object.");
        // Inner exception (if any) usually carries the more specific cause.
        Console.WriteLine((e.InnerException == null) ? e.Message : e.InnerException.Message);
        Console.WriteLine("Press any key to exit.");
        Console.ReadKey();
        return;
    }
    // Connect, get one frame, disconnect
    Console.WriteLine("Connecting camera");
    camera.Connect();
    Console.WriteLine("Fetching one frame");
    camera.Update();
    try
    {
        Console.WriteLine("Accessing color data");
        ColorCameraImage img = (ColorCameraImage)camera.CalcChannel(ChannelNames.Color);
        Bitmap rgbBitmapData = img.ToBitmap();
    }
    catch (ArgumentException ex)
    {
        // CalcChannel throws ArgumentException for unknown/inactive channels.
        Console.WriteLine(String.Format("Error getting channel {0}: {1}.", ChannelNames.Color, ex.Message));
    }
    try
    {
        Console.WriteLine("Accessing distance data");
        FloatCameraImage distancesData = (FloatCameraImage)camera.CalcChannel(ChannelNames.Distance);
    }
    catch (ArgumentException ex)
    {
        Console.WriteLine(String.Format("Error getting channel {0}: {1}.", ChannelNames.Distance, ex.Message));
    }
    Console.WriteLine("Disconnecting camera");
    camera.Disconnect();
    Console.WriteLine("Finished. Press any key to exit.");
    Console.ReadKey();
}
/// <summary>
/// Calculates the 3D point cloud from received data.
/// </summary>
/// <returns>point cloud</returns>
private Point3fCameraImage Calc3D()
{
    Point3fCameraImage result;
    lock (cameraLock)
    {
        result = new Point3fCameraImage(imageData.Width, imageData.Height);
        result.TimeStamp = (long)imageData.TimeStamp;
        // Intrinsics as delivered by the device: principal point (CX, CY), focal lengths
        // (FX, FY), radial distortion coefficients (K1, K2). Semantics of F2RC are not
        // visible here — presumably "focal to ray crossing" offset in mm; TODO confirm.
        float cx = imageData.CX;
        float cy = imageData.CY;
        float fx = imageData.FX;
        float fy = imageData.FY;
        float k1 = imageData.K1;
        float k2 = imageData.K2;
        float f2rc = imageData.F2RC;
        FloatCameraImage distances = CalcDistance();
        for (int i = 0; i < imageData.Height; ++i)
        {
            for (int j = 0; j < imageData.Width; ++j)
            {
                // Scale distance by 1000 (presumably meters -> integer millimeters — TODO confirm).
                int depth = (int)(distances[i, j] * 1000);
                // we map from image coordinates with origin top left and x horizontal (right) and y vertical
                // (downwards) to camera coordinates with origin in center and x to the left and y upwards (seen
                // from the sensor position)
                double xp = (cx - j) / fx;
                double yp = (cy - i) / fy;
                // correct the camera distortion
                double r2 = xp * xp + yp * yp;
                double r4 = r2 * r2;
                double k = 1 + k1 * r2 + k2 * r4;
                double xd = xp * k;
                double yd = yp * k;
                // s0 is the length of the unit ray (xd, yd, 1); scaling by depth/s0 projects
                // the measured distance along that ray.
                double s0 = Math.Sqrt(xd * xd + yd * yd + 1.0);
                double x = xd * depth / s0;
                double y = yd * depth / s0;
                double z = depth / s0 - f2rc;
                // Back to the units of the distance image (divide the mm values by 1000).
                x /= 1000;
                y /= 1000;
                z /= 1000;
                // create point and save it
                Point3f point = new Point3f((float)-x, (float)-y, (float)z);
                result[i, j] = point;
            }
        }
    }
    return(result);
}
/// <summary>
/// Converts a FloatCameraImage into a FloatImage by taking over its data buffer.
/// </summary>
/// <param name="fcImg">Source image; its buffer is abandoned to the result.</param>
/// <returns>FloatImage sharing the source's data, timestamp and frame number.</returns>
private static FloatImage ConvertToFloatImage(FloatCameraImage fcImg)
{
    // Capture metadata before the source gives up its buffer.
    long ts = fcImg.TimeStamp;
    int fn = fcImg.FrameNumber;
    FloatImage converted;
    unsafe
    {
        // Reuse the camera image's buffer instead of copying pixel data.
        converted = new FloatImage(fcImg.Width, fcImg.Height, fcImg.AbandonDataBuffer());
    }
    converted.TimeStamp = ts;
    converted.FrameNumber = fn;
    return converted;
}
/// <summary>
/// Device-specific implementation of Update.
/// Updates data buffers of all active channels with data of current frame.
/// </summary>
/// <remarks>This method is implicitely called by <see cref="Camera.Update"/> inside a camera lock.</remarks>
/// <seealso cref="Camera.Update"/>
/// <exception cref="TimeoutException">Thrown when the current frame's timestamp has not advanced, i.e. no new frame is available.</exception>
protected override unsafe void UpdateImpl()
{
    bool synced = true;
    // Wait until a frame is ready: Synchronized or Asynchronous
    pxcmStatus status;
    status = pp.AcquireFrame(synced);
    if (status < pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        log.Error(Name + ": " + status.ToString());
        return;
    }
    // get image
    sample = pp.QuerySample();
    // NOTE(review): sample.color is dereferenced here but null-checked further below —
    // confirm the color stream is guaranteed to be enabled when this camera is used.
    long imgTS = sample.color.timeStamp;
    if (imgTS <= lastTimeStamp)
    {
        // Bug fix: release the acquired frame before throwing; previously it leaked.
        pp.ReleaseFrame();
        // Bug fix: replaced the placeholder message "THIS IS NOT A TIMEOUT!" with a
        // descriptive one. The exception type is kept so existing catch clauses still work.
        throw new TimeoutException("No new frame available: the frame timestamp did not advance.");
    }
    lastTimeStamp = imgTS;
    // color image
    PXCMImage.ImageData colorData;
    if (sample.color != null)
    {
        sample.color.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB24, out colorData);
        // Copy the RGB24 data into a 24bpp bitmap, then convert to 32bpp for ColorCameraImage.
        Bitmap bmp = new Bitmap(sample.color.info.width, sample.color.info.height, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
        var bmpData = bmp.LockBits(new Rectangle(0, 0, sample.color.info.width, sample.color.info.height), System.Drawing.Imaging.ImageLockMode.WriteOnly, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
        memcpy(bmpData.Scan0, colorData.planes[0], new UIntPtr(3 * (uint)sample.color.info.width * (uint)sample.color.info.height));
        bmp.UnlockBits(bmpData);
        Bitmap bmp32 = bmp.Clone(new Rectangle(0, 0, widthColor, heightColor), System.Drawing.Imaging.PixelFormat.Format32bppPArgb);
        colorImage = new ColorCameraImage(bmp32);
        sample.color.ReleaseAccess(colorData);
    }
    // depth
    PXCMImage.ImageData depthData;
    if (sample.depth != null)
    {
        sample.depth.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_DEPTH_F32, out depthData);
        depthImage = new FloatCameraImage(sample.depth.info.width, sample.depth.info.height);
        // Copy row by row (element size 4 bytes) because the source may have a padded stride.
        CopyImageWithStride(sample.depth.info.width, sample.depth.info.height, 4, depthData, new IntPtr(depthImage.Data));
        sample.depth.ReleaseAccess(depthData);
    }
    pp.ReleaseFrame();
}
/// <summary>
/// Converts raw confidence image to [0, 1] range.
/// According to sensor operating instructions, the highest bit indicates whether a pixel is valid or not.
/// More information about invalid bits would be encoded in the remaining bits.
/// </summary>
/// <param name="rawConfidenceMap">Raw confidence image as provided by camera</param>
/// <returns>Confidence image as float image in range [0, 1]</returns>
private FloatCameraImage CalcConfidenceMap(ByteCameraImage rawConfidenceMap)
{
    int w = rawConfidenceMap.Width;
    int h = rawConfidenceMap.Height;
    FloatCameraImage confidenceMap = new FloatCameraImage(w, h);
    for (int row = 0; row < h; row++)
    {
        for (int col = 0; col < w; col++)
        {
            // Bit 7 set => invalid pixel => zero confidence; otherwise full confidence.
            bool isValid = (rawConfidenceMap[row, col] & 0x80) == 0;
            confidenceMap[row, col] = isValid ? 1.0f : 0.0f;
        }
    }
    return confidenceMap;
}
/// <summary>
/// According to the xml records being sent by the camera as initial sequence, a confidence image comes with 16 bits per pixel such that the lowest possible value is 0 and the highest possible value is 65535.
/// Scale this range to [0, 1].
/// </summary>
/// <param name="rawConfidenceMap">Confidence map as provided by the camera</param>
/// <returns>Confidence map as float image scaled to [0, 1] range</returns>
private FloatCameraImage CalcConfidenceMap(UShortCameraImage rawConfidenceMap)
{
    // Normalize the full ushort range onto [0, 1].
    const float scaling = 1.0f / ushort.MaxValue;
    int w = rawConfidenceMap.Width;
    int h = rawConfidenceMap.Height;
    FloatCameraImage confidenceMap = new FloatCameraImage(w, h);
    for (int row = 0; row < h; row++)
    {
        for (int col = 0; col < w; col++)
        {
            confidenceMap[row, col] = rawConfidenceMap[row, col] * scaling;
        }
    }
    return confidenceMap;
}
//private void TestBltCLI()
//{
//    // ip: 192.168.1.146
//    BluetechnixCLI cam = new BluetechnixCLI();
//    cam.IPAddress = "192.168.1.146";
//    cam.Connect();
//    Console.WriteLine("Got camera: {0}", cam.SerialNumber);
//    cam.MedianFilter = true;
//    cam.FrameAverageFilter = true;
//    cam.BilateralFilter = true;
//    cameras.Add(cam);
//    cam.ActivateChannel(ChannelNames.Color);
//    cam.SelectChannel(ChannelNames.Color);
//    bgWorkerGetFrames.RunWorkerAsync();
//}
/// <summary>
/// Smoke test for a SICK Visionary-T camera: prints distance statistics for one frame,
/// then streams the intensity channel via the background worker.
/// </summary>
private void TestV3S()
{
    VisionaryT cam = new VisionaryT();
    cam.IPAddress = "192.168.1.137"; // DHCP
    cam.Connect();
    // get distance frame
    {
        cam.ActivateChannel(ChannelNames.Distance);
        cam.SelectChannel(ChannelNames.Distance);
        cam.Update();
        FloatCameraImage distances = (FloatCameraImage)cam.CalcSelectedChannel();
        float min = distances[0];
        float max = distances[0];
        float sum = 0F;
        // Sample the pixel at the image center via the linear indexer.
        float mid = distances[(int)(cam.Width * (cam.Height / 2) + cam.Width / 2)];
        for (int row = 0; row < cam.Height; ++row)
        {
            for (int col = 0; col < cam.Width; ++col)
            {
                float d = distances[row, col];
                min = (d < min) ? d : min;
                max = (d > max) ? d : max;
                sum += d;
            }
        }
        float mean = sum / (cam.Height * cam.Width);
        Console.WriteLine("Distances: Min: {0}m Max: {1}m Mean: {2}m Mid: {3}m", min, max, mean, mid);
    }
    cam.ActivateChannel(ChannelNames.Intensity);
    cam.SelectChannel(ChannelNames.Intensity);
    cameras.Add(cam);
    bgWorkerGetFrames.RunWorkerAsync();
    //cam.Disconnect();
}
/// <summary>
/// Copies the long-exposure IR frame buffer into a float image.
/// </summary>
/// <returns>Long-exposure IR image with dimensions depthWidth x depthHeight.</returns>
private FloatCameraImage CalcLongExposureIR()
{
    FloatCameraImage result = new FloatCameraImage(depthWidth, depthHeight);
    lock (cameraLock)
    {
        for (int row = 0; row < this.depthHeight; row++)
        {
            int rowStart = row * depthWidth;
            for (int col = 0; col < this.depthWidth; col++)
            {
                // Source data is horizontally mirrored; flip it back while copying.
                result[row, col] = (float)this.longExposureIRData[rowStart + depthWidthMinusOne - col];
            }
        }
    }
    return result;
}
/// <summary>
/// Device-specific implementation of Update.
/// Updates data buffers of all active channels with data of current frame.
/// </summary>
/// <remarks>This method is implicitely called by <see cref="Camera.Update"/> inside a camera lock.</remarks>
/// <seealso cref="Camera.Update"/>
protected override void UpdateImpl()
{
    if (!IsConnected)
    {
        return;
    }
    lock (updateLock)
    {
        // Only publish when the background worker has delivered a complete frame.
        bool frameComplete = frameAvailable
            && latestAmplitudeImage != null
            && latestDistanceImage != null
            && latestPoint3fImage != null
            && latestZImage != null
            && latestRawConfidenceImage != null;
        if (!frameComplete)
        {
            return;
        }
        // Swap the latest buffers in as the current channel data.
        amplitudeImage = latestAmplitudeImage;
        distanceImage = latestDistanceImage;
        point3fImage = latestPoint3fImage;
        zImage = latestZImage;
        rawConfidenceImage = latestRawConfidenceImage;
        // Mark the frame as consumed so the next Update waits for fresh data.
        frameAvailable = false;
    }
}
/// <summary>
/// Triggers one stereo capture and returns left/right images, reusing the camera images' buffers.
/// </summary>
/// <param name="cam">Stereo camera to capture from.</param>
/// <param name="left">Receives the left image.</param>
/// <param name="right">Receives the right image.</param>
/// <param name="cnt">Capture counter; incremented once per call.</param>
private static void Capture(TriggeredStereoCamera cam, out FloatImage left, out FloatImage right, ref int cnt)
{
    cam.Update();
    cnt++;
    FloatCameraImage leftRaw = cam.CalcChannel(ChannelNames.Left).ToFloatCameraImage();
    FloatCameraImage rightRaw = cam.CalcChannel(ChannelNames.Right).ToFloatCameraImage();
    // Capture metadata first; AbandonDataBuffer() below hands the buffers to the results.
    long leftTS = leftRaw.TimeStamp;
    long rightTS = rightRaw.TimeStamp;
    int leftFN = leftRaw.FrameNumber;
    int rightFN = rightRaw.FrameNumber;
    unsafe
    {
        left = new FloatImage(leftRaw.Width, leftRaw.Height, leftRaw.AbandonDataBuffer());
        right = new FloatImage(rightRaw.Width, rightRaw.Height, rightRaw.AbandonDataBuffer());
    }
    left.TimeStamp = leftTS;
    right.TimeStamp = rightTS;
    left.FrameNumber = leftFN;
    right.FrameNumber = rightFN;
}
/// <summary>
/// Calculates the intensity channel.
/// </summary>
/// <returns>Intensity image</returns>
private FloatCameraImage CalcIntensity()
{
    FloatCameraImage intensity;
    lock (cameraLock)
    {
        intensity = new FloatCameraImage(imageData.Width, imageData.Height);
        intensity.TimeStamp = (long)imageData.TimeStamp;
        int offset = imageData.IntensityStartOffset;
        for (int row = 0; row < imageData.Height; ++row)
        {
            for (int col = 0; col < imageData.Width; ++col)
            {
                // Each pixel is a 16-bit little-endian value in the raw buffer.
                uint raw = (uint)imageBuffer[offset + 1] << 8 | (uint)imageBuffer[offset];
                intensity[row, col] = (float)raw;
                offset += 2;
            }
        }
    }
    return intensity;
}
/// <summary>
/// Extracts a bitmap representation from the current raw-data.
/// </summary>
/// <returns>Bitmap representation of the current frame.</returns>
/// <exception cref="InvalidOperationException">Thrown when the raw buffer's stride does not match the bitmap's stride (32bpp path only).</exception>
private ColorCameraImage CalcColor()
{
    ColorCameraImage result = new ColorCameraImage((int)Width, (int)Height);
    result.FrameNumber = FrameNumber;
    if (IsMonochrome)
    {
        // Monochrome: replicate the single channel into all three bytes of each 24bpp pixel.
        FloatCameraImage img = CalcChannelFloat(0);
        // CalcChannelFloat scales bytes to [0, 1], so 32-bit sources are rescaled to byte range.
        // NOTE(review): for non-32-bit sources factor 1.0 assumes values already fit a byte — confirm.
        float factor = (32 == bitsPerPixel) ? 255.0f : 1.0f;
        BitmapData bData = result.Data.LockBits(new Rectangle(0, 0, imageWidth, imageHeight), System.Drawing.Imaging.ImageLockMode.WriteOnly, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
        for (int y = 0; y < result.Data.Height; y++)
        {
            // Address each row through the stride, which may include padding bytes.
            byte *linePtr = (byte *)bData.Scan0 + y * bData.Stride;
            for (int x = 0; x < result.Data.Width; x++)
            {
                byte d = (byte)(img[y, x] * factor);
                * linePtr++ = d;
                * linePtr++ = d;
                * linePtr++ = d;
            }
        }
        result.Data.UnlockBits(bData);
    }
    else
    {
        // Color: the raw buffer is copied wholesale, which is only valid when strides match.
        BitmapData bData = result.Data.LockBits(new Rectangle(0, 0, imageWidth, imageHeight), System.Drawing.Imaging.ImageLockMode.WriteOnly, System.Drawing.Imaging.PixelFormat.Format32bppArgb);
        if (bData.Stride != stride)
        {
            result.Data.UnlockBits(bData);
            throw new InvalidOperationException("Stride is incompatible.");
        }
        CopyMemory((byte *)bData.Scan0, imagePtr, (uint)(stride * imageHeight));
        result.Data.UnlockBits(bData);
    }
    return(result);
}
/// <summary>
/// Reads the raw 16-bit image buffer into a float image, one value per pixel.
/// </summary>
/// <returns>Float image of the raw short data.</returns>
private FloatCameraImage CalcChannelShort()
{
    FloatCameraImage result = new FloatCameraImage((int)Width, (int)Height);
    result.FrameNumber = FrameNumber;
    unsafe
    {
        // Walk the buffer linearly, row by row.
        short *src = (short *)imagePtr;
        for (int row = 0; row < Height; row++)
        {
            for (int col = 0; col < Width; col++)
            {
                result[row, col] = *src++;
            }
        }
    }
    return result;
}
/// <summary>
/// Copies the IR frame buffer into a float amplitude image.
/// </summary>
/// <returns>Amplitude image with dimensions depthWidth x depthHeight.</returns>
private FloatCameraImage CalcAmplitudes()
{
    FloatCameraImage amplitudes = new FloatCameraImage(depthWidth, depthHeight);
    lock (this.irFrameData)
    {
        lock (cameraLock)
        {
            for (int row = 0; row < this.depthHeight; row++)
            {
                int rowStart = row * depthWidth;
                for (int col = 0; col < this.depthWidth; col++)
                {
                    // The IR frame is horizontally mirrored; undo that while copying.
                    amplitudes[row, col] = (float)this.irFrameData[rowStart + depthWidthMinusOne - col];
                }
            }
        }
        amplitudes.TimeStamp = timestampIR;
    }
    return amplitudes;
}
/// <summary>
/// Grabs one Z-image frame from the camera and renders it as a grayscale texture.
/// Depth values inside (800, 1200) (sensor units, presumably millimeters — TODO confirm)
/// are mapped onto the BlackToWhite gradient; all other pixels are drawn black.
/// Does nothing when the camera is not connected.
/// </summary>
unsafe public void FloatImage()
{
    if (!camera.IsConnected)
    {
        return;
    }
    camera.Update();
    FloatCameraImage img = (FloatCameraImage)camera.CalcChannel(MetriCam2.ChannelNames.ZImage);
    float * img_float = img.Data;
    int pixelCount = img.Width * img.Height;
    // Track the frame's value range (currently informational only).
    // Bug fix: the previous code initialized min = 1 / max = 0 and used 'else if',
    // so values between the running min and max were never classified correctly.
    float min = float.MaxValue;
    float max = float.MinValue;
    for (int i = 0; i < pixelCount; i++)
    {
        if (img_float[i] > max)
        {
            max = img_float[i];
        }
        if (img_float[i] < min)
        {
            min = img_float[i];
        }
    }
    Texture2D tex = new Texture2D(img.Width, img.Height);
    Color[] depthColor = new Color[pixelCount];
    // Window of interest; values outside render black. (Replaces magic numbers 800/1200/400.)
    const float near = 800f;
    const float far = 1200f;
    for (int i = 0; i < pixelCount; i++)
    {
        if (img_float[i] > near && img_float[i] < far)
        {
            depthColor[i] = BlackToWhite.Evaluate((img_float[i] - near) / (far - near));
        }
        else
        {
            depthColor[i] = Color.black;
        }
    }
    tex.SetPixels(depthColor);
    tex.Apply();
    DepthImg.texture = tex;
    img.Dispose();
}
/// <summary>
/// Background worker loop: fetches frames from the remote camera, displays the 2D channel
/// in the UI, and builds/renders a triangle mesh from the 3D channel until cancelled.
/// Reconnects to the remote camera whenever Update() fails.
/// </summary>
private void backgroundWorkerGetFrames_DoWork(object sender, DoWorkEventArgs e)
{
    while (!backgroundWorkerGetFrames.CancellationPending)
    {
        // capture a new frame
        try
        {
            //cam.Invoke("StartStreams", null);
            cam.Update();
            //cam.Invoke("StopStreams", null);
        }
        catch (Exception ex)
        {
            GC.KeepAlive(ex); // keep the caught exception inspectable in the debugger
            // Recreate and reconnect the remote camera client on any update failure.
            cam = new CameraClient("192.168.1.72", 8081, 8082, "MetriCam2.Cameras.Kinect2", "MetriCam2.Cameras.Kinect2");
            cam.Connect();
            //cam.Invoke("StopStreams", null);
        }
        // Build the 3D image from whichever 3D channel is selected.
        // NOTE(review): p3Image stays null for any other channel3DName and would throw below — confirm callers restrict the value.
        Point3fImage p3Image = null;
        if (channel3DName == ChannelNames.Point3DImage)
        {
            Point3fCameraImage image3D = (Point3fCameraImage)cam.CalcChannel(ChannelNames.Point3DImage);
            p3Image = new Point3fImage(ref image3D);
        }
        else if (channel3DName == ChannelNames.Distance || channel3DName == ChannelNames.ZImage)
        {
            FloatCameraImage image3D = (FloatCameraImage)cam.CalcChannel(channel3DName);
            p3Image = new Point3fImage(new FloatImage(ref image3D), projectiveTransformation, channel3DName == ChannelNames.ZImage);
        }
        FloatImage ir = ConvertToFloatImage(cam.CalcChannel(channel2DName).ToFloatCameraImage());
        Bitmap bitmap = ir.ToBitmap();
        //secondBitmap = zImage.ToBitmap();
        // set the picturebox-bitmap in the main thread to avoid concurrency issues (a few helper methods required, easier/nicer solutions welcome).
        this.InvokeSetBmp(bitmap);
        MaskImage mask = new MaskImage(p3Image.Width, p3Image.Height);
        ir = ir.Normalize();
        for (int y = 0; y < mask.Height; y++)
        {
            for (int x = 0; x < mask.Width; x++)
            {
                Point3f p = p3Image[y, x];
                // Mask in only points within the +/-99 box and positive depth.
                // Bug fix: the Y range check used the bitwise '&' operator; use short-circuiting '&&' consistently.
                if (p.X > -99f && p.X < 99f && p.Y > -99f && p.Y < 99f && p.Z > 0 && p.Z < 99f)
                {
                    mask[y, x] = 0xff;
                }
            }
        }
        p3Image.EliminateFlyingPixels(5, 0.005f, 0.2f);
        p3Image.Mask = mask;
        TriangleIndexList til = new TriangleIndexList(p3Image, false, true);
        if (renderTil == null)
        {
            renderTil = new Metri3D.Objects.RenderTriangleIndexList(til, Color.White);
            panel3D.AddRenderObject(renderTil);
        }
        else
        {
            renderTil.UpdateData(til, Color.White);
        }
        panel3D.Invalidate();
    }
}
/// <summary>
/// Background worker loop: receives one frame per iteration from the camera socket,
/// decodes the individual image channels, and publishes them to the latest* fields,
/// which UpdateImpl consumes under the same lock.
/// </summary>
private void UpdateWorker_DoWork(object sender, DoWorkEventArgs e)
{
    while (!updateWorker.CancellationPending)
    {
        lock (updateLock)
        {
            // Read the 16-byte header; bytes 5..13 carry the frame size as an ASCII decimal string.
            byte[] buffer = new byte[16];
            Receive(clientSocket, buffer, 0, 16, 300000);
            var str = Encoding.Default.GetString(buffer, 5, 9);
            int frameSize;
            // NOTE(review): a parse failure leaves frameSize = 0 and silently skips the payload — confirm intended.
            Int32.TryParse(str, out frameSize);
            // Read the complete frame payload.
            byte[] frameBuffer = new byte[frameSize];
            Receive(clientSocket, frameBuffer, 0, frameSize, 300000);
            FloatCameraImage localAmplitudeImage = new FloatCameraImage(width, height);
            FloatCameraImage localDistanceImage = new FloatCameraImage(width, height);
            FloatCameraImage localXImage = new FloatCameraImage(width, height);
            FloatCameraImage localYImage = new FloatCameraImage(width, height);
            FloatCameraImage localZImage = new FloatCameraImage(width, height);
            Point3fCameraImage localPoint3fImage = new Point3fCameraImage(width, height);
            ByteCameraImage localRawConfidenceImage = new ByteCameraImage(width, height);
            // All units in mm, thus we need to divide by 1000 to obtain meters
            float factor = 1 / 1000.0f;
            // Response shall start with ASCII string "0000star"
            using (MemoryStream stream = new MemoryStream(frameBuffer))
            {
                using (BinaryReader reader = new BinaryReader(stream))
                {
                    // After the start sequence of 8 bytes, the images follow in chunks
                    // Each image chunk contains 36 bytes header including information such as image dimensions and pixel format.
                    // We do not need to parse the header of each chunk as it is should be the same for every frame (except for a timestamp)
                    // Skip the 8-byte start sequence plus the 36-byte header of the first chunk.
                    reader.BaseStream.Position = 44;
                    // Amplitude: unsigned 16-bit raw counts per pixel.
                    for (int y = 0; y < height; y++)
                    {
                        for (int x = 0; x < width; x++)
                        {
                            localAmplitudeImage[y, x] = (float)reader.ReadUInt16();
                        }
                    }
                    reader.BaseStream.Position += 36;
                    // Distance: unsigned 16-bit millimeters, converted to meters.
                    for (int y = 0; y < height; y++)
                    {
                        for (int x = 0; x < width; x++)
                        {
                            localDistanceImage[y, x] = (float)reader.ReadUInt16();
                            localDistanceImage[y, x] *= factor;
                        }
                    }
                    reader.BaseStream.Position += 36;
                    // X coordinate: signed 16-bit millimeters, converted to meters.
                    for (int y = 0; y < height; y++)
                    {
                        for (int x = 0; x < width; x++)
                        {
                            localXImage[y, x] = (float)reader.ReadInt16();
                            localXImage[y, x] *= factor;
                        }
                    }
                    reader.BaseStream.Position += 36;
                    // Y coordinate: signed 16-bit millimeters, converted to meters.
                    for (int y = 0; y < height; y++)
                    {
                        for (int x = 0; x < width; x++)
                        {
                            localYImage[y, x] = (float)reader.ReadInt16();
                            localYImage[y, x] *= factor;
                        }
                    }
                    reader.BaseStream.Position += 36;
                    // Z coordinate: signed 16-bit millimeters, converted to meters.
                    for (int y = 0; y < height; y++)
                    {
                        for (int x = 0; x < width; x++)
                        {
                            localZImage[y, x] = (float)reader.ReadInt16();
                            localZImage[y, x] *= factor;
                        }
                    }
                    reader.BaseStream.Position += 36;
                    // Raw confidence: one byte per pixel, passed through unconverted.
                    for (int y = 0; y < height; y++)
                    {
                        for (int x = 0; x < width; x++)
                        {
                            localRawConfidenceImage[y, x] = reader.ReadByte();
                        }
                    }
                }
                // Combine the per-axis images into a single 3D point image.
                for (int y = 0; y < height; y++)
                {
                    for (int x = 0; x < width; x++)
                    {
                        localPoint3fImage[y, x] = new Point3f(localXImage[y, x], localYImage[y, x], localZImage[y, x]);
                    }
                }
            }
            // Publish the finished frame; UpdateImpl reads these fields under updateLock.
            latestAmplitudeImage = localAmplitudeImage;
            latestDistanceImage = localDistanceImage;
            latestPoint3fImage = localPoint3fImage;
            latestZImage = localZImage;
            latestRawConfidenceImage = localRawConfidenceImage;
            frameAvailable = true;
        }
    }
}