/// <summary>
/// Kinect depth-stream callback: wraps the raw depth bytes in a grayscale
/// Emgu image and saves it to disk as a timestamped PNG.
/// </summary>
/// <param name="sender">The camera that raised the event (unused).</param>
/// <param name="e">Event payload carrying frame dimensions and raw bytes.</param>
private static void HandleKinectDepthCameraDataReceived(object sender, BaseCamera.DataReceivedEventArgs e)
{
    Console.WriteLine("Depth data received at {0}", e.Timestamp);

    Image<Gray, byte> depthImage = new Image<Gray, byte>(e.Data.Width, e.Data.Height);
    depthImage.Bytes = e.Data.Data;

    // BUG FIX: the old name was built from DateTime.Now.ToString() with
    // Replace(".", "")/Replace("/", "")/Replace(" ", ""). That string is
    // culture-dependent and in many locales still contains ':' — invalid in a
    // Windows file name — and DateTime.Now was evaluated multiple times, so
    // the pieces could disagree. Use one fixed-format, invariant timestamp.
    string stamp = DateTime.Now.ToString("yyyyMMdd_HHmmss_fff",
        System.Globalization.CultureInfo.InvariantCulture);
    depthImage.Save("depth_img_" + stamp + ".png");
}
/// <summary>
/// Kinect video-stream callback: wraps the raw BGR bytes in an Emgu image and
/// saves it to disk as a timestamped PNG.
/// </summary>
/// <param name="sender">The camera that raised the event (unused).</param>
/// <param name="e">Event payload carrying frame dimensions and raw bytes.</param>
private static void HandleKinectVideoCameraDataReceived(object sender, BaseCamera.DataReceivedEventArgs e)
{
    Console.WriteLine("Video data received at {0}", e.Timestamp);

    // BUG FIX: the old code also allocated a Mat and saved it as
    // "RGB_img_mat_*.png" without ever copying the frame into it, so an
    // uninitialized image was written to disk on every frame. That dead,
    // garbage-producing write is removed.

    Image<Bgr, byte> colorImage = new Image<Bgr, byte>(e.Data.Width, e.Data.Height);
    colorImage.Bytes = e.Data.Data;

    // BUG FIX: single culture-invariant timestamp; the previous
    // DateTime.Now.ToString().Replace(...) chain was culture-dependent and
    // could leave ':' (invalid on Windows) in the file name.
    string stamp = DateTime.Now.ToString("yyyyMMdd_HHmmss_fff",
        System.Globalization.CultureInfo.InvariantCulture);
    colorImage.Save("rgb_img_" + stamp + ".png");
}
// Video/IR frame callback from the camera.
// Uploads the incoming frame into VideoTextureBuffer (converting packed color
// data through Prebuffer first for RGB/Depth480 modes), advances the frame
// counters, and optionally dumps the raw bytes to <RawSavePath>.rwc when a
// one-shot save was requested.
// NOTE(review): the VideoBufferInUse flag and BufferReleaseARE handshake
// appear to coordinate with another thread that tears down the buffers —
// statement order here looks significant; confirm before restructuring.
void DataReceived(object sender, BaseCamera.DataReceivedEventArgs e)
{
    if (!BufferUsageDisabled)
    {
        // Mark the texture buffer busy so a concurrent consumer won't release it mid-copy.
        VideoBufferInUse = true;
        switch (Mode)
        {
            case Modes.IR1024:
            case Modes.IR480:
                // IR frames are uploaded as-is.
                VideoTextureBuffer.SetData(e.Data.Data);
                break;
            case Modes.RGB1024:
            case Modes.RGB480:
            case Modes.Depth480:
                // Color frames are converted into Prebuffer before upload.
                ConvertColorData(e.Data.Data, Prebuffer);
                VideoTextureBuffer.SetData(Prebuffer);
                break;
        }
        // In Depth480 mode the depth callback (DepthDataReceived) owns
        // FrameCounter, so don't double-count here.
        if (Mode != Modes.Depth480)
        {
            FrameCounter++;
        }
        VideoBufferInUse = false;
    }
    else
    {
        // Buffers are disabled (being torn down): signal the waiter that this
        // callback is not holding them.
        BufferReleaseARE.Set();
    }
    FrameID++;
    // Forward the raw frame to subscribers; this handler always tags it Color.
    OnRawFrameIn(e.Data.Data, FrameTypes.Color);
    // One-shot raw dump: write the frame and report success/failure via an IO
    // event rather than throwing (deliberate best-effort).
    if (RawSaveDataNext)
    {
        string path = RawSavePath + ".rwc";
        try
        {
            File.WriteAllBytes(path, e.Data.Data);
            SendIOEvent(new IOEventArgs(path, true, null));
        }
        catch (Exception t)
        {
            SendIOEvent(new IOEventArgs(path, false, t.Message));
        }
        RawSaveDataNext = false;
    }
}
// Callback for kinect camera, called when depth data stream is received.
// Uploads the raw depth bytes directly into depthTexture and pushes the
// update to the GPU.
// CLEANUP: removed the commented-out byte->ushort conversion / endianness
// debug code and the disabled terrainMaterial.SetTexture call — dead code
// that was never executed.
// NOTE(review): LoadRawTextureData requires depthTexture's size and format to
// exactly match the incoming buffer — assumed configured elsewhere; confirm.
private void HandleKinectDepthCameraDataReceived(object sender, BaseCamera.DataReceivedEventArgs e)
{
    depthTexture.LoadRawTextureData(e.Data.Data);
    depthTexture.Apply();
}
// Depth frame callback from the camera.
// In Depth480 mode: optionally measures depth inside DepthMeasurementWindow
// (see below), advances the frame counter, and uploads the raw frame into
// DepthTextureBuffer. Also forwards the raw bytes to subscribers and handles
// the one-shot raw dump to <RawSavePath>.rwd.
// Measurement: for each column of the window it averages the 16-bit
// little-endian depth samples over the column's rows, then takes the minimum
// column average as Depth; DepthAverage is the running mean of those minima.
// NOTE(review): the DepthBufferInUse flag and BufferReleaseARE handshake
// appear to coordinate with buffer teardown on another thread — statement
// order looks significant; confirm before restructuring.
void DepthDataReceived(object sender, BaseCamera.DataReceivedEventArgs e)
{
    if (!BufferUsageDisabled)
    {
        // Mark the depth buffer busy so a concurrent consumer won't release it mid-copy.
        DepthBufferInUse = true;
        switch (Mode)
        {
            case Modes.Depth480:
                if (DepthMeasurementWindow != null)
                {
                    // Inclusive window bounds in pixel coordinates.
                    int x1 = DepthMeasurementWindow.Value.X, y1 = DepthMeasurementWindow.Value.Y;
                    int x2 = DepthMeasurementWindow.Value.Width + x1, y2 = DepthMeasurementWindow.Value.Height + y1;
                    // n: per-column sum/average; dmax: smallest column average
                    // seen so far (initialized to the 10000 sentinel, despite
                    // the "max" name it tracks a minimum).
                    int n, dmax = 10000, w = x2 - x1 + 1, h = y2 - y1 + 1, pos, x;
                    byte[] data = e.Data.Data;
                    for (int i = x1; i <= x2; i++)
                    {
                        n = 0;
                        for (int j = y1; j <= y2; j++)
                        {
                            // Two bytes per sample, little-endian (low byte first).
                            pos = (j * DepthWidth + i) * 2;
                            x = data[pos + 1] << 8;
                            x += data[pos];
                            n += x;
                        }
                        // Integer average of this column over h rows.
                        n /= h;
                        if (n < dmax)
                        {
                            dmax = n;
                        }
                    }
                    // Publish the minimum column average and update the running mean.
                    Depth = dmax;
                    DepthCount++;
                    DepthSum += Depth;
                    DepthAverage = (double)DepthSum / DepthCount;
                }
                FrameCounter++;
                DepthTextureBuffer.SetData(e.Data.Data);
                break;
        }
        DepthBufferInUse = false;
    }
    else
    {
        // Buffers are disabled (being torn down): signal the waiter that this
        // callback is not holding them.
        BufferReleaseARE.Set();
    }
    // Forward the raw frame to subscribers, tagged as depth.
    OnRawFrameIn(e.Data.Data, FrameTypes.Depth);
    // One-shot raw dump: write the frame and report success/failure via an IO
    // event rather than throwing (deliberate best-effort).
    if (RawSaveDepthNext)
    {
        string path = RawSavePath + ".rwd";
        try
        {
            File.WriteAllBytes(path, e.Data.Data);
            SendIOEvent(new IOEventArgs(path, true, null));
        }
        catch (Exception t)
        {
            SendIOEvent(new IOEventArgs(path, false, t.Message));
        }
        RawSaveDepthNext = false;
    }
}