private async void Window_Loaded(object sender, RoutedEventArgs e)
{
    while (running)
    {
        // Use the point cloud transform to receive data from each capture.
        using (Image transformedDepth = new Image(ImageFormat.Depth16, colorWidth, colorHeight, colorWidth * sizeof(UInt16)))
        using (Capture capture = await Task.Run(() => this.kinect.GetCapture()))
        using (Image colorImage = transform.ColorImageToDepthCamera(capture))
        using (Image xyzImage = transform.DepthImageToPointCloud(capture.Depth))
        {
            this.transform.DepthImageToColorCamera(capture, transformedDepth);
            this.StatusText = "Received Capture: " + capture.Depth.DeviceTimestamp;

            var color = capture.Color;
            var depth = capture.Depth;

            // Mask the color image for display on screen.
            MaskColor(color, transformedDepth);

            // Fetch the point coordinates when requested.
            if (flag)
            {
                state = primaryInt.getRobotState();
                try
                {
                    info.Text = "please wait for a while ...";
                    GetPointcloud(colorImage, depth, xyzImage, state);
                }
                catch (Exception)
                {
                    info.Text = "An error occurred while getting the point cloud data!";
                }
            }
        }
    }
}
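The WPF loop above relies on fields initialized elsewhere. A minimal setup sketch, assuming the Microsoft.Azure.Kinect.Sensor API and illustrative configuration values; the field names kinect, transform, colorWidth, and colorHeight match those used in Window_Loaded:

// Minimal setup sketch (assumption): fields used by Window_Loaded above.
// The DeviceConfiguration values are illustrative, not from the original source.
private Device kinect;
private Transformation transform;
private int colorWidth;
private int colorHeight;

private void InitKinect()
{
    kinect = Device.Open(0);
    kinect.StartCameras(new DeviceConfiguration
    {
        ColorFormat = ImageFormat.ColorBGRA32,
        ColorResolution = ColorResolution.R720p,
        DepthMode = DepthMode.NFOV_Unbinned,
        SynchronizedImagesOnly = true,
        CameraFPS = FPS.FPS30
    });

    Calibration calibration = kinect.GetCalibration();
    transform = calibration.CreateTransformation();
    colorWidth = calibration.ColorCameraCalibration.ResolutionWidth;
    colorHeight = calibration.ColorCameraCalibration.ResolutionHeight;
}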
public void ProcessCameraFrame()
{
    if (_IsCameraStarted)
    {
        Capture capture = _KinectSensor.GetCapture();
        if (capture.Color != null)
        {
            _RawColorImage = capture.Color.Memory.ToArray();
            _TransformedColorImage = _Transformation.ColorImageToDepthCamera(capture).Memory.ToArray();
        }
        if (capture.Depth != null)
        {
            Image depthImage = capture.Depth;
            Image transformedDepthImage = _Transformation.DepthImageToColorCamera(capture);
            _RawDepthImage = depthImage.GetPixels<short>().ToArray();
            _TransformedDepthImage = transformedDepthImage.GetPixels<short>().ToArray();
            _PointCloud = _Transformation.DepthImageToPointCloud(depthImage).GetPixels<Short3>().ToArray();
        }
        _ImuSample = _KinectSensor.GetImuSample();
        capture.Dispose();
    }
}
private async Task KinectLoop()
{
    while (true)
    {
        using (Capture capture = await Task.Run(() => kinect.GetCapture()).ConfigureAwait(true))
        {
            // Getting color information
            Image colorImage = transformation.ColorImageToDepthCamera(capture);
            BGRA[] colorArray = colorImage.GetPixels<BGRA>().ToArray();

            // Getting vertices of point cloud
            Image xyzImage = transformation.DepthImageToPointCloud(capture.Depth);
            Short3[] xyzArray = xyzImage.GetPixels<Short3>().ToArray();

            for (int i = 0; i < num; i++)
            {
                vertices[i].x = xyzArray[i].X * 0.001f;
                vertices[i].y = -xyzArray[i].Y * 0.001f; // flip vertically
                vertices[i].z = xyzArray[i].Z * 0.001f;

                colors[i].b = colorArray[i].B;
                colors[i].g = colorArray[i].G;
                colors[i].r = colorArray[i].R;
                colors[i].a = 255;
            }

            mesh.vertices = vertices;
            mesh.colors32 = colors;
            mesh.RecalculateBounds();
        }
    }
}
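This loop writes into preallocated buffers (num, vertices, colors, mesh). A minimal initialization sketch, sized to the depth camera resolution; the MeshTopology.Points setup is an assumption consistent with rendering a raw point cloud, and it assumes using System.Linq and UnityEngine:

// Initialization sketch (assumption): allocates the buffers that KinectLoop writes into.
private void InitMesh()
{
    int depthWidth = kinect.GetCalibration().DepthCameraCalibration.ResolutionWidth;
    int depthHeight = kinect.GetCalibration().DepthCameraCalibration.ResolutionHeight;
    num = depthWidth * depthHeight;

    mesh = new Mesh();
    // Allow more than 65535 vertices (a 640x576 NFOV_Unbinned depth image has 368640 points).
    mesh.indexFormat = UnityEngine.Rendering.IndexFormat.UInt32;

    vertices = new Vector3[num];
    colors = new Color32[num];
    int[] indices = Enumerable.Range(0, num).ToArray();

    mesh.vertices = vertices;
    mesh.colors32 = colors;
    mesh.SetIndices(indices, MeshTopology.Points, 0);

    gameObject.GetComponent<MeshFilter>().mesh = mesh;
}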
private async Task CameraLoop(Device device)
{
    while (running)
    {
        if (collectCameraData)
        {
            using (Capture capture = await Task.Run(() => device.GetCapture()).ConfigureAwait(true))
            {
                switch (transformationMode)
                {
                    case TransformationMode.ColorToDepth:
                        finalColor = transformation.ColorImageToDepthCamera(capture);
                        finalDepth = capture.Depth;
                        break;
                    case TransformationMode.DepthToColor:
                        finalColor = capture.Color;
                        finalDepth = transformation.DepthImageToColorCamera(capture);
                        break;
                    case TransformationMode.None:
                        finalColor = capture.Color;
                        finalDepth = capture.Depth;
                        break;
                }

                processor.matrixSize = new Vector3Int(
                    (int)(finalColor.WidthPixels * volumeScale.x),
                    (int)(finalColor.HeightPixels * volumeScale.y),
                    (int)((depthRanges[(int)device.CurrentDepthMode].y - depthRanges[(int)device.CurrentDepthMode].x) / 11 * volumeScale.z));

                if (processor.colorTexture == null)
                {
                    processor.colorTexture = new Texture2D(finalColor.WidthPixels, finalColor.HeightPixels, TextureFormat.BGRA32, false);
                    colorData = new byte[finalColor.Memory.Length];
                }
                if (processor.depthTexture == null)
                {
                    processor.depthTexture = new Texture2D(finalDepth.WidthPixels, finalDepth.HeightPixels, TextureFormat.R16, false);
                    processor.oldDepthTexture = new Texture2D(finalDepth.WidthPixels, finalDepth.HeightPixels, TextureFormat.R16, false);
                    depthData = new byte[finalDepth.Memory.Length];
                }

                colorData = finalColor.Memory.ToArray();
                processor.colorTexture.LoadRawTextureData(colorData);
                processor.colorTexture.Apply();

                depthData = finalDepth.Memory.ToArray();
                processor.depthTexture.LoadRawTextureData(depthData);
                processor.depthTexture.Apply();

                processor.ProcessKinectData();
                Graphics.CopyTexture(processor.depthTexture, processor.oldDepthTexture);
            }
        }
        else
        {
            await Task.Run(() => { });
        }
    }
}
private async UniTaskVoid LoopResultDataUpdater()
{
    while (true)
    {
        using (Capture capture = await Task.Run(() => _kinect.GetCapture()).ConfigureAwait(true))
        {
            Image colorImage = _kinectTransformation.ColorImageToDepthCamera(capture);
            BGRA[] colorArray = colorImage.GetPixels<BGRA>().ToArray();

            Image xyzImage = _kinectTransformation.DepthImageToPointCloud(capture.Depth);
            Short3[] xyzArray = xyzImage.GetPixels<Short3>().ToArray();

            Image depthImage = capture.Depth;
            ushort[] depthArray = depthImage.GetPixels<ushort>().ToArray();

            for (int i = 0; i < DeviceInfo.TotalPixelNum; i++)
            {
                _resultData.Vertexes[i] = new Vector3(
                    xyzArray[i].X * 0.001f,
                    -xyzArray[i].Y * 0.001f,
                    xyzArray[i].Z * 0.001f);

                _resultData.Colors[i] = new Color32(
                    colorArray[i].R,
                    colorArray[i].G,
                    colorArray[i].B,
                    colorArray[i].A);

                // Map depth in [500, 5500] mm to an inverted 8-bit grayscale value;
                // out-of-range values are clamped to white.
                int depthVal = (int)(255 - (255 * (depthArray[i] - 500) / 5000.0));
                if (depthVal < 0 || depthVal > 255)
                {
                    depthVal = 255;
                }
                _depthImageColors[i] = new Color32((byte)depthVal, (byte)depthVal, (byte)depthVal, 255);
            }

            _resultData.RGBTexture.SetPixels32(_resultData.Colors);
            _resultData.RGBTexture.Apply();
            _resultData.DepthTexture.SetPixels32(_depthImageColors);
            _resultData.DepthTexture.Apply();

            _subjectResultData.OnNext(_resultData);
        }
    }
}
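A minimal allocation sketch for the container this updater publishes; KinectResultData and the DeviceInfo members are hypothetical names inferred from usage, not from the original source:

// Allocation sketch (assumption): sized to the depth image dimensions.
// KinectResultData, DeviceInfo.Width, and DeviceInfo.Height are hypothetical.
_resultData = new KinectResultData
{
    Vertexes = new Vector3[DeviceInfo.TotalPixelNum],
    Colors = new Color32[DeviceInfo.TotalPixelNum],
    RGBTexture = new Texture2D(DeviceInfo.Width, DeviceInfo.Height, TextureFormat.RGBA32, false),
    DepthTexture = new Texture2D(DeviceInfo.Width, DeviceInfo.Height, TextureFormat.RGBA32, false)
};
_depthImageColors = new Color32[DeviceInfo.TotalPixelNum];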
private async Task KinectLoop()
{
    while (true)
    {
        using (Capture capture = await Task.Run(() => kinect.GetCapture()).ConfigureAwait(true))
        {
            // Getting color information
            _colorImage = transformation.ColorImageToDepthCamera(capture);
            BGRA[] colorArray = _colorImage.GetPixels<BGRA>().ToArray();
            UInt16[] depthArray = capture.Depth.GetPixels<UInt16>().ToArray();

            // Pack RGB into the color channels and raw depth into alpha.
            for (int i = 0; i < numPoints; i++)
            {
                rgbImageColors[i].b = (float)colorArray[i].B / 255f;
                rgbImageColors[i].g = (float)colorArray[i].G / 255f;
                rgbImageColors[i].r = (float)colorArray[i].R / 255f;
                rgbImageColors[i].a = (float)depthArray[i];
            }
            texture.SetPixels(rgbImageColors);
            texture.Apply();
            Graphics.Blit(texture, renderTexture, renderer.material);

            try
            {
                NativeArray<Color32> requestOutput = new NativeArray<Color32>(_width * _height * 2, Unity.Collections.Allocator.TempJob);
                gpuRequest = UnityEngine.Rendering.AsyncGPUReadback.RequestIntoNativeArray<Color32>(ref requestOutput, renderTexture, 0, (request) =>
                {
                    if (onDestroyCalled)
                    {
                        return;
                    }
                    // Repack each RGBD pixel into a single int (ABGR byte order).
                    Color32[] RGBDTextureColors = requestOutput.ToArray();
                    for (int i = 0; i < RGBDTextureColors.Length; i++)
                    {
                        _imageIntArray[i] = RGBDTextureColors[i].a << 24 | RGBDTextureColors[i].b << 16 | RGBDTextureColors[i].g << 8 | RGBDTextureColors[i].r;
                    }
                    Marshal.Copy(_imageIntArray, 0, _imageDataBuffer, _imageIntArray.Length);
                    requestOutput.Dispose();
                });
            }
            catch (Exception e)
            {
                Debug.LogError(e);
            }
        }
    }
}
private async Task CameraLoop(Device device)
{
    Material matt = mesh.material;
    while (running)
    {
        if (collectCameraData)
        {
            using (Capture capture = await Task.Run(() => device.GetCapture()).ConfigureAwait(true))
            {
                switch (transformationMode)
                {
                    case TransformationMode.ColorToDepth:
                        finalColor = transformation.ColorImageToDepthCamera(capture);
                        finalDepth = capture.Depth;
                        break;
                    case TransformationMode.DepthToColor:
                        finalColor = capture.Color;
                        finalDepth = transformation.DepthImageToColorCamera(capture);
                        break;
                    case TransformationMode.None:
                        finalColor = capture.Color;
                        finalDepth = capture.Depth;
                        break;
                }

                if (depthTexture == null)
                {
                    depthTexture = new Texture2D(finalDepth.WidthPixels, finalDepth.HeightPixels, TextureFormat.R16, false);
                    depthData = new byte[finalDepth.Memory.Length];
                    print("Made Depth Texture");
                }

                depthData = finalDepth.Memory.ToArray();
                depthTexture.LoadRawTextureData(depthData);
                depthTexture.Apply();
                matt.SetTexture("_MainTex", depthTexture);
            }
        }
        else
        {
            await Task.Run(() => { });
        }
    }
}
private void UpdateColorData(Capture capture)
{
    // Getting color information aligned to the depth camera.
    Image colorImage = new Image(ImageFormat.ColorBgra32, depthWidth, depthHeight);
    transformation.ColorImageToDepthCamera(capture.DepthImage, capture.ColorImage, colorImage);
    if (colorImage != null)
    {
        lock (colors)
        {
            colorImage.CopyTo(colors);
        }
        colorImage.Dispose();
    }
}
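Unlike the other snippets, this variant passes the depth and color images explicitly and writes into a preallocated BGRA image at depth resolution. A hedged usage sketch, assuming a device field opened and started elsewhere; the isRunning flag and device name are illustrative:

// Usage sketch (assumption): drive UpdateColorData from a capture loop.
while (isRunning)
{
    using (Capture capture = device.GetCapture())
    {
        UpdateColorData(capture);
    }
}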
// Method that gets data from the Kinect and renders it.
private async Task KinectLoop()
{
    // Keep pulling data from the Kinect in a while loop.
    while (true)
    {
        // Grab a frame of Kinect data with GetCapture.
        using (Capture capture = await Task.Run(() => kinect.GetCapture()).ConfigureAwait(true))
        {
            // Get a color image already aligned in position and size with the depth image.
            Image colorImage = transformation.ColorImageToDepthCamera(capture);
            // Extract an array holding only the color values.
            BGRA[] colorArray = colorImage.GetPixels<BGRA>().ToArray();

            // Get the depth image via capture.Depth and convert it
            // to XYZ coordinates with DepthImageToPointCloud.
            Image xyzImage = transformation.DepthImageToPointCloud(capture.Depth);
            // Extract an array holding only the point coordinates from the converted data.
            Short3[] xyzArray = xyzImage.GetPixels<Short3>().ToArray();

            // Assign the position and color of every point captured by the Kinect.
            for (int i = 0; i < num; i++)
            {
                // Assign the vertex coordinates.
                vertices[i].x = xyzArray[i].X * 0.001f;
                vertices[i].y = -xyzArray[i].Y * 0.001f; // flip vertically
                vertices[i].z = xyzArray[i].Z * 0.001f;

                // Assign the colors.
                colors[i].b = colorArray[i].B;
                colors[i].g = colorArray[i].G;
                colors[i].r = colorArray[i].R;
                colors[i].a = 255;
            }

            // Hand the latest point positions and colors to the mesh.
            mesh.vertices = vertices;
            mesh.colors32 = colors;
            mesh.RecalculateBounds();
        }
    }
}
// Get data from the Kinect and render it.
private async Task KinectLoop()
{
    while (true)
    {
        using (Capture capture = await Task.Run(() => _kinectDevice.GetCapture()).ConfigureAwait(true))
        {
            Image colorImage = _kinectTransformation.ColorImageToDepthCamera(capture);
            BGRA[] colorArray = colorImage.GetPixels<BGRA>().ToArray();

            Image xyzImage = _kinectTransformation.DepthImageToPointCloud(capture.Depth);
            Short3[] xyzArray = xyzImage.GetPixels<Short3>().ToArray();

            for (int i = 0; i < _pointNum; i++)
            {
                _meshVertices[i].x = xyzArray[i].X * 0.001f;
                _meshVertices[i].y = -xyzArray[i].Y * 0.001f;
                _meshVertices[i].z = xyzArray[i].Z * 0.001f;

                _meshColors[i].b = colorArray[i].B;
                _meshColors[i].g = colorArray[i].G;
                _meshColors[i].r = colorArray[i].R;
                _meshColors[i].a = 255;
            }

            // update vertices and colors
            _mesh.vertices = _meshVertices;
            _mesh.colors32 = _meshColors;

            // update indices
            List<int> indiceList = GetIndiceList(_meshVertices);
            _mesh.SetIndices(indiceList, _meshTopology, 0);
            _mesh.RecalculateBounds();
        }
    }
}
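GetIndiceList is not shown in the original. A hypothetical sketch of such a helper, assuming point topology and skipping the zero-depth points that the point cloud transform leaves at the origin:

// Hypothetical sketch of GetIndiceList (the original helper is not shown).
private List<int> GetIndiceList(Vector3[] vertices)
{
    var indices = new List<int>(vertices.Length);
    for (int i = 0; i < vertices.Length; i++)
    {
        if (vertices[i].z != 0f) // invalid depth pixels map to z == 0
        {
            indices.Add(i);
        }
    }
    return indices;
}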
private void Start()
{
    _kinect = Device.Open();
    _kinect.StartCameras(new DeviceConfiguration
    {
        ColorFormat = ImageFormat.ColorBGRA32,
        ColorResolution = ColorResolution.R1080p,
        DepthMode = DepthMode.NFOV_2x2Binned,
        SynchronizedImagesOnly = true,
        CameraFPS = FPS.FPS30
    });
    _isRunning = true;

    // get calibrations
    _depthCameraCalibration = _kinect.GetCalibration().DepthCameraCalibration;
    _kinectTransformation = _kinect.GetCalibration().CreateTransformation();

    // texture settings
    _colorTexture2D = new Texture2D(_depthCameraCalibration.ResolutionWidth, _depthCameraCalibration.ResolutionHeight, TextureFormat.BGRA32, false);
    _depthTexture2D = new Texture2D(_depthCameraCalibration.ResolutionWidth, _depthCameraCalibration.ResolutionHeight, TextureFormat.RGBAFloat, false)
    {
        filterMode = FilterMode.Point
    };

    // preview panel - plane object for previewing the color/depth texture
    if (previewPlane != null)
    {
        previewPlane.GetComponent<MeshRenderer>().material.SetTexture(MainTex, _colorTexture2D);
        previewPlane.transform.localScale = new Vector3(1f, 1f, (float)_depthCameraCalibration.ResolutionHeight / _depthCameraCalibration.ResolutionWidth);
    }

    // vfx settings
    if (effect != null)
    {
        effect.SetUInt(_propertyWidth, (uint)_depthCameraCalibration.ResolutionWidth);
        effect.SetUInt(_propertyHeight, (uint)_depthCameraCalibration.ResolutionHeight);
        effect.SetTexture(_propertyColorImage, _colorTexture2D);
        effect.SetTexture(_propertyXyzImage, _depthTexture2D);
    }

    UniTask.Run(() =>
    {
        while (_isRunning)
        {
            using (var capture = _kinect.GetCapture())
            {
                Image colorImage = _kinectTransformation.ColorImageToDepthCamera(capture);
                _rawColorData = colorImage.Memory.ToArray();

                Image xyzImage = _kinectTransformation.DepthImageToPointCloud(capture.Depth);
                _xyzs = xyzImage.GetPixels<Short3>()
                    .ToArray()
                    .Select(short3 => new Color(short3.X / 100.0f, -short3.Y / 100.0f, short3.Z / 100.0f))
                    .ToArray();
            }
        }
    }, true, this.GetCancellationTokenOnDestroy()).Forget();
}
private async Task KinectLoop()
{
    while (true)
    {
        using (Capture capture = await Task.Run(() => kinect.GetCapture()).ConfigureAwait(true))
        {
            // Getting color information
            Image colorImage = transformation.ColorImageToDepthCamera(capture);
            BGRA[] colorArray = colorImage.GetPixels<BGRA>().ToArray();

            // Getting vertices of point cloud
            Image xyzImage = transformation.DepthImageToPointCloud(capture.Depth);
            // Image depthImage = transformation.DepthImageToColorCamera(capture);
            Short3[] xyzArray = xyzImage.GetPixels<Short3>().ToArray();

            for (int i = 0; i < num; i++)
            {
                vertices[i].x = xyzArray[i].X * 0.001f;
                vertices[i].y = -xyzArray[i].Y * 0.001f; // flip vertically
                vertices[i].z = xyzArray[i].Z * 0.001f;

                // Normalize depth over a 10 m range and keep only points
                // between the foreground and background boundaries.
                float norm = xyzArray[i].Z * 0.001f / 10.0f;
                byte depth = 0;
                if (norm >= foregroundBoundry && norm <= backgroundBoundry)
                {
                    depth = (byte)(255 - (norm * 255));
                }
                depthValues[i].g = depth;
                depthValues[i].r = depth;
                depthValues[i].b = depth;
                depthValues[i].a = 255;

                colors[i].b = colorArray[i].B;
                colors[i].g = colorArray[i].G;
                colors[i].r = colorArray[i].R;
                colors[i].a = 255;
            }

            depthTexture.SetPixels32(depthValues);
            depthTexture.Apply(true, false);
            colorTexture.SetPixels32(colors);
            colorTexture.Apply(true, false);

            depthMat.mainTexture = depthTexture;
            colorMat.mainTexture = colorTexture;

            mesh.vertices = vertices;
            mesh.colors32 = colors;
            mesh.RecalculateBounds();
        }
    }
}
private async Task CameraLoop(Device device)
{
    Material matt = mesh.material;
    while (running)
    {
        if (collectCameraData)
        {
            using (Capture capture = await Task.Run(() => device.GetCapture()).ConfigureAwait(true))
            {
                switch (kinectSettings.transformationMode)
                {
                    case TransformationMode.ColorToDepth:
                        finalColor = transformation.ColorImageToDepthCamera(capture);
                        finalDepth = capture.Depth;
                        break;
                    case TransformationMode.DepthToColor:
                        finalColor = capture.Color;
                        finalDepth = transformation.DepthImageToColorCamera(capture);
                        break;
                    case TransformationMode.None:
                        finalColor = capture.Color;
                        finalDepth = capture.Depth;
                        break;
                }

                if (volumeBuffer == null)
                {
                    matrixSize = new Vector3Int(
                        (int)(finalColor.WidthPixels * kinectSettings.volumeScale.x),
                        (int)(finalColor.HeightPixels * kinectSettings.volumeScale.y),
                        (int)((KinectUtilities.depthRanges[(int)device.CurrentDepthMode].y - KinectUtilities.depthRanges[(int)device.CurrentDepthMode].x) / 11 * kinectSettings.volumeScale.z));
                    volumeBuffer = new ComputeBuffer(matrixSize.x * matrixSize.y * matrixSize.z, 4 * sizeof(float), ComputeBufferType.Default);
                    extractedVolumeBuffer = new float[matrixSize.x * matrixSize.y * matrixSize.z * 4];
                    extractedVolumeBytes = new byte[matrixSize.x * matrixSize.y * matrixSize.z * 4 * 4];
                }
                if (colorTexture == null)
                {
                    colorTexture = new Texture2D(finalColor.WidthPixels, finalColor.HeightPixels, TextureFormat.BGRA32, false);
                    colorData = new byte[finalColor.Memory.Length];
                }
                if (depthTexture == null)
                {
                    depthTexture = new Texture2D(finalDepth.WidthPixels, finalDepth.HeightPixels, TextureFormat.R16, false);
                    oldDepthTexture = new Texture2D(finalDepth.WidthPixels, finalDepth.HeightPixels, TextureFormat.R16, false);
                    depthData = new byte[finalDepth.Memory.Length];
                }

                colorData = finalColor.Memory.ToArray();
                colorTexture.LoadRawTextureData(colorData);
                colorTexture.Apply();

                depthData = finalDepth.Memory.ToArray();
                depthTexture.LoadRawTextureData(depthData);
                depthTexture.Apply();

                configureComputeShader();
                kinectProcessingShader.Dispatch(computeShaderKernelIndex, matrixSize.x / 16, matrixSize.y / 16, 1);

                // Get the volume buffer data as a byte array
                volumeBuffer.GetData(extractedVolumeBytes);

                // TODO: Test which is faster, or if a dedicated thread would be best.
                // Option 1: use the ThreadPool to manage the thread.
                ThreadPool.QueueUserWorkItem((state) => Postprocess((byte[])state), extractedVolumeBytes);
                // Option 2: spawn a thread for each frame.
                // new Thread(() => Postprocess(extractedVolumeBytes)).Start();

                if (compressedBytes == 0)
                {
                    byte[] compressedArray = CompressData(extractedVolumeBytes);
                    compressedBytes = compressedArray.Length;
                    maxRecordingSeconds = (maxFileSizeMb * 1000 * 1000) / (compressedBytes * KinectUtilities.FPStoInt(kinectSettings.fps));
                }

                matt.SetBuffer("colors", volumeBuffer);
                matt.SetInt("_MatrixX", matrixSize.x);
                matt.SetInt("_MatrixY", matrixSize.y);
                matt.SetInt("_MatrixZ", matrixSize.z);
                Graphics.CopyTexture(depthTexture, oldDepthTexture);
            }
        }
        else
        {
            await Task.Run(() => { });
        }
    }
}
private async Task CameraLoop(Device device)
{
    Material matt = mesh.material;
    while (running)
    {
        if (collectCameraData)
        {
            using (Capture capture = await Task.Run(() => device.GetCapture()).ConfigureAwait(true))
            {
                switch (transformationMode)
                {
                    case TransformationMode.ColorToDepth:
                        finalColor = transformation.ColorImageToDepthCamera(capture);
                        finalDepth = capture.Depth;
                        break;
                    case TransformationMode.DepthToColor:
                        finalColor = capture.Color;
                        finalDepth = transformation.DepthImageToColorCamera(capture);
                        break;
                    case TransformationMode.None:
                        finalColor = capture.Color;
                        finalDepth = capture.Depth;
                        break;
                }

                if (volumeBuffer == null)
                {
                    matrixSize = new Vector3Int(
                        (int)(finalColor.WidthPixels * volumeScale.x),
                        (int)(finalColor.HeightPixels * volumeScale.y),
                        (int)((depthRanges[(int)device.CurrentDepthMode].y - depthRanges[(int)device.CurrentDepthMode].x) / 11 * volumeScale.z));
                    volumeBuffer = new ComputeBuffer(matrixSize.x * matrixSize.y * matrixSize.z, 4 * sizeof(float), ComputeBufferType.Default);
                }
                if (colorTexture == null)
                {
                    colorTexture = new Texture2D(finalColor.WidthPixels, finalColor.HeightPixels, TextureFormat.BGRA32, false);
                    colorData = new byte[finalColor.Memory.Length];
                }
                if (depthTexture == null)
                {
                    depthTexture = new Texture2D(finalDepth.WidthPixels, finalDepth.HeightPixels, TextureFormat.R16, false);
                    oldDepthTexture = new Texture2D(finalDepth.WidthPixels, finalDepth.HeightPixels, TextureFormat.R16, false);
                    depthData = new byte[finalDepth.Memory.Length];
                }

                colorData = finalColor.Memory.ToArray();
                colorTexture.LoadRawTextureData(colorData);
                colorTexture.Apply();

                depthData = finalDepth.Memory.ToArray();
                depthTexture.LoadRawTextureData(depthData);
                depthTexture.Apply();

                // Apply Buffer Updates
                int kernelIndex = shader.FindKernel("ToBuffer");
                shader.SetInt("_MatrixX", matrixSize.x);
                shader.SetInt("_MatrixY", matrixSize.y);
                shader.SetInt("_MatrixZ", matrixSize.z);
                shader.SetTexture(kernelIndex, "ColorTex", colorTexture);
                shader.SetTexture(kernelIndex, "DepthTex", depthTexture);
                shader.SetTexture(kernelIndex, "oldDepthTexture", oldDepthTexture);
                shader.SetBuffer(kernelIndex, "ResultBuffer", volumeBuffer);
                shader.SetInt("minDepth", minDepthMM);
                shader.SetInt("maxDepth", maxDepthMM);
                shader.Dispatch(kernelIndex, matrixSize.x / 8, matrixSize.y / 8, matrixSize.z / 8);

                matt.SetBuffer("colors", volumeBuffer);
                matt.SetInt("_MatrixX", matrixSize.x);
                matt.SetInt("_MatrixY", matrixSize.y);
                matt.SetInt("_MatrixZ", matrixSize.z);
                Graphics.CopyTexture(depthTexture, oldDepthTexture);
            }
        }
        else
        {
            await Task.Run(() => { });
        }
    }
}
private async Task KinectLoop(Device device)
{
    while (true)
    {
        using (Capture capture = await Task.Run(() => device.GetCapture()).ConfigureAwait(true))
        {
            Image modifiedColor = transformation.ColorImageToDepthCamera(capture);
            BGRA[] colorArray = modifiedColor.GetPixels<BGRA>().ToArray();

            Image cloudImage = transformation.DepthImageToPointCloud(capture.Depth);
            Short3[] PointCloud = cloudImage.GetPixels<Short3>().ToArray();

            int triangleIndex = 0;
            int pointIndex = 0;
            int topLeft, topRight, bottomLeft, bottomRight;
            int tl, tr, bl, br;
            for (int y = 0; y < depthHeight; y++)
            {
                for (int x = 0; x < depthWidth; x++)
                {
                    vertices[pointIndex].x = PointCloud[pointIndex].X * 0.001f;
                    vertices[pointIndex].y = -PointCloud[pointIndex].Y * 0.001f;
                    vertices[pointIndex].z = PointCloud[pointIndex].Z * 0.001f;

                    col[pointIndex].a = 255;
                    col[pointIndex].b = colorArray[pointIndex].B;
                    col[pointIndex].g = colorArray[pointIndex].G;
                    col[pointIndex].r = colorArray[pointIndex].R;

                    if (x != (depthWidth - 1) && y != (depthHeight - 1))
                    {
                        topLeft = pointIndex;
                        topRight = topLeft + 1;
                        bottomLeft = topLeft + depthWidth;
                        bottomRight = bottomLeft + 1;
                        tl = PointCloud[topLeft].Z;
                        tr = PointCloud[topRight].Z;
                        bl = PointCloud[bottomLeft].Z;
                        br = PointCloud[bottomRight].Z;

                        if (tl > nearClip && tr > nearClip && bl > nearClip)
                        {
                            indeces[triangleIndex++] = topLeft;
                            indeces[triangleIndex++] = topRight;
                            indeces[triangleIndex++] = bottomLeft;
                        }
                        else
                        {
                            indeces[triangleIndex++] = 0;
                            indeces[triangleIndex++] = 0;
                            indeces[triangleIndex++] = 0;
                        }

                        if (bl > nearClip && tr > nearClip && br > nearClip)
                        {
                            indeces[triangleIndex++] = bottomLeft;
                            indeces[triangleIndex++] = topRight;
                            indeces[triangleIndex++] = bottomRight;
                        }
                        else
                        {
                            indeces[triangleIndex++] = 0;
                            indeces[triangleIndex++] = 0;
                            indeces[triangleIndex++] = 0;
                        }
                    }
                    pointIndex++;
                }
            }
            texture.SetPixels32(col);
            texture.Apply();

            mesh.vertices = vertices;
            mesh.triangles = indeces;
            mesh.RecalculateBounds();
        }
    }
}
private async Task CameraLoop(Device device)
{
    Material matt = mesh.material;
    while (running)
    {
        if (collectCameraData)
        {
            using (Capture capture = await Task.Run(() => device.GetCapture()).ConfigureAwait(true))
            {
                switch (transformationMode)
                {
                    case TransformationMode.ColorToDepth:
                        finalColor = transformation.ColorImageToDepthCamera(capture);
                        finalDepth = capture.Depth;
                        break;
                    case TransformationMode.DepthToColor:
                        finalColor = capture.Color;
                        finalDepth = transformation.DepthImageToColorCamera(capture);
                        break;
                    case TransformationMode.None:
                        finalColor = capture.Color;
                        finalDepth = capture.Depth;
                        break;
                }

                if (volumeTexture == null)
                {
                    matrixSize = new Vector3Int(
                        (int)(finalColor.WidthPixels * volumeScale.x),
                        (int)(finalColor.HeightPixels * volumeScale.y),
                        (int)((depthRanges[(int)device.CurrentDepthMode].y - depthRanges[(int)device.CurrentDepthMode].x) / 11 * volumeScale.z));
                    volumeTexture = new RenderTexture(matrixSize.x, matrixSize.y, 0, RenderTextureFormat.ARGB32);
                    volumeTexture.enableRandomWrite = true;
                    volumeTexture.dimension = UnityEngine.Rendering.TextureDimension.Tex3D;
                    volumeTexture.volumeDepth = matrixSize.z;
                    volumeTexture.Create();
                }
                if (colorTexture == null)
                {
                    colorTexture = new Texture2D(finalColor.WidthPixels, finalColor.HeightPixels, TextureFormat.BGRA32, false);
                    colorData = new byte[finalColor.Memory.Length];
                }
                if (depthTexture == null)
                {
                    depthTexture = new Texture2D(finalDepth.WidthPixels, finalDepth.HeightPixels, TextureFormat.R16, false);
                    oldDepthTexture = new Texture2D(finalDepth.WidthPixels, finalDepth.HeightPixels, TextureFormat.R16, false);
                    depthData = new byte[finalDepth.Memory.Length];
                }

                colorData = finalColor.Memory.ToArray();
                colorTexture.LoadRawTextureData(colorData);
                colorTexture.Apply();

                depthData = finalDepth.Memory.ToArray();
                depthTexture.LoadRawTextureData(depthData);
                depthTexture.Apply();

                // Apply Texture Updates
                int kernelIndex = shader.FindKernel("ToTexture");
                shader.SetTexture(kernelIndex, "ColorTex", colorTexture);
                shader.SetTexture(kernelIndex, "DepthTex", depthTexture);
                shader.SetTexture(kernelIndex, "oldDepthTexture", oldDepthTexture);
                shader.SetTexture(kernelIndex, "ResultTexture", volumeTexture);
                shader.SetVector("_Size", new Vector4(matrixSize.x, matrixSize.y, matrixSize.z, 1));
                shader.SetInt("minDepth", minDepthMM);
                shader.SetInt("maxDepth", maxDepthMM);
                shader.Dispatch(kernelIndex, matrixSize.x / 16, matrixSize.y / 16, 1);

                matt.SetTexture("_Volume", volumeTexture);
                Graphics.CopyTexture(depthTexture, oldDepthTexture);
            }
        }
        else
        {
            await Task.Run(() => { });
        }
    }
}
// processes the camera frame
private void ProcessCameraFrame(KinectInterop.SensorData sensorData, Capture capture)
{
    // check for color & depth sync
    if (isSyncDepthAndColor && (capture.Color == null || capture.Depth == null))
    {
        return;
    }

    try
    {
        // color frame
        if (capture.Color != null && rawColorImage != null)
        {
            if (kinectPlayback != null)
            {
                WaitForPlaybackTimestamp("color", capture.Color.DeviceTimestamp.Ticks);
            }
            lock (colorFrameLock)
            {
                capture.Color.CopyBytesTo(rawColorImage, 0, 0, rawColorImage.Length);
                rawColorTimestamp = (ulong)capture.Color.DeviceTimestamp.Ticks;
                colorFrameNumber = currentFrameNumber;
            }
        }

        // depth frame
        if (capture.Depth != null && rawDepthImage != null)
        {
            if (kinectPlayback != null)
            {
                WaitForPlaybackTimestamp("depth", capture.Depth.DeviceTimestamp.Ticks);
            }
            lock (depthFrameLock)
            {
                capture.Depth.CopyTo(rawDepthImage, 0, 0, rawDepthImage.Length);
                rawDepthTimestamp = (ulong)capture.Depth.DeviceTimestamp.Ticks;
                depthFrameNumber = currentFrameNumber;
            }
        }

        // infrared frame
        if (capture.IR != null && rawInfraredImage != null)
        {
            if (kinectPlayback != null)
            {
                WaitForPlaybackTimestamp("ir", capture.IR.DeviceTimestamp.Ticks);
            }
            lock (infraredFrameLock)
            {
                capture.IR.CopyTo(rawInfraredImage, 0, 0, rawInfraredImage.Length);
                rawInfraredTimestamp = (ulong)capture.IR.DeviceTimestamp.Ticks;
                infraredFrameNumber = currentFrameNumber;
            }
        }

        // transformation data frames
        if ((depth2ColorDataFrame != null || color2DepthDataFrame != null) && capture.Color != null && capture.Depth != null)
        {
            if (coordMapperTransform == null)
            {
                coordMapperTransform = coordMapper.CreateTransformation();
            }

            if (depth2ColorDataFrame != null)
            {
                lock (depth2ColorFrameLock)
                {
                    using (Image d2cColorData = coordMapperTransform.ColorImageToDepthCamera(capture))
                    {
                        d2cColorData.CopyTo<byte>(depth2ColorDataFrame, 0, 0, depth2ColorDataFrame.Length);
                        lastDepth2ColorFrameTime = (ulong)capture.Depth.DeviceTimestamp.Ticks;
                    }
                }
            }

            if (color2DepthDataFrame != null)
            {
                lock (color2DepthFrameLock)
                {
                    using (Image c2dDepthData = coordMapperTransform.DepthImageToColorCamera(capture))
                    {
                        c2dDepthData.CopyTo<ushort>(color2DepthDataFrame, 0, 0, color2DepthDataFrame.Length);
                        lastColor2DepthFrameTime = (ulong)capture.Color.DeviceTimestamp.Ticks;
                    }
                }
            }
        }
    }
    catch (System.Exception ex)
    {
        Debug.LogException(ex);
    }
}
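The transformation frames copied above have sizes that follow from the SDK's output formats: ColorImageToDepthCamera produces a BGRA32 image at depth resolution (4 bytes per pixel), and DepthImageToColorCamera produces a Depth16 image at color resolution (one ushort per pixel). A hedged allocation sketch, with the width/height variables assumed to come from the calibration:

// Allocation sketch (assumption): buffer sizes for the transformation data frames.
// depthWidth, depthHeight, colorWidth, and colorHeight are assumed fields.
depth2ColorDataFrame = new byte[depthWidth * depthHeight * 4];   // BGRA32 at depth resolution
color2DepthDataFrame = new ushort[colorWidth * colorHeight];     // Depth16 at color resolution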
private async Task KinectLoop(Device device)
{
    while (true)
    {
        using (Capture capture = await Task.Run(() => device.GetCapture()).ConfigureAwait(true))
        {
            Image modifiedColor = transformation.ColorImageToDepthCamera(capture);
            BGRA[] colorArray = modifiedColor.GetPixels<BGRA>().ToArray();

            Image pointCloudImage = transformation.DepthImageToPointCloud(capture.Depth);
            Short3[] PointCloud = pointCloudImage.GetPixels<Short3>().ToArray();

            int width = depthWidth;
            int height = depthHeight;
            int triangleIndex = 0;
            for (int y = 0; y < height; y++)
            {
                for (int x = 0; x < width; x++)
                {
                    int index = (y * width) + x;
                    vertices[index].x = PointCloud[index].X * 0.001f;
                    vertices[index].y = -PointCloud[index].Y * 0.001f;
                    vertices[index].z = PointCloud[index].Z * 0.001f;

                    col[index].a = 255;
                    col[index].b = colorArray[index].B;
                    col[index].g = colorArray[index].G;
                    col[index].r = colorArray[index].R;

                    if (x != (width - 1) && y != (height - 1))
                    {
                        int topLeft = index;
                        int topRight = topLeft + 1;
                        int bottomLeft = topLeft + width;
                        int bottomRight = bottomLeft + 1;
                        int tl = PointCloud[topLeft].Z;
                        int tr = PointCloud[topRight].Z;
                        int bl = PointCloud[bottomLeft].Z;
                        int br = PointCloud[bottomRight].Z;

                        // Only build triangles whose corners all lie beyond the near threshold.
                        if (tl > nearThreshold && tr > nearThreshold && bl > nearThreshold)
                        {
                            vertexIndeces[triangleIndex++] = topLeft;
                            vertexIndeces[triangleIndex++] = topRight;
                            vertexIndeces[triangleIndex++] = bottomLeft;
                        }
                        else
                        {
                            vertexIndeces[triangleIndex++] = 0;
                            vertexIndeces[triangleIndex++] = 0;
                            vertexIndeces[triangleIndex++] = 0;
                        }

                        if (bl > nearThreshold && tr > nearThreshold && br > nearThreshold)
                        {
                            vertexIndeces[triangleIndex++] = bottomLeft;
                            vertexIndeces[triangleIndex++] = topRight;
                            vertexIndeces[triangleIndex++] = bottomRight;
                        }
                        else
                        {
                            vertexIndeces[triangleIndex++] = 0;
                            vertexIndeces[triangleIndex++] = 0;
                            vertexIndeces[triangleIndex++] = 0;
                        }
                    }
                }
            }
            tex.SetPixels32(col);
            tex.Apply();

            mesh.vertices = vertices;
            mesh.triangles = vertexIndeces;
            mesh.RecalculateBounds();
        }
    }
}
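Both triangle-mesh loops above assume index buffers sized for two triangles per interior quad of the depth grid, i.e. (width - 1) * (height - 1) * 6 indices. A minimal initialization sketch, using the field names from this snippet:

// Initialization sketch (assumption): buffers for the triangle-mesh loops above.
int pointCount = depthWidth * depthHeight;
vertices = new Vector3[pointCount];
col = new Color32[pointCount];
vertexIndeces = new int[(depthWidth - 1) * (depthHeight - 1) * 6];

mesh = new Mesh();
mesh.indexFormat = UnityEngine.Rendering.IndexFormat.UInt32; // allow > 65535 vertices
tex = new Texture2D(depthWidth, depthHeight, TextureFormat.RGBA32, false);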