// Updates the body-tracking frame timestamp for the given sensor and, once the
// bt-frames of ALL sensors are time-synched to the master sensor, triggers
// body-frame processing for every sensor at once.
// sensorIndex - index of the sensor in the syncher arrays
// frameTimestamp - device timestamp of the bt-frame; a value of 0 is ignored
public void UpdateBtFrame(int sensorIndex, long frameTimestamp)
{
    if (frameTimestamp == 0)
    {
        // zero timestamps carry no sync information - skip them
        Debug.Log("Ignoring bt-frame for syncher index " + sensorIndex + ". Timestamp: " + frameTimestamp);
        return;
    }

    lock (syncherLock)
    {
        // (re)create the per-sensor syncher slot, if missing or stale
        if (syncherData == null || numSensors != syncherData.Length || syncherData[sensorIndex] == null)
        {
            CreateSyncherData(sensorIndex);
        }

        // set new frame
        //Debug.Log("Setting bt-frame for syncher index " + sensorIndex + ". Timestamp: " + frameTimestamp);
        syncherData[sensorIndex].btTimestamp = frameTimestamp;

        // check for synched body frames - only meaningful with multiple sensors
        // and a valid master that already has a bt-timestamp
        bool bAllSynched = numSensors > 1 && iMaster >= 0 && syncherData[iMaster] != null && syncherData[iMaster].btTimestamp != 0;

        if (bAllSynched)
        {
            long masterTime = syncherData[iMaster].btTimestamp;

            for (int i = 0; i < numSensors; i++)
            {
                // every sensor must have a slot and a non-zero timestamp
                if (syncherData[i] == null || syncherData[i].btTimestamp == 0)
                {
                    bAllSynched = false;
                    break;
                }

                long subTime = syncherData[i].btTimestamp;
                // expDelay - expected delay of this sensor relative to the master
                // (presumably in the same time units as the timestamps - TODO confirm)
                long expTime = masterTime + syncherData[i].expDelay;
                long subError = subTime - expTime;

                // a sub-sensor is synched when its error vs. the expected time is
                // within +/-MAX_TIME_ERROR; the master itself is skipped
                // NOTE(review): the 'subTime == 0' term is redundant here - zero
                // timestamps are already rejected by the check above
                if (i != iMaster && (subTime == 0 || subError < -MAX_TIME_ERROR || subError > MAX_TIME_ERROR))
                {
                    bAllSynched = false;
                    break;
                }
            }
        }

        if (bAllSynched)
        {
            //Debug.Log("Synched bt-frames. Index: " + sensorIndex + " MasterTime: " + syncherData[iMaster].btTimestamp);

            // process synched body frames of all sensors
            for (int i = 0; i < numSensors; i++)
            {
                Kinect4AzureInterface sensorInt = sensorInts[i];
                KinectInterop.SensorData sensorData = sensorDatas[i];

                //Debug.Log("  Processing bt-frame " + i + ". Timestamp: " + syncherData[i].btTimestamp);
                sensorInt.ProcessBodyFrame(sensorData, IntPtr.Zero, true);
                sensorInt.ProcessBtSensorCapture(sensorData);
            }
        }
        else
        {
            //Debug.Log("Bt-frames not synched. Index: " + sensorIndex + " ThisTime: " + syncherData[sensorIndex].btTimestamp + ", MasterTime: " + syncherData[iMaster].btTimestamp);
        }
    }
}
// Closes the dummy sensor. Nothing to release here - the dummy sensor holds
// no device handles, so only a log message is emitted.
public override void CloseSensor(KinectInterop.SensorData sensorData) =>
    Debug.Log("DummyK4A-sensor closed");
// Updates the pushed body-tracking capture for the given sensor and, once the
// push-captures of ALL sensors are time-synched to the master sensor, pushes
// the captures of all sensors to their body trackers at once.
// sensorIndex - index of the sensor in the syncher arrays
// capTimestamp - device timestamp of the capture; 0 disposes & ignores it
// capture - the sensor capture; ownership is taken over by the syncher
public void UpdatePushBtCapture(int sensorIndex, long capTimestamp, Capture capture)
{
    // a capture without depth data is useless for body tracking
    if (capture == null || capture.Depth == null)
    {
        return;
    }

    if (capTimestamp == 0)
    {
        // zero timestamps carry no sync information - dispose & skip
        Debug.Log("Ignoring push-capture for syncher index " + sensorIndex + ". Timestamp: " + capTimestamp);
        capture.Dispose();
        return;
    }

    lock (syncherLock)
    {
        // (re)create the per-sensor syncher slot, if missing or stale
        if (syncherData == null || numSensors != syncherData.Length || syncherData[sensorIndex] == null)
        {
            CreateSyncherData(sensorIndex);
        }

        // dispose the previous, not-yet-consumed capture to avoid leaking it
        if (syncherData[sensorIndex].pushCapture != null)
        {
            //Debug.Log("Disposing push-capture for syncher index " + sensorIndex + ". Timestamp: " + syncherData[sensorIndex].pushCapTimestamp);
            syncherData[sensorIndex].pushCapture.Dispose();
            syncherData[sensorIndex].pushCapture = null;
        }

        // set new capture
        //Debug.Log("Setting push-capture for syncher index " + sensorIndex + ". Timestamp: " + capTimestamp);
        syncherData[sensorIndex].pushCapTimestamp = capTimestamp;
        syncherData[sensorIndex].pushCapture = capture;

        // check for synched captures - only meaningful with multiple sensors
        // and a valid master that already has a push-capture timestamp
        bool bAllSynched = numSensors > 1 && iMaster >= 0 && syncherData[iMaster] != null && syncherData[iMaster].pushCapTimestamp != 0;

        if (bAllSynched)
        {
            long masterTime = syncherData[iMaster].pushCapTimestamp;

            for (int i = 0; i < numSensors; i++)
            {
                // every sensor must have a slot and a non-zero timestamp
                if (syncherData[i] == null || syncherData[i].pushCapTimestamp == 0)
                {
                    bAllSynched = false;
                    break;
                }

                long subTime = syncherData[i].pushCapTimestamp;
                // expDelay - expected delay of this sensor relative to the master
                // (presumably in the same time units as the timestamps - TODO confirm)
                long expTime = masterTime + syncherData[i].expDelay;
                long subError = subTime - expTime;

                // a sub-sensor is synched when its error vs. the expected time is
                // within +/-MAX_TIME_ERROR; the master itself is skipped
                // NOTE(review): the 'subTime == 0' term is redundant here - zero
                // timestamps are already rejected by the check above
                if (i != iMaster && (subTime == 0 || subError < -MAX_TIME_ERROR || subError > MAX_TIME_ERROR))
                {
                    bAllSynched = false;
                    break;
                }
            }
        }

        if (bAllSynched)
        {
            //Debug.Log("Synched push-captures. Index: " + sensorIndex + " MasterTime: " + syncherData[iMaster].pushCapTimestamp);

            // process synched sensor captures; PushBodyFrame takes over the
            // capture, so the syncher's reference is cleared afterwards
            for (int i = 0; i < numSensors; i++)
            {
                Kinect4AzureInterface sensorInt = sensorInts[i];
                KinectInterop.SensorData sensorData = sensorDatas[i];

                //Debug.Log("  Processing push capture " + i + ". Timestamp: " + syncherData[i].pushCapTimestamp);
                sensorInt.PushBodyFrame(sensorData, syncherData[i].pushCapture, true);
                syncherData[i].pushCapture = null;
            }
        }
        else
        {
            //Debug.Log("Push-captures not synched. Index: " + sensorIndex + " ThisTime: " + syncherData[sensorIndex].pushCapTimestamp + ", MasterTime: " + syncherData[iMaster].pushCapTimestamp);
        }
    }
}
// Updates the sensor data with the RealSense camera intrinsics & extrinsics,
// as soon as the respective stream profiles become available.
// Always returns true; the base update runs unconditionally first.
//
// FIX(review): the two nested lock pairs below originally acquired
// depthFrameLock/colorFrameLock in OPPOSITE orders (depth->color vs.
// color->depth) - a lock-order inversion that can deadlock against any other
// thread holding one of the locks. Both pairs now lock depth first, then color.
public override bool UpdateSensorData(KinectInterop.SensorData sensorData, KinectManager kinectManager, bool isPlayMode)
{
    base.UpdateSensorData(sensorData, kinectManager, isPlayMode);

    // color camera intrinsics - set once, when the color profile is known
    if (sensorData.colorCamIntr == null && colorStreamProfile != null)
    {
        lock (colorFrameLock)
        {
            Intel.RealSense.Intrinsics colorCamIntr = colorStreamProfile.GetIntrinsics();

            if (colorCamIntr.model != Distortion.None)
            {
                GetCameraIntrinsics(colorCamIntr, ref sensorData.colorCamIntr, 1);
            }
        }
    }

    // depth camera intrinsics - set once, when the depth profile is known
    if (sensorData.depthCamIntr == null && depthStreamProfile != null)
    {
        lock (depthFrameLock)
        {
            Intel.RealSense.Intrinsics depthCamIntr = depthStreamProfile.GetIntrinsics();
            //Debug.Log("RS distType: " + depthCamIntr.model);

            // playback recordings may report Distortion.None even though the
            // intrinsics are usable - accept them in playback mode, too
            if (depthCamIntr.model != Distortion.None || deviceStreamingMode == KinectInterop.DeviceStreamingMode.PlayRecording)
            {
                GetCameraIntrinsics(depthCamIntr, ref sensorData.depthCamIntr, 0);

                if (depthCamIntr.model == Distortion.None && deviceStreamingMode == KinectInterop.DeviceStreamingMode.PlayRecording)
                {
                    // workaround for playback mode (model & coeffs are missing there)
                    sensorData.depthCamIntr.distType = KinectInterop.DistortionType.BrownConrady;
                }

                // body & body-index data
                if ((frameSourceFlags & KinectInterop.FrameSource.TypeBody) != 0)
                {
                    Debug.LogWarning("Body tracking is not supported for RealSense sensors!");
                }
            }
        }
    }

    // depth-to-color extrinsics (locks ordered depth -> color)
    if (sensorData.depth2ColorExtr == null && depthStreamProfile != null && colorStreamProfile != null)
    {
        lock (depthFrameLock)
        {
            lock (colorFrameLock)
            {
                Intel.RealSense.Extrinsics depth2ColorExtr = depthStreamProfile.GetExtrinsicsTo(colorStreamProfile);
                GetCameraExtrinsics(depth2ColorExtr, ref sensorData.depth2ColorExtr);
            }
        }
    }

    // color-to-depth extrinsics (same lock order: depth -> color)
    if (sensorData.color2DepthExtr == null && colorStreamProfile != null && depthStreamProfile != null)
    {
        lock (depthFrameLock)
        {
            lock (colorFrameLock)
            {
                Intel.RealSense.Extrinsics color2DepthExtr = colorStreamProfile.GetExtrinsicsTo(depthStreamProfile);
                GetCameraExtrinsics(color2DepthExtr, ref sensorData.color2DepthExtr);
            }
        }
    }

    return true;
}
// Transforms the raw depth frame to color-camera resolution, by projecting
// each depth pixel's footprint (its +/-0.5px corners) into color-camera space
// and filling the covered color pixels with the depth value.
// sensorData - sensor data with the needed intrinsics & extrinsics
// d2cDepthFrame - output buffer; must be colorImageWidth * colorImageHeight ushorts
// Returns true when the transformation ran; false when any prerequisite
// (raw images, intrinsics or extrinsics) is still missing.
// Throws Exception when d2cDepthFrame has the wrong size.
// NOTE(review): the output buffer is not cleared here - stale values from a
// previous frame may remain in pixels not covered this frame; verify callers
// clear or tolerate this.
protected bool TransformDepthFrameToColorCamResolution(KinectInterop.SensorData sensorData, ushort[] d2cDepthFrame)
{
    if (rawDepthImage != null && rawColorImage != null && sensorData.depthCamIntr != null && sensorData.colorCamIntr != null && sensorData.depth2ColorExtr != null)
    {
        int depthImageW = sensorData.depthImageWidth;
        int depthImageH = sensorData.depthImageHeight;
        int colorImageW = sensorData.colorImageWidth;
        int colorImageH = sensorData.colorImageHeight;

        int mapImageLen = sensorData.colorImageWidth * sensorData.colorImageHeight;
        if (d2cDepthFrame == null || d2cDepthFrame.Length != mapImageLen)
        {
            throw new Exception("d2cDepthFrame is not big enough. Should be " + mapImageLen + " ushorts.");
        }

        //Intrinsics depthIntr = depthStreamProfile.GetIntrinsics();
        //Intrinsics colorIntr = colorStreamProfile.GetIntrinsics();
        //Extrinsics d2cExtr = depthStreamProfile.GetExtrinsicsTo(colorStreamProfile);

        for (int dy = 0, dIndex = 0; dy < depthImageH; dy++)
        {
            for (int dx = 0; dx < depthImageW; dx++, dIndex++)
            {
                ushort depthVal = rawDepthImage[dIndex];

                if (depthVal != 0)
                {
                    // raw depth is in millimeters; convert to meters
                    float depth = (float)depthVal * 0.001f;

                    // top-left corner of the depth pixel, mapped to color space
                    Vector2 depthPos1 = new Vector2(dx - 0.5f, dy - 0.5f);
                    Vector3 depthSpacePos1 = UnprojectPoint(sensorData.depthCamIntr, depthPos1, depth);
                    Vector3 colorSpacePos1 = TransformPoint(sensorData.depth2ColorExtr, depthSpacePos1);
                    Vector2 colorPos1 = ProjectPoint(sensorData.colorCamIntr, colorSpacePos1);

                    int colorPos1X = Mathf.RoundToInt(colorPos1.x);
                    int colorPos1Y = Mathf.RoundToInt(colorPos1.y);

                    // bottom-right corner of the depth pixel, mapped to color space
                    Vector2 depthPos2 = new Vector2(dx + 0.5f, dy + 0.5f);
                    Vector3 depthSpacePos2 = UnprojectPoint(sensorData.depthCamIntr, depthPos2, depth);
                    Vector3 colorSpacePos2 = TransformPoint(sensorData.depth2ColorExtr, depthSpacePos2);
                    Vector2 colorPos2 = ProjectPoint(sensorData.colorCamIntr, colorSpacePos2);

                    int colorPos2X = Mathf.RoundToInt(colorPos2.x);
                    int colorPos2Y = Mathf.RoundToInt(colorPos2.y);

                    // skip footprints that fall (even partially) outside the color image
                    if (colorPos1X < 0 || colorPos1Y < 0 || colorPos2X >= colorImageW || colorPos2Y >= colorImageH)
                    {
                        continue;
                    }

                    // fill the whole color-pixel rectangle covered by this depth pixel
                    for (int y = colorPos1Y; y <= colorPos2Y; y++)
                    {
                        int cIndex = y * colorImageW + colorPos1X;
                        for (int x = colorPos1X; x <= colorPos2X; x++, cIndex++)
                        {
                            d2cDepthFrame[cIndex] = depthVal;
                        }
                    }
                }
                else
                {
                    //d2cDepthFrame[cIndex] = 0;
                }
            }
        }

        return true;
    }

    return false;
}
// Initializes background removal with shaders.
// Creates the render textures and filter materials used by the background
// removal pipeline, and - when neither a distance filter nor a body-index
// filter is set - loads the compute shader that filters by body bounds.
// sensorData - the sensor data of the used sensor
// Returns true on success; false when sensorData is unusable or DirectX 11
// (compute shader support) is not available.
private bool InitBackgroundRemoval(KinectInterop.SensorData sensorData)
{
    if (sensorData != null && sensorData.sensorInterface != null && KinectInterop.IsDirectX11Available())
    {
        sensorInt = (DepthSensorBase)sensorData.sensorInterface;

        // optional filtering by body index
        if (filterByBI != null)
        {
            if (!filterByBI.InitBackgroundRemoval(sensorData))
            {
                Debug.LogError("Could not init the background removal by body index!");
                return false;
            }
        }

        // set the texture resolution; only override the sensor's point-cloud
        // resolution when no point-cloud textures were configured elsewhere
        if (sensorInt.pointCloudColorTexture == null && sensorInt.pointCloudVertexTexture == null)
        {
            sensorInt.pointCloudResolution = foregroundImageResolution;
        }

        textureRes = sensorInt.GetPointCloudTexResolution(sensorData);

        colorTexture = KinectInterop.CreateRenderTexture(colorTexture, textureRes.x, textureRes.y, RenderTextureFormat.ARGB32);
        // the vertex texture is only needed when not filtering by body index
        if (filterByBI == null)
        {
            vertexTexture = KinectInterop.CreateRenderTexture(vertexTexture, textureRes.x, textureRes.y, RenderTextureFormat.ARGBHalf);
        }
        alphaTexture = KinectInterop.CreateRenderTexture(alphaTexture, textureRes.x, textureRes.y, RenderTextureFormat.ARGB32);
        foregroundTexture = KinectInterop.CreateRenderTexture(foregroundTexture, textureRes.x, textureRes.y, RenderTextureFormat.ARGB32);

        // let the sensor interface render into our textures
        sensorInt.pointCloudColorTexture = colorTexture;
        sensorInt.pointCloudVertexTexture = vertexTexture;

        // create the filter materials used by the alpha-mask post-processing
        Shader erodeShader = Shader.Find("Kinect/ErodeShader");
        erodeFilterMat = new Material(erodeShader);
        erodeFilterMat.SetFloat("_TexResX", (float)textureRes.x);
        erodeFilterMat.SetFloat("_TexResY", (float)textureRes.y);
        //sensorData.erodeBodyMaterial.SetTexture("_MainTex", sensorData.bodyIndexTexture);

        Shader dilateShader = Shader.Find("Kinect/DilateShader");
        dilateFilterMat = new Material(dilateShader);
        dilateFilterMat.SetFloat("_TexResX", (float)textureRes.x);
        dilateFilterMat.SetFloat("_TexResY", (float)textureRes.y);
        //sensorData.dilateBodyMaterial.SetTexture("_MainTex", sensorData.bodyIndexTexture);

        Shader gradientShader = Shader.Find("Kinect/GradientShader");
        gradientFilterMat = new Material(gradientShader);

        Shader medianShader = Shader.Find("Kinect/MedianShader");
        medianFilterMat = new Material(medianShader);
        //sensorData.medianBodyMaterial.SetFloat("_Amount", 1.0f);

        Shader blurShader = Shader.Find("Kinect/BlurShader");
        blurFilterMat = new Material(blurShader);

        Shader invertShader = Shader.Find("Kinect/InvertShader");
        invertAlphaMat = new Material(invertShader);

        Shader foregroundShader = Shader.Find("Kinect/ForegroundShader");
        foregroundMat = new Material(foregroundShader);

        // no explicit filters - fall back to filtering by tracked body bounds
        // via compute shader
        if (filterByDist == null && filterByBI == null)
        {
            foregroundFilterShader = Resources.Load("ForegroundFiltBodyShader") as ComputeShader;
            foregroundFilterKernel = foregroundFilterShader != null ? foregroundFilterShader.FindKernel("FgFiltBody") : -1;
            //foregroundFilterPos = new Vector4[KinectInterop.Constants.MaxBodyCount];

            // per-body bounding planes used by the compute shader
            bodyPosMin = new Vector4[MAX_BODY_COUNT];
            bodyPosMaxX = new Vector4[MAX_BODY_COUNT];
            bodyPosMaxY = new Vector4[MAX_BODY_COUNT];
            bodyPosMaxZ = new Vector4[MAX_BODY_COUNT];
            bodyPosDot = new Vector4[MAX_BODY_COUNT];
        }

        return true;
    }

    return false;
}
// Polls for new sensor frames - either from a recording (playback mode) or
// from the live sensor - and processes camera & IMU frames as they arrive.
// In playback mode the current play position is derived from the wall clock
// (or from playbackPosSeconds, when >= 0, as an absolute seek position).
// TimeoutExceptions are expected (no frame within timeToWait) and ignored;
// other exceptions are logged.
public override void PollSensorFrames(KinectInterop.SensorData sensorData)
{
    try
    {
        if (kinectPlayback != null)
        {
            // when not seeking to a fixed position, stop at end of stream
            if (kinectPlayback.IsEndOfStream() && playbackPosSeconds < 0f)
            {
                return;
            }

            if (playbackStartTime == 0)
            {
                // start time
                playbackStartTime = DateTime.Now.Ticks;
            }

            // play position in ticks: elapsed wall time, or the fixed seek
            // position (seconds -> ticks: * 10,000,000)
            long currentPlayTime = playbackPosSeconds < 0f ? DateTime.Now.Ticks - playbackStartTime : (long)(playbackPosSeconds * 10000000);

            // 0x7F - any of the camera frame sources (color/depth/ir/...)
            if ((frameSourceFlags & (KinectInterop.FrameSource) 0x7F) != 0)
            {
                // SeekTimestamp expects microseconds (ticks / 10)
                kinectPlayback.SeekTimestamp((ulong)(currentPlayTime / 10));

                Capture capture = kinectPlayback.GetNextCapture();
                if (capture != null)
                {
                    ProcessCameraFrames(sensorData, capture);
                    capture.Dispose();
                    //currentFrameNumber++;
                }
                else
                {
                    Debug.Log("End of recording detected.");
                }
            }

            if ((frameSourceFlags & KinectInterop.FrameSource.TypePose) != 0)
            {
                // drain IMU samples up to the current depth timestamp
                ImuSample imuSample = kinectPlayback.GetNextImuSample();
                while (imuSample != null)
                {
                    ProcessImuFrame(imuSample);

                    ulong imuTimestamp = (ulong)imuSample.AccelerometerTimestamp.Ticks;
                    if (kinectPlayback.IsEndOfStream() || imuTimestamp >= rawDepthTimestamp)
                    {
                        break;
                    }

                    imuSample = kinectPlayback.GetNextImuSample();
                }
            }
        }
        else
        {
            if (isCamerasStarted)
            {
                // GetCapture throws TimeoutException when no frame arrives in time
                Capture capture = kinectSensor.GetCapture(timeToWait);
                ProcessCameraFrames(sensorData, capture);
                capture.Dispose();
                //currentFrameNumber++;
            }

            if (isImuStarted)
            {
                // drain all currently queued IMU samples
                ImuSample imuSample = kinectSensor.GetImuSample(timeToWait);
                while (imuSample != null)
                {
                    ProcessImuFrame(imuSample);
                    imuSample = kinectSensor.GetImuSample(timeToWait);
                }
            }
        }
    }
    catch (System.TimeoutException)
    {
        // do nothing - timeouts are part of normal polling
    }
    catch (System.Exception ex)
    {
        Debug.LogException(ex);
    }
}
// Opens the Kinect4Azure sensor (or a recording, in playback mode) and
// initializes the sensor data - image buffers, textures, intrinsics & extrinsics.
// dwFlags - the frame sources to enable
// bSyncDepthAndColor - whether to request synchronized depth & color images
// bSyncBodyAndDepth - whether body frames should be synched to depth frames
// Returns the initialized sensor data, or null when the sensor/recording
// cannot be opened.
//
// FIX(review): the "cannot be opened" error previously logged recordingFile
// instead of deviceIndex; also added the missing 'deviceIndex < 0' guard,
// consistent with the DummyK4A OpenSensor.
public override KinectInterop.SensorData OpenSensor(KinectInterop.FrameSource dwFlags, bool bSyncDepthAndColor, bool bSyncBodyAndDepth)
{
    // save initial parameters
    base.OpenSensor(dwFlags, bSyncDepthAndColor, bSyncBodyAndDepth);

    // ensure resources are in path
    //KinectInterop.CopyResourceFile("depthengine_1_0.x64.dll", "depthengine_1_0.dll");
    KinectInterop.CopyResourceFile("onnxruntime.dll", "onnxruntime.dll");
    KinectInterop.CopyResourceFile("dnn_model.onnx", "dnn_model.onnx");

    // try to open the sensor or play back the recording
    KinectInterop.SensorData sensorData = new KinectInterop.SensorData();

    if (deviceStreamingMode == KinectInterop.DeviceStreamingMode.PlayRecording)
    {
        if (string.IsNullOrEmpty(recordingFile))
        {
            Debug.LogError("Playback selected, but the path to recording file is missing.");
            return null;
        }

        if (!System.IO.File.Exists(recordingFile))
        {
            Debug.LogError("PlayRecording selected, but the recording file cannot be found: " + recordingFile);
            return null;
        }

        Debug.Log("Playing back: " + recordingFile);
        kinectPlayback = new Playback(recordingFile);

        // take the camera modes & sync mode from the recording's configuration
        colorCameraMode = (ColorCameraMode)kinectPlayback.playback_config.color_resolution;
        depthCameraMode = (DepthCameraMode)kinectPlayback.playback_config.depth_mode;
        deviceSyncMode = kinectPlayback.playback_config.wired_sync_mode;

        coordMapper = kinectPlayback.playback_calibration;
        playbackStartTime = DateTime.Now.Ticks;

        Debug.Log(string.Format("color_track_enabled: {0}, depth_track_enabled: {1}, ir_track_enabled: {2}, imu_track_enabled: {3}, depth_delay_off_color_usec: {4}",
            kinectPlayback.playback_config.color_track_enabled, kinectPlayback.playback_config.depth_track_enabled,
            kinectPlayback.playback_config.ir_track_enabled, kinectPlayback.playback_config.imu_track_enabled,
            kinectPlayback.playback_config.depth_delay_off_color_usec));
    }
    else
    {
        List<KinectInterop.SensorDeviceInfo> alSensors = GetAvailableSensors();
        if (deviceIndex < 0 || deviceIndex >= alSensors.Count)
        {
            Debug.Log("  D" + deviceIndex + " is not available. You can set the device index to -1, to disable it.");
            return null;
        }

        // try to open the sensor
        kinectSensor = Device.Open(deviceIndex);
        if (kinectSensor == null)
        {
            Debug.LogError("  D" + deviceIndex + " cannot be opened.");
            return null;
        }

        DeviceConfiguration kinectConfig = new DeviceConfiguration();
        kinectConfig.SynchronizedImagesOnly = isSyncDepthAndColor;
        kinectConfig.WiredSyncMode = deviceSyncMode;
        // subordinate delay is given in microseconds; ticks = usec * 10
        kinectConfig.SuboridinateDelayOffMaster = new TimeSpan(subDeviceDelayUsec * 10);

        // color
        kinectConfig.ColorFormat = ImageFormat.ColorBGRA32;
        if ((dwFlags & KinectInterop.FrameSource.TypeColor) != 0)
        {
            kinectConfig.ColorResolution = (ColorResolution)colorCameraMode;
        }
        else
        {
            kinectConfig.ColorResolution = ColorResolution.Off;
        }

        // depth
        if ((dwFlags & KinectInterop.FrameSource.TypeDepth) != 0)
        {
            kinectConfig.DepthMode = (DepthMode)depthCameraMode;
        }
        else
        {
            kinectConfig.DepthMode = DepthMode.Off;
        }

        // infrared - the IR image is delivered along with the depth stream,
        // so there is nothing extra to configure here
        if ((dwFlags & KinectInterop.FrameSource.TypeInfrared) != 0)
        {
            // ??
        }

        // start the cameras
        kinectSensor.StartCameras(kinectConfig);
        isCamerasStarted = true;

        if ((dwFlags & KinectInterop.FrameSource.TypePose) != 0)
        {
            // start the IMU
            kinectSensor.StartImu();
            isImuStarted = true;
        }

        // get reference to the coordinate mapper
        coordMapper = kinectSensor.GetCalibration();
    }

    // reset the frame number
    currentFrameNumber = 0;

    // flip color & depth image vertically
    sensorData.colorImageScale = new Vector3(-1f, -1f, 1f);
    sensorData.depthImageScale = new Vector3(-1f, -1f, 1f);
    sensorData.infraredImageScale = new Vector3(-1f, -1f, 1f);
    sensorData.sensorSpaceScale = new Vector3(-1f, -1f, 1f);

    // color camera data & intrinsics
    sensorData.colorImageFormat = TextureFormat.BGRA32;
    sensorData.colorImageStride = 4;  // 4 bytes per pixel

    if ((dwFlags & KinectInterop.FrameSource.TypeColor) != 0)
    {
        CameraCalibration colorCamera = coordMapper.ColorCameraCalibration;
        sensorData.colorImageWidth = colorCamera.ResolutionWidth;
        sensorData.colorImageHeight = colorCamera.ResolutionHeight;

        rawColorImage = new byte[sensorData.colorImageWidth * sensorData.colorImageHeight * 4];

        sensorData.colorImageTexture = new Texture2D(sensorData.colorImageWidth, sensorData.colorImageHeight, TextureFormat.BGRA32, false);
        sensorData.colorImageTexture.wrapMode = TextureWrapMode.Clamp;
        sensorData.colorImageTexture.filterMode = FilterMode.Point;
    }

    // depth camera data & intrinsics
    if ((dwFlags & KinectInterop.FrameSource.TypeDepth) != 0)
    {
        CameraCalibration depthCamera = coordMapper.DepthCameraCalibration;
        sensorData.depthImageWidth = depthCamera.ResolutionWidth;
        sensorData.depthImageHeight = depthCamera.ResolutionHeight;

        rawDepthImage = new ushort[sensorData.depthImageWidth * sensorData.depthImageHeight];
        sensorData.depthImage = new ushort[sensorData.depthImageWidth * sensorData.depthImageHeight];
    }

    // infrared data - shares the depth camera resolution
    if ((dwFlags & KinectInterop.FrameSource.TypeInfrared) != 0)
    {
        if (sensorData.depthImageWidth == 0 || sensorData.depthImageHeight == 0)
        {
            CameraCalibration depthCamera = coordMapper.DepthCameraCalibration;
            sensorData.depthImageWidth = depthCamera.ResolutionWidth;
            sensorData.depthImageHeight = depthCamera.ResolutionHeight;
        }

        rawInfraredImage = new ushort[sensorData.depthImageWidth * sensorData.depthImageHeight];
        sensorData.infraredImage = new ushort[sensorData.depthImageWidth * sensorData.depthImageHeight];
    }

    // calibration data
    GetCameraIntrinsics(CalibrationDeviceType.Color, coordMapper.ColorCameraCalibration, ref sensorData.colorCamIntr);
    GetCameraIntrinsics(CalibrationDeviceType.Depth, coordMapper.DepthCameraCalibration, ref sensorData.depthCamIntr);
    GetCameraExtrinsics(coordMapper.ColorCameraCalibration.Extrinsics, ref sensorData.depth2ColorExtr);

    Debug.Log("Kinect4Azure-sensor opened.");

    return sensorData;
}
// Caches the KinectManager instance and the sensor data for this sensor index.
void Start()
{
    kinectManager = KinectManager.Instance;

    if (kinectManager != null)
    {
        sensorData = kinectManager.GetSensorData(sensorIndex);
    }
    else
    {
        sensorData = null;
    }
}
// Polls for new sensor frames - either from a recording (playback mode) or
// from the live sensor - and processes camera & IMU frames as they arrive.
// TimeoutExceptions are expected (no frame within timeToWait) and ignored;
// other exceptions are logged.
public override void PollSensorFrames(KinectInterop.SensorData sensorData)
{
    try
    {
        if (kinectPlayback != null)
        {
            if (kinectPlayback.IsEndOfStream())
            {
                return;
            }

            // elapsed playback time in ticks
            // NOTE(review): currentPlayTime is computed but not used here - TODO confirm intent
            long currentPlayTime = DateTime.Now.Ticks - playbackStartTime;

            Capture capture = kinectPlayback.GetNextCapture();
            if (capture != null)
            {
                ProcessCameraFrame(sensorData, capture);
                capture.Dispose();
                currentFrameNumber++;
            }

            // drain IMU samples up to the current depth timestamp
            ImuSample imuSample = kinectPlayback.GetNextImuSample();
            while (imuSample != null)
            {
                ProcessImuFrame(imuSample);

                // average of gyro & accelerometer timestamps (sum >> 1)
                ulong imuTimestamp = (ulong)(imuSample.GyroTimestamp.Ticks + imuSample.AccelerometerTimestamp.Ticks) >> 1;  // avg
                if (kinectPlayback.IsEndOfStream() || imuTimestamp >= rawDepthTimestamp)
                {
                    break;
                }

                imuSample = kinectPlayback.GetNextImuSample();
            }
        }
        else
        {
            if (isCamerasStarted)
            {
                // GetCapture throws TimeoutException when no frame arrives in time
                Capture capture = kinectSensor.GetCapture(timeToWait);
                ProcessCameraFrame(sensorData, capture);
                capture.Dispose();
                currentFrameNumber++;
            }

            if (isImuStarted)
            {
                // drain all currently queued IMU samples
                ImuSample imuSample = kinectSensor.GetImuSample(timeToWait);
                while (imuSample != null)
                {
                    ProcessImuFrame(imuSample);
                    imuSample = kinectSensor.GetImuSample(timeToWait);
                }
            }
        }
    }
    catch (System.TimeoutException)
    {
        // do nothing - timeouts are part of normal polling
    }
    catch (System.Exception ex)
    {
        Debug.LogException(ex);
    }
}
// Processes a single camera capture: copies the color, depth & infrared
// images into the raw frame buffers (each under its own lock), and updates
// the depth-to-color / color-to-depth transformation frames when requested.
// In playback mode, WaitForPlaybackTimestamp() throttles each frame to its
// recorded timestamp.
// sensorData - the sensor data of the used sensor
// capture - the capture to process; the caller retains ownership
private void ProcessCameraFrame(KinectInterop.SensorData sensorData, Capture capture)
{
    // check for color & depth sync - when synchronization is requested,
    // only process captures that contain both images
    if (isSyncDepthAndColor && (capture.Color == null || capture.Depth == null))
    {
        return;
    }

    try
    {
        // color frame
        if (capture.Color != null && rawColorImage != null)
        {
            if (kinectPlayback != null)
            {
                WaitForPlaybackTimestamp("color", capture.Color.DeviceTimestamp.Ticks);
            }

            lock (colorFrameLock)
            {
                capture.Color.CopyBytesTo(rawColorImage, 0, 0, rawColorImage.Length);
                rawColorTimestamp = (ulong)capture.Color.DeviceTimestamp.Ticks;
                colorFrameNumber = currentFrameNumber;
                //Debug.Log("RawColorTimestamp: " + rawColorTimestamp);
            }
        }

        // depth frame
        if (capture.Depth != null && rawDepthImage != null)
        {
            if (kinectPlayback != null)
            {
                WaitForPlaybackTimestamp("depth", capture.Depth.DeviceTimestamp.Ticks);
            }

            lock (depthFrameLock)
            {
                capture.Depth.CopyTo(rawDepthImage, 0, 0, rawDepthImage.Length);
                rawDepthTimestamp = (ulong)capture.Depth.DeviceTimestamp.Ticks;
                depthFrameNumber = currentFrameNumber;
                //Debug.Log("RawDepthTimestamp: " + rawDepthTimestamp);
            }
        }

        // infrared frame
        if (capture.IR != null && rawInfraredImage != null)
        {
            if (kinectPlayback != null)
            {
                WaitForPlaybackTimestamp("ir", capture.IR.DeviceTimestamp.Ticks);
            }

            lock (infraredFrameLock)
            {
                capture.IR.CopyTo(rawInfraredImage, 0, 0, rawInfraredImage.Length);
                rawInfraredTimestamp = (ulong)capture.IR.DeviceTimestamp.Ticks;
                infraredFrameNumber = currentFrameNumber;
                //Debug.Log("RawInfraredTimestamp: " + rawInfraredTimestamp);
            }
        }

        // transformation data frames - need both color & depth images
        if ((depth2ColorDataFrame != null || color2DepthDataFrame != null) && capture.Color != null && capture.Depth != null)
        {
            // lazily create the coordinate-mapper transformation
            if (coordMapperTransform == null)
            {
                coordMapperTransform = coordMapper.CreateTransformation();
            }

            // color image mapped into the depth camera's view
            if (depth2ColorDataFrame != null)
            {
                lock (depth2ColorFrameLock)
                {
                    using (Image d2cColorData = coordMapperTransform.ColorImageToDepthCamera(capture))
                    {
                        d2cColorData.CopyTo<byte>(depth2ColorDataFrame, 0, 0, depth2ColorDataFrame.Length);
                        lastDepth2ColorFrameTime = (ulong)capture.Depth.DeviceTimestamp.Ticks;
                    }
                }
            }

            // depth image mapped into the color camera's view
            if (color2DepthDataFrame != null)
            {
                lock (color2DepthFrameLock)
                {
                    using (Image c2dDepthData = coordMapperTransform.DepthImageToColorCamera(capture))
                    {
                        c2dDepthData.CopyTo<ushort>(color2DepthDataFrame, 0, 0, color2DepthDataFrame.Length);
                        lastColor2DepthFrameTime = (ulong)capture.Color.DeviceTimestamp.Ticks;
                    }
                }
            }
        }
    }
    catch (System.Exception ex)
    {
        Debug.LogException(ex);
    }
}
// Opens the dummy K4A sensor and fills in the sensor data with fixed image
// sizes, JSON-provided calibration data and the standard K4A image scales.
// Returns the initialized sensor data, or null when the device index is invalid.
public override KinectInterop.SensorData OpenSensor(KinectManager kinectManager, KinectInterop.FrameSource dwFlags, bool bSyncDepthAndColor, bool bSyncBodyAndDepth)
{
    // save initial parameters
    base.OpenSensor(kinectManager, dwFlags, bSyncDepthAndColor, bSyncBodyAndDepth);

    List<KinectInterop.SensorDeviceInfo> availableSensors = GetAvailableSensors();
    if (deviceIndex < 0 || deviceIndex >= availableSensors.Count)
    {
        return null;
    }

    KinectInterop.SensorDeviceInfo deviceInfo = availableSensors[deviceIndex];
    sensorDeviceId = deviceInfo.sensorId;
    sensorPlatform = KinectInterop.DepthSensorPlatform.DummyK4A;

    // sensor identification
    KinectInterop.SensorData sensorData = new KinectInterop.SensorData();
    sensorData.sensorIntPlatform = sensorPlatform;
    sensorData.sensorId = deviceInfo.sensorId;
    sensorData.sensorName = deviceInfo.sensorName;
    sensorData.sensorCaps = deviceInfo.sensorCaps;

    // fixed image sizes of the dummy sensor
    sensorData.colorImageWidth = 1920;   // 1080p
    sensorData.colorImageHeight = 1080;
    sensorData.depthImageWidth = 640;    // NFOV Unbinned
    sensorData.depthImageHeight = 576;

    // calibration data, deserialized from the embedded JSON strings
    sensorData.depthCamIntr = JsonUtility.FromJson<KinectInterop.CameraIntrinsics>(jsonDepthCamIntr);
    sensorData.colorCamIntr = JsonUtility.FromJson<KinectInterop.CameraIntrinsics>(jsonColorCamIntr);
    sensorData.depth2ColorExtr = JsonUtility.FromJson<KinectInterop.CameraExtrinsics>(jsonDepth2ColorExtr);
    sensorData.color2DepthExtr = JsonUtility.FromJson<KinectInterop.CameraExtrinsics>(jsonColor2DepthExtr);

    // build the depth-to-color camera matrix from the extrinsics
    // (rotation is a row-major 3x3; translation is in mm -> meters)
    float[] rot = sensorData.depth2ColorExtr.rotation;
    float[] trn = sensorData.depth2ColorExtr.translation;

    Vector4 col0 = new Vector4(rot[0], rot[3], rot[6], 0);
    Vector4 col1 = new Vector4(rot[1], rot[4], rot[7], 0);
    Vector4 col2 = new Vector4(rot[2], rot[5], rot[8], 0);
    Vector4 col3 = new Vector4(trn[0] * 0.001f, trn[1] * 0.001f, trn[2] * 0.001f, 1);

    depth2colorCamMat = new Matrix4x4(col0, col1, col2, col3);
    //Debug.Log("Depth2colorCamMat Pos: " + (Vector3)depth2colorCamMat.GetColumn(3) + ", Rot: " + depth2colorCamMat.rotation.eulerAngles);

    // flip color & depth image vertically
    sensorData.colorImageScale = new Vector3(-1f, -1f, 1f);
    sensorData.depthImageScale = new Vector3(-1f, -1f, 1f);
    sensorData.infraredImageScale = new Vector3(-1f, -1f, 1f);
    sensorData.sensorSpaceScale = new Vector3(-1f, -1f, 1f);
    sensorData.unitToMeterFactor = 0.001f;

    // depth camera offset & matrix z-flip
    sensorRotOffset = Vector3.zero;  // new Vector3(6f, 0f, 0f);  // the depth camera is tilted 6 degrees downwards
    sensorRotFlipZ = true;
    sensorRotIgnoreY = true;

    // color camera data & intrinsics
    sensorData.colorImageFormat = TextureFormat.BGRA32;
    sensorData.colorImageStride = 4;  // 4 bytes per pixel

    if (consoleLogMessages)
    {
        Debug.Log("D" + deviceIndex + " DummyK4A-sensor opened");
    }

    return sensorData;
}
// Initializes background removal with shaders.
// Sets up the optional body-bounds or body-index filter, creates the render
// textures used by the foreground pipeline, and builds the filter materials.
// sensorData - the sensor data of the used sensor
// Returns true on success; false when sensorData is unusable or DirectX 11
// (compute shader support) is not available.
private bool InitBackgroundRemoval(KinectInterop.SensorData sensorData)
{
    // guard clauses instead of one big positive condition
    if (sensorData == null || sensorData.sensorInterface == null || !KinectInterop.IsDirectX11Available())
    {
        return false;
    }

    // initialize the configured filter, if any (body bounds takes precedence)
    if (filterByBody != null)
    {
        if (!filterByBody.InitBackgroundRemoval(sensorData, MAX_BODY_COUNT))
        {
            Debug.LogError("Could not init the background removal by body bounds!");
            return false;
        }
    }
    else if (filterByBI != null)
    {
        if (!filterByBI.InitBackgroundRemoval(sensorData))
        {
            Debug.LogError("Could not init the background removal by body index!");
            return false;
        }
    }

    sensorInt = (DepthSensorBase)sensorData.sensorInterface;

    // set the texture resolution; only override the sensor's point-cloud
    // resolution when no point-cloud textures were configured elsewhere
    if (sensorInt.pointCloudColorTexture == null && sensorInt.pointCloudVertexTexture == null)
    {
        sensorInt.pointCloudResolution = foregroundImageResolution;
    }

    textureRes = sensorInt.GetPointCloudTexResolution(sensorData);

    // render textures used by the foreground pipeline
    colorTexture = KinectInterop.CreateRenderTexture(colorTexture, textureRes.x, textureRes.y, RenderTextureFormat.ARGB32);
    if (filterByBI == null)
    {
        // the vertex texture is only needed when not filtering by body index
        vertexTexture = KinectInterop.CreateRenderTexture(vertexTexture, textureRes.x, textureRes.y, RenderTextureFormat.ARGBHalf);
    }
    alphaTexture = KinectInterop.CreateRenderTexture(alphaTexture, textureRes.x, textureRes.y, RenderTextureFormat.ARGB32);
    foregroundTexture = KinectInterop.CreateRenderTexture(foregroundTexture, textureRes.x, textureRes.y, RenderTextureFormat.ARGB32);

    // let the sensor interface render into our textures
    sensorInt.pointCloudColorTexture = colorTexture;
    sensorInt.pointCloudVertexTexture = vertexTexture;

    // local factory for the filter materials, to avoid repeating Shader.Find boilerplate
    Material CreateFilterMaterial(string shaderName)
    {
        return new Material(Shader.Find(shaderName));
    }

    erodeFilterMat = CreateFilterMaterial("Kinect/ErodeShader");
    erodeFilterMat.SetFloat("_TexResX", (float)textureRes.x);
    erodeFilterMat.SetFloat("_TexResY", (float)textureRes.y);

    dilateFilterMat = CreateFilterMaterial("Kinect/DilateShader");
    dilateFilterMat.SetFloat("_TexResX", (float)textureRes.x);
    dilateFilterMat.SetFloat("_TexResY", (float)textureRes.y);

    gradientFilterMat = CreateFilterMaterial("Kinect/GradientShader");
    medianFilterMat = CreateFilterMaterial("Kinect/MedianShader");
    blurFilterMat = CreateFilterMaterial("Kinect/BlurShader");
    invertAlphaMat = CreateFilterMaterial("Kinect/InvertShader");
    foregroundMat = CreateFilterMaterial("Kinect/ForegroundShader");

    return true;
}
// Processes the camera frames of a capture: optionally routes the capture
// through the body tracker (and, when body & depth are synched, substitutes
// the body-tracker's capture for the raw one), then copies color, depth &
// infrared images into the raw buffers and updates the transformation frames.
// Frames are only consumed when the previous one has been picked up (raw
// timestamp still equals sensorData.last*FrameTime).
// sensorData - the sensor data of the used sensor
// capture - the capture to process; the caller retains ownership of the
//           original capture, while the bt-capture is disposed here
private void ProcessCameraFrames(KinectInterop.SensorData sensorData, Capture capture)
{
    //// check for color & depth sync
    //if (isSyncDepthAndColor && (capture.Color == null || capture.Depth == null))
    //    return;

    Capture btCapture = null;

    try
    {
        if (bodyTracker != null)
        {
            // check for available body frame
            btCapture = PollBodyFrame(sensorData, capture);

            if (isSyncBodyAndDepth)
            {
                // process the body tracking capture instead of the raw one
                // (may be null when no body frame is available yet)
                capture = btCapture;
                //if(capture != null)
                //    Debug.Log("BtDepthTimestamp: " + capture.Depth.DeviceTimestamp.Ticks);
            }
        }

        // check for body & depth sync - when synching, only proceed with a
        // valid body-tracker capture
        if (!isSyncBodyAndDepth || btCapture != null /**&& (btQueueCount == 0 && sensorData.lastBodyFrameTime == rawDepthTimestamp)*/)  // currentBodyTimestamp
        {
            // color frame - only when the previous one was consumed
            if (capture.Color != null && rawColorImage != null && rawColorTimestamp == sensorData.lastColorFrameTime)
            {
                if (kinectPlayback != null)
                {
                    WaitForPlaybackTimestamp("color", capture.Color.DeviceTimestamp.Ticks);
                }

                lock (colorFrameLock)
                {
                    //capture.Color.CopyBytesTo(rawColorImage, 0, 0, rawColorImage.Length);
                    KinectInterop.CopyBytes(capture.Color.GetBuffer(), (int)capture.Color.Size, rawColorImage, rawColorImage.Length, sizeof(byte));
                    rawColorTimestamp = (ulong)capture.Color.DeviceTimestamp.Ticks;
                    //Debug.Log("D" + deviceIndex + " RawColorTimestamp: " + rawColorTimestamp);
                }
            }

            // depth frame - only when the previous one was consumed
            if (capture.Depth != null && rawDepthImage != null && rawDepthTimestamp == sensorData.lastDepthFrameTime)
            {
                if (kinectPlayback != null)
                {
                    WaitForPlaybackTimestamp("depth", capture.Depth.DeviceTimestamp.Ticks);
                }

                lock (depthFrameLock)
                {
                    //capture.Depth.CopyTo(rawDepthImage, 0, 0, rawDepthImage.Length);
                    KinectInterop.CopyBytes(capture.Depth.GetBuffer(), (int)capture.Depth.Size, rawDepthImage, rawDepthImage.Length, sizeof(ushort));
                    rawDepthTimestamp = (ulong)capture.Depth.DeviceTimestamp.Ticks;
                    //Debug.Log("D" + deviceIndex + " RawDepthTimestamp: " + rawDepthTimestamp);
                }
            }

            // infrared frame - only when the previous one was consumed
            if (capture.IR != null && rawInfraredImage != null && rawInfraredTimestamp == sensorData.lastInfraredFrameTime)
            {
                if (kinectPlayback != null)
                {
                    WaitForPlaybackTimestamp("ir", capture.IR.DeviceTimestamp.Ticks);
                }

                lock (infraredFrameLock)
                {
                    //capture.IR.CopyTo(rawInfraredImage, 0, 0, rawInfraredImage.Length);
                    KinectInterop.CopyBytes(capture.IR.GetBuffer(), (int)capture.IR.Size, rawInfraredImage, rawInfraredImage.Length, sizeof(ushort));
                    rawInfraredTimestamp = (ulong)capture.IR.DeviceTimestamp.Ticks;
                    //Debug.Log("D" + deviceIndex + " RawInfraredTimestamp: " + rawInfraredTimestamp);
                }
            }

            // transformation data frames - need both color & depth images,
            // and at least one of the raw frames must have been updated
            if ((depthCamColorDataFrame != null || colorCamDepthDataFrame != null) && capture.Color != null && capture.Depth != null &&
                (rawColorTimestamp != sensorData.lastColorFrameTime || rawDepthTimestamp != sensorData.lastDepthFrameTime))
            {
                // lazily create the coordinate-mapper transformation
                if (coordMapperTransform == null)
                {
                    coordMapperTransform = coordMapperCalib.CreateTransformation();
                }

                // color image mapped into the depth camera's view
                if (depthCamColorDataFrame != null)
                {
                    if (d2cColorData == null)
                    {
                        d2cColorData = new Image(ImageFormat.ColorBGRA32, sensorData.depthImageWidth, sensorData.depthImageHeight, sensorData.depthImageWidth * sensorData.colorImageStride);
                    }

                    lock (depthCamColorFrameLock)
                    {
                        coordMapperTransform.ColorImageToDepthCamera(capture.Depth, capture.Color, d2cColorData);
                        d2cColorData.CopyTo<byte>(depthCamColorDataFrame, 0, 0, depthCamColorDataFrame.Length);
                        lastDepthCamColorFrameTime = (ulong)capture.Depth.DeviceTimestamp.Ticks;
                    }
                }

                // depth image mapped into the color camera's view
                if (colorCamDepthDataFrame != null)
                {
                    if (c2dDepthData == null)
                    {
                        c2dDepthData = new Image(ImageFormat.Depth16, sensorData.colorImageWidth, sensorData.colorImageHeight, sensorData.colorImageWidth * sizeof(ushort));
                    }

                    lock (colorCamDepthFrameLock)
                    {
                        coordMapperTransform.DepthImageToColorCamera(capture.Depth, c2dDepthData);
                        c2dDepthData.CopyTo<ushort>(colorCamDepthDataFrame, 0, 0, colorCamDepthDataFrame.Length);
                        lastColorCamDepthFrameTime = (ulong)capture.Color.DeviceTimestamp.Ticks;
                    }
                }
            }
        }
        else
        {
            // ignore the capture
            capture = null;
        }

        if (btCapture != null)
        {
            // dispose body capture - it was produced by PollBodyFrame and is owned here
            btCapture.Dispose();
        }
    }
    catch (System.Exception ex)
    {
        Debug.LogException(ex);
    }
}
/// <summary>
/// Initializes the compute buffers and shaders used by the floor detector,
/// and starts the asynchronous floor-update routine on the caller behaviour.
/// </summary>
/// <param name="caller">MonoBehaviour that will host the floor-update coroutine</param>
/// <param name="sensorData">Sensor data</param>
/// <param name="maxDepthMm">Max depth distance in mm</param>
public void InitFloorDetector(MonoBehaviour caller, KinectInterop.SensorData sensorData, int maxDepthMm)
{
    this.callerInstance = caller;
    this.sensorData = sensorData;

    // nothing to initialize without valid depth-image dimensions
    if (sensorData == null || sensorData.depthImageWidth == 0 || sensorData.depthImageHeight == 0)
    {
        return;
    }

    // lazily load the compute shaders and look up their kernels (-1 when a shader could not be loaded)
    if (floorDetOffsetEstShader == null)
    {
        floorDetOffsetEstShader = Resources.Load("FloorDetectionOffsetEstShader") as ComputeShader;
        floorDetOffsetEstKernel = floorDetOffsetEstShader != null ? floorDetOffsetEstShader.FindKernel("EstimatePointCloudPosOfs") : -1;
    }

    if (floorDetOffsetMinMaxShader == null)
    {
        floorDetOffsetMinMaxShader = Resources.Load("FloorDetectionOffsetMinMaxShader") as ComputeShader;
        floorDetOffsetMinMaxKernel = floorDetOffsetMinMaxShader != null ? floorDetOffsetMinMaxShader.FindKernel("EstimateOffsetMinMax") : -1;
    }

    if (floorDetOffsetHistShader == null)
    {
        floorDetOffsetHistShader = Resources.Load("FloorDetectionOffsetHistShader") as ComputeShader;
        floorDetOffsetHistKernel = floorDetOffsetHistShader != null ? floorDetOffsetHistShader.FindKernel("EstimateOffsetHist") : -1;
    }

    if (floorDetPlaneEstShader == null)
    {
        floorDetPlaneEstShader = Resources.Load("FloorDetectionPlanePointsShader") as ComputeShader;
        floorDetPlaneEstKernel = floorDetPlaneEstShader != null ? floorDetPlaneEstShader.FindKernel("EstimatePlanePoints") : -1;
    }

    // point-cloud space table: 3 floats per depth pixel
    if (pointCloudSpaceBuffer == null)
    {
        int spaceBufferLength = sensorData.depthImageWidth * sensorData.depthImageHeight * 3;
        pointCloudSpaceBuffer = new ComputeBuffer(spaceBufferLength, sizeof(float));
    }

    // half-length uint buffer — presumably two 16-bit depth values packed per uint (TODO confirm against the shader)
    if (pointCloudDepthBuffer == null)
    {
        int depthBufferLength = (sensorData.depthImageWidth * sensorData.depthImageHeight) >> 1;
        pointCloudDepthBuffer = new ComputeBuffer(depthBufferLength, sizeof(uint));
    }

    // point-cloud positions: 3 floats per depth pixel
    if (pointCloudPosBuffer == null)
    {
        int posBufferLength = sensorData.depthImageWidth * sensorData.depthImageHeight * 3;
        pointCloudPosBuffer = new ComputeBuffer(posBufferLength, sizeof(float));
    }

    // one offset value per depth pixel
    if (pointCloudOfsBuffer == null)
    {
        int ofsBufferLength = sensorData.depthImageWidth * sensorData.depthImageHeight;
        pointCloudOfsBuffer = new ComputeBuffer(ofsBufferLength, sizeof(float));
    }

    // one mask value per depth pixel
    if (pointCloudMaskBuffer == null)
    {
        int maskBufferLength = sensorData.depthImageWidth * sensorData.depthImageHeight;
        pointCloudMaskBuffer = new ComputeBuffer(maskBufferLength, sizeof(int));
    }

    // two floats: min & max of the offset histogram
    if (ofsHistMinMaxBuffer == null)
    {
        histMinMax = new float[2];
        ofsHistMinMaxBuffer = new ComputeBuffer(histMinMax.Length, sizeof(float));
    }

    // hist bin size
    float planeDisplacementRangeInMeters = 0.050f;  // 5 cm in meters
    binAggregation = 6;
    histBinSize = planeDisplacementRangeInMeters / binAggregation;

    // the histogram spans 2 * maxDepth meters of offsets, in histBinSize steps
    float fMaxDepth = (float)maxDepthMm / 1000f;
    histBufferLength = Mathf.FloorToInt(2 * fMaxDepth / histBinSize) + 1;
    Debug.Log("histBinSize: " + histBinSize + ", histBufferLength: " + histBufferLength);

    if (ofsHistBinLeftBuffer == null)
    {
        ofsHistBinLeftBuffer = new ComputeBuffer(histBufferLength, sizeof(float));
    }

    if (ofsHistBinCountBuffer == null)
    {
        ofsHistBinCountBuffer = new ComputeBuffer(histBufferLength, sizeof(uint));
    }

    if (histCumulativeCountBuffer == null)
    {
        histCumulativeCountBuffer = new ComputeBuffer(histBufferLength, sizeof(uint));
    }

    // one plane-point index per depth pixel
    if (planeIndicesBuffer == null)
    {
        int planeIndicesLength = sensorData.depthImageWidth * sensorData.depthImageHeight;
        planeIndicesBuffer = new ComputeBuffer(planeIndicesLength, sizeof(uint));
    }

    if (planePosNormBuffer == null)
    {
        planePosNorm = new float[4 * 3];  // pos & norm are v3
        planePosNormBuffer = new ComputeBuffer(planePosNorm.Length, sizeof(float));
    }

    // detection parameters & initial state
    spaceScale = sensorData.sensorSpaceScale;
    minFloorPointCount = 1024;
    planeMaxTiltInDeg = 5f;
    imuUpVector = Vector3.up;
    bPlaneValid = false;

    // kick off the floor-update coroutine on the caller behaviour
    if (callerInstance != null)
    {
        isRoutineRunning = true;
        floorRoutine = UpdateFloorAsync();
        callerInstance.StartCoroutine(floorRoutine);
    }
}
/// <summary>
/// Opens the RealSense sensor (or starts playback of a recording) and allocates
/// the raw-image buffers according to the requested frame sources.
/// </summary>
/// <param name="kinectManager">The KinectManager that owns this sensor interface</param>
/// <param name="dwFlags">Requested frame sources (color, depth, infrared, pose, ...)</param>
/// <param name="bSyncDepthAndColor">Whether to sync depth and color frames</param>
/// <param name="bSyncBodyAndDepth">Whether to sync body and depth frames</param>
/// <returns>The initialized sensor data, or null if the sensor could not be opened</returns>
public override KinectInterop.SensorData OpenSensor(KinectManager kinectManager, KinectInterop.FrameSource dwFlags, bool bSyncDepthAndColor, bool bSyncBodyAndDepth)
{
    // save initial parameters
    base.OpenSensor(kinectManager, dwFlags, bSyncDepthAndColor, bSyncBodyAndDepth);

    // color settings, parsed out of the camera-mode enum name
    int colorWidth = 0, colorHeight = 0, colorFps = 0;
    ParseCameraMode(colorCameraMode.ToString(), out colorWidth, out colorHeight, out colorFps);

    // depth settings, parsed out of the camera-mode enum name
    int depthWidth = 0, depthHeight = 0, depthFps = 0;
    ParseCameraMode(depthCameraMode.ToString(), out depthWidth, out depthHeight, out depthFps);

    try
    {
        m_pipeline = new Pipeline();

        using (Config config = new Config())
        {
            if (deviceStreamingMode == KinectInterop.DeviceStreamingMode.PlayRecording)
            {
                // validate the recording-file path before enabling playback
                if (string.IsNullOrEmpty(recordingFile))
                {
                    Debug.LogError("PlayRecording selected, but the path to recording file is missing.");
                    return(null);
                }

                if (!System.IO.File.Exists(recordingFile))
                {
                    Debug.LogError("PlayRecording selected, but the recording file cannot be found: " + recordingFile);
                    return(null);
                }

                sensorPlatform = KinectInterop.DepthSensorPlatform.RealSense;
                sensorDeviceId = KinectInterop.GetFileName(recordingFile, false);

                // playback from file
                if (consoleLogMessages)
                {
                    Debug.Log("Playing back: " + recordingFile);
                }

                config.EnableDeviceFromFile(recordingFile, false);
            }
            else
            {
                // get the list of available sensors
                List<KinectInterop.SensorDeviceInfo> alSensors = GetAvailableSensors();
                if (deviceIndex >= alSensors.Count)
                {
                    Debug.LogError(" D" + deviceIndex + " is not available. You can set the device index to -1, to disable it.");
                    return(null);
                }

                // sensor serial number
                sensorPlatform = KinectInterop.DepthSensorPlatform.RealSense;
                sensorDeviceId = alSensors[deviceIndex].sensorId;
                config.EnableDevice(sensorDeviceId);

                // color stream
                if ((dwFlags & KinectInterop.FrameSource.TypeColor) != 0)
                {
                    //Debug.Log(string.Format("Color camera mode: {0} x {1} @ {2} FPS", colorWidth, colorHeight, colorFps));
                    config.EnableStream(Stream.Color, -1, colorWidth, colorHeight, Format.Rgb8, colorFps);
                }

                // depth stream
                if ((dwFlags & KinectInterop.FrameSource.TypeDepth) != 0)
                {
                    //Debug.Log(string.Format("Depth camera mode: {0} x {1} @ {2} FPS", depthWidth, depthHeight, depthFps));
                    config.EnableStream(Stream.Depth, -1, depthWidth, depthHeight, Format.Z16, depthFps);
                }

                // infrared stream (first IR imager only)
                if ((dwFlags & KinectInterop.FrameSource.TypeInfrared) != 0 /**|| (dwFlags & KinectInterop.FrameSource.TypeBody) != 0*/)
                {
                    //Debug.Log(string.Format("Infrared camera mode: {0} x {1} @ {2} FPS", depthWidth, depthHeight, depthFps));
                    config.EnableStream(Stream.Infrared, 1, depthWidth, depthHeight, Format.Y8, depthFps);
                    //config.EnableStream(Stream.Infrared, 2, depthWidth, depthHeight, Format.Y8, depthFps);
                }

                // pose stream
                if ((dwFlags & KinectInterop.FrameSource.TypePose) != 0)
                {
                    config.EnableStream(Stream.Pose, Format.SixDOF);
                }

                //// record to file
                //if(deviceMode == KinectInterop.DepthSensorMode.CreateRecording && !string.IsNullOrEmpty(deviceFilePath))
                //{
                //    if (!string.IsNullOrEmpty(deviceFilePath))
                //    {
                //        config.EnableRecordToFile(deviceFilePath);
                //    }
                //    else
                //    {
                //        Debug.LogError("Record selected, but the path to recording file is missing.");
                //    }
                //}
            }

            activeProfile = m_pipeline.Start(config);
        }
    }
    catch (Exception ex)
    {
        Debug.LogError("RealSenseInterface: " + ex.ToString());
    }

    // check if the profile was successfully created; if not, the sensor cannot be used
    if (activeProfile == null)
    {
        return(null);
    }

    KinectInterop.SensorData sensorData = new KinectInterop.SensorData();
    sensorData.sensorIntPlatform = sensorPlatform;

    // flip color & depth images vertically
    sensorData.colorImageScale = new Vector3(-1f, -1f, 1f);
    sensorData.depthImageScale = new Vector3(-1f, -1f, 1f);
    sensorData.infraredImageScale = new Vector3(-1f, -1f, 1f);
    sensorData.sensorSpaceScale = new Vector3(-1f, -1f, 1f);

    // depth camera offset & matrix z-flip
    sensorRotOffset = Vector3.zero;  // if for instance the depth camera is tilted downwards
    sensorRotFlipZ = true;
    sensorRotIgnoreY = false;

    // color image buffers & texture
    sensorData.colorImageWidth = colorWidth;
    sensorData.colorImageHeight = colorHeight;
    sensorData.colorImageFormat = TextureFormat.RGB24;
    sensorData.colorImageStride = 3;  // 3 bytes per pixel

    if ((dwFlags & KinectInterop.FrameSource.TypeColor) != 0)
    {
        rawColorImage = new byte[sensorData.colorImageWidth * sensorData.colorImageHeight * 3];

        sensorData.colorImageTexture = new Texture2D(sensorData.colorImageWidth, sensorData.colorImageHeight, TextureFormat.RGB24, false);
        sensorData.colorImageTexture.wrapMode = TextureWrapMode.Clamp;
        sensorData.colorImageTexture.filterMode = FilterMode.Point;
    }

    // depth image buffers
    sensorData.depthImageWidth = depthWidth;
    sensorData.depthImageHeight = depthHeight;

    if ((dwFlags & KinectInterop.FrameSource.TypeDepth) != 0)
    {
        rawDepthImage = new ushort[sensorData.depthImageWidth * sensorData.depthImageHeight];
        sensorData.depthImage = new ushort[sensorData.depthImageWidth * sensorData.depthImageHeight];
    }

    // infrared image buffers (also needed for body tracking)
    if ((dwFlags & KinectInterop.FrameSource.TypeInfrared) != 0 || (dwFlags & KinectInterop.FrameSource.TypeBody) != 0)
    {
        rawInfraredImage1 = new byte[sensorData.depthImageWidth * sensorData.depthImageHeight];
        rawInfraredImage2 = new byte[sensorData.depthImageWidth * sensorData.depthImageHeight];
        rawInfraredImageBT = new ushort[sensorData.depthImageWidth * sensorData.depthImageHeight];

        rawInfraredImage = new ushort[sensorData.depthImageWidth * sensorData.depthImageHeight];
        sensorData.infraredImage = new ushort[sensorData.depthImageWidth * sensorData.depthImageHeight];

        minInfraredValue = 0f;
        maxInfraredValue = 1000f;
    }

    if (consoleLogMessages)
    {
        Debug.Log("RealSense-sensor opened: " + sensorDeviceId);
    }

    return(sensorData);
}
//----------------------------------- end of public functions --------------------------------------//

public void Start()
{
    try
    {
        // fetch the sensor data from the KinectManager singleton, when it is up and running
        kinectManager = KinectManager.Instance;
        if (kinectManager && kinectManager.IsInitialized())
        {
            sensorData = kinectManager.GetSensorData(sensorIndex);
        }

        if (sensorData == null || sensorData.sensorInterface == null)
        {
            throw new Exception("Background removal cannot be started, because KinectManager is missing or not initialized.");
        }

        // fall back to the RawImage on this game object, when no target image was set
        if (!foregroundImage)
        {
            foregroundImage = GetComponent<UnityEngine.UI.RawImage>();
        }

        // fall back to the main camera, when no foreground camera was set
        if (foregroundCamera == null)
        {
            foregroundCamera = Camera.main;
        }

        // look up the optional filter components on the same game object
        filterByDist = GetComponent<BackgroundRemovalByDist>();
        filterByBI = GetComponent<BackgroundRemovalByBodyIndex>();

        // initialize the background removal itself
        if (!InitBackgroundRemoval(sensorData))
        {
            throw new Exception("Background removal could not be initialized.");
        }

        if (debugText != null)
        {
            debugText.text = string.Empty;
        }

        bBackgroundRemovalInited = true;
    }
    catch (DllNotFoundException ex)
    {
        Debug.LogError(ex.ToString());
        if (debugText != null)
        {
            debugText.text = "Please check the SDK installations.";
        }
    }
    catch (Exception ex)
    {
        Debug.LogException(ex);
        if (debugText != null)
        {
            debugText.text = ex.Message;
        }
    }
}
/// <summary>
/// Copies the polled RealSense frames (or the body-tracker capture, when body/depth
/// sync is on) into the raw image buffers, updates the stream profiles and the pose data.
/// </summary>
/// <param name="sensorData">Sensor data, used to check the last-consumed frame times</param>
/// <param name="frames">The frames polled from the RealSense pipeline</param>
private void ProcessCameraFrames(KinectInterop.SensorData sensorData, RealSenseFrames frames)
{
    Capture btCapture = null;

    //if (bodyTracker != null)
    //{
    //    // body frame
    //    if (frames.depthFrame != null && frames.infraredFrame != null)
    //    {
    //        Capture capture = GetBodyTrackerCapture(sensorData, frames);
    //        btCapture = PollBodyFrame(sensorData, capture);
    //        capture?.Dispose();
    //    }
    //}

    // check for body & depth sync; note btCapture stays null while the body-tracker
    // code above is commented out, so only the sensor-frame branch below can run
    if (!isSyncBodyAndDepth || btCapture != null /**|| bodyTracker == null*/)
    {
        if (isSyncBodyAndDepth && btCapture != null)
        {
            // when depth & color are synced, all frames get stamped with the depth-frame time
            ulong depthFrameTime = isSyncDepthAndColor && btCapture.Depth != null ? (ulong)btCapture.Depth.DeviceTimestamp.Ticks : 0;

            // body-tracker frame
            // each raw buffer is only refreshed after the previous frame was consumed
            // (raw timestamp equals the last consumed frame time), unless getAllSensorFrames is set
            if (btCapture.Depth != null && rawDepthImage != null && (getAllSensorFrames || rawDepthTimestamp == sensorData.lastDepthFrameTime))
            {
                lock (depthFrameLock)
                {
                    //btCapture.Depth.CopyTo(rawDepthImage, 0, 0, rawDepthImage.Length);
                    KinectInterop.CopyBytes(btCapture.Depth.GetBuffer(), (int)btCapture.Depth.Size, rawDepthImage, rawDepthImage.Length, sizeof(ushort));
                    rawDepthTimestamp = (ulong)btCapture.Depth.DeviceTimestamp.Ticks;
                    //Debug.Log("D" + deviceIndex + " RawDepthTimestamp: " + rawDepthTimestamp);
                }
            }

            if (btCapture.Color != null && rawColorImage != null && (getAllSensorFrames || rawColorTimestamp == sensorData.lastColorFrameTime))
            {
                lock (colorFrameLock)
                {
                    //btCapture.Color.CopyBytesTo(rawColorImage, 0, 0, rawColorImage.Length);
                    KinectInterop.CopyBytes(btCapture.Color.GetBuffer(), (int)btCapture.Color.Size, rawColorImage, rawColorImage.Length, sizeof(byte));
                    rawColorTimestamp = depthFrameTime != 0 ? depthFrameTime : (ulong)btCapture.Color.DeviceTimestamp.Ticks;
                    //Debug.Log("D" + deviceIndex + " RawColorTimestamp: " + rawColorTimestamp);
                }
            }

            if (btCapture.IR != null && rawInfraredImage != null && (getAllSensorFrames || rawInfraredTimestamp == sensorData.lastInfraredFrameTime))
            {
                lock (infraredFrameLock)
                {
                    //btCapture.IR.CopyTo(rawInfraredImage, 0, 0, rawInfraredImage.Length);
                    KinectInterop.CopyBytes(btCapture.IR.GetBuffer(), (int)btCapture.IR.Size, rawInfraredImage, rawInfraredImage.Length, sizeof(ushort));
                    rawInfraredTimestamp = depthFrameTime != 0 ? depthFrameTime : (ulong)btCapture.IR.DeviceTimestamp.Ticks;
                    //Debug.Log("D" + deviceIndex + " RawInfraredTimestamp: " + rawInfraredTimestamp);
                }
            }
        }
        else
        {
            // sensor frame
            // when depth & color are synced, all frames get stamped with the device time of this frame set
            ulong depthFrameTime = isSyncDepthAndColor && frames.depthFrame != null ? frames.deviceTimestamp : 0;

            if (frames.depthFrame != null && rawDepthImage != null && (getAllSensorFrames || rawDepthTimestamp == sensorData.lastDepthFrameTime))
            {
                lock (depthFrameLock)
                {
                    frames.depthFrame.CopyTo<ushort>(rawDepthImage);
                    rawDepthTimestamp = frames.deviceTimestamp;
                    //Debug.Log("D" + deviceIndex + " RawDepthTimestamp: " + rawDepthTimestamp);
                }
            }

            if (frames.colorFrame != null && rawColorImage != null && (getAllSensorFrames || rawColorTimestamp == sensorData.lastColorFrameTime))
            {
                lock (colorFrameLock)
                {
                    KinectInterop.CopyBytes(frames.colorFrame.Data, rawColorImage.Length, rawColorImage, rawColorImage.Length, sizeof(byte));
                    rawColorTimestamp = depthFrameTime != 0 ? depthFrameTime : frames.deviceTimestamp;
                    //Debug.Log("D" + deviceIndex + " RawColorTimestamp: " + rawColorTimestamp);
                }
            }

            if (frames.infraredFrame != null && rawInfraredImage != null && (getAllSensorFrames || rawInfraredTimestamp == sensorData.lastInfraredFrameTime))
            {
                lock (infraredFrameLock)
                {
                    // expand the 8-bit IR values into the 16-bit buffer, scaled by 16
                    // (presumably to match the brightness range of 16-bit IR sensors — TODO confirm)
                    frames.infraredFrame.CopyTo<byte>(rawInfraredImage1);

                    for (int i = 0; i < rawInfraredImage1.Length; i++)
                    {
                        rawInfraredImage[i] = (ushort)(rawInfraredImage1[i] << 4);
                    }

                    rawInfraredTimestamp = depthFrameTime != 0 ? depthFrameTime : frames.deviceTimestamp;
                    //Debug.Log("D" + deviceIndex + " RawInfraredTimestamp: " + rawInfraredTimestamp);
                }
            }
        }
    }

    // color & depth stream profiles
    if (frames.colorFrame != null)
    {
        colorStreamProfile = frames.colorFrame.Profile.As<VideoStreamProfile>();
    }

    if (frames.depthFrame != null)
    {
        depthStreamProfile = frames.depthFrame.Profile.As<VideoStreamProfile>();
    }

    // dispose body capture
    if (btCapture != null)
    {
        btCapture.Dispose();
    }

    // check for pose frame; translation & rotation get converted into Unity's coordinate system
    if (frames.poseFrame != null)
    {
        frames.poseFrame.CopyTo(out rsPoseData);

        lock (poseFrameLock)
        {
            rawPosePosition = new Vector3(rsPoseData.translation.x, rsPoseData.translation.y, -rsPoseData.translation.z);  // (1, 1, -1)
            rawPoseRotation = new Quaternion(-rsPoseData.rotation.x, -rsPoseData.rotation.y, rsPoseData.rotation.z, rsPoseData.rotation.w);  // (-1, -1, 1, 1);
            rawPoseTimestamp = frames.deviceTimestamp;
            //Debug.Log("D" + deviceIndex + " RawPoseTimestamp: " + rawPoseTimestamp);
        }
    }
}
/// <summary>
/// Computes the current background-removal texture by running the alpha mask
/// through the configured filter chain (median, erode, dilate, gradient, blur, invert)
/// and compositing it with the color texture.
/// Fix: the gradient-texture lookup is now guarded — tempGradTextures is allocated only
/// when applyGradientFilter is set, so the previous unconditional tempGradTextures[1]
/// dereference threw a NullReferenceException whenever the gradient filter was off
/// and computeAlphaMaskOnly was false.
/// </summary>
/// <param name="sensorData">Sensor data, used to check for new frames</param>
/// <returns>Always true</returns>
private bool UpdateBackgroundRemoval(KinectInterop.SensorData sensorData)
{
    // only re-run the filter chain when a new space or body-index frame has arrived
    if (bBackgroundRemovalInited && (lastDepth2SpaceFrameTime != sensorData.lastDepth2SpaceFrameTime ||
        sensorData.usedColorBodyIndexBufferTime != sensorData.lastColorBodyIndexBufferTime))
    {
        // scratch textures used for ping-ponging by the filters
        RenderTexture[] tempTextures = new RenderTexture[2];
        tempTextures[0] = RenderTexture.GetTemporary(textureRes.x, textureRes.y, 0);
        tempTextures[1] = RenderTexture.GetTemporary(textureRes.x, textureRes.y, 0);

        // gradient scratch textures exist only when the gradient filter is enabled
        RenderTexture[] tempGradTextures = null;
        if (applyGradientFilter)
        {
            tempGradTextures = new RenderTexture[2];
            tempGradTextures[0] = RenderTexture.GetTemporary(textureRes.x, textureRes.y, 0);
            tempGradTextures[1] = RenderTexture.GetTemporary(textureRes.x, textureRes.y, 0);
        }

        // foreground filter — by distance, by body index, or by bodies (first applicable wins)
        if (filterByDist != null && sensorInt != null)
        {
            // filter by distance
            filterByDist.ApplyVertexFilter(vertexTexture, alphaTexture, sensorInt.GetSensorToWorldMatrix());
        }
        else if (filterByBI != null)
        {
            // filter by body index
            filterByBI.ApplyForegroundFilterByBodyIndex(alphaTexture, sensorData, kinectManager, playerIndex, MAX_BODY_COUNT);
        }
        else if (foregroundFilterShader != null && sensorInt != null)
        {
            // filter by bodies
            ApplyForegroundFilterByBody();
        }

        // the body-index filter writes alphaTexture itself; otherwise copy from the vertex texture
        if (filterByBI == null)
        {
            Graphics.Blit(vertexTexture, alphaTexture);
        }

        lastDepth2SpaceFrameTime = sensorData.lastDepth2SpaceFrameTime;

        // median
        if (applyMedianFilter)
        {
            ApplySimpleFilter(alphaTexture, alphaTexture, medianFilterMat, tempTextures);
        }

        // erode0
        ApplyIterableFilter(alphaTexture, alphaTexture, erodeFilterMat, erodeIterations0, tempTextures);

        // snapshot the eroded mask — used as _ErodeTex input by the gradient filter
        if (applyGradientFilter)
        {
            Graphics.CopyTexture(alphaTexture, tempGradTextures[0]);
        }

        // dilate
        ApplyIterableFilter(alphaTexture, alphaTexture, dilateFilterMat, dilateIterations, tempTextures);

        // gradient — rendered into tempGradTextures[1] for the final composite
        if (applyGradientFilter)
        {
            gradientFilterMat.SetTexture("_ErodeTex", tempGradTextures[0]);
            ApplySimpleFilter(alphaTexture, tempGradTextures[1], gradientFilterMat, tempTextures);
        }

        // erode
        ApplyIterableFilter(alphaTexture, alphaTexture, erodeFilterMat, erodeIterations, tempTextures);

        if (tempGradTextures != null)
        {
            Graphics.Blit(alphaTexture, tempGradTextures[0]);
        }

        // blur
        if (applyBlurFilter)
        {
            ApplySimpleFilter(alphaTexture, alphaTexture, blurFilterMat, tempTextures);
        }

        if (invertAlphaMask)
        {
            ApplySimpleFilter(alphaTexture, alphaTexture, invertAlphaMat, tempTextures);
        }

        if (!computeAlphaMaskOnly)
        {
            foregroundMat.SetTexture("_ColorTex", colorTexture);
            // pass null (no gradient texture) when the gradient filter is disabled,
            // instead of dereferencing the then-null tempGradTextures array
            foregroundMat.SetTexture("_GradientTex", tempGradTextures != null ? tempGradTextures[1] : null);

            Color gradientColor = (erodeIterations0 != 0 || dilateIterations != 0 || erodeIterations != 0) ? bodyContourColor : Color.clear;
            foregroundMat.SetColor("_GradientColor", gradientColor);

            ApplySimpleFilter(alphaTexture, foregroundTexture, foregroundMat, tempTextures);
        }
        else
        {
            Graphics.CopyTexture(alphaTexture, foregroundTexture);
        }

        // cleanup of the temporary render textures
        if (tempGradTextures != null)
        {
            RenderTexture.ReleaseTemporary(tempGradTextures[0]);
            RenderTexture.ReleaseTemporary(tempGradTextures[1]);
        }

        RenderTexture.ReleaseTemporary(tempTextures[0]);
        RenderTexture.ReleaseTemporary(tempTextures[1]);
    }

    return(true);
}
/// <summary>
/// Opens the Kinect4Azure sensor (or starts playback of a recording), configures
/// and starts the cameras & IMU, and allocates the image buffers and calibration data.
/// Fix: the "cannot be opened" error message now logs the device index instead of the
/// unrelated recording-file path (copy-paste slip — this is the live-device branch).
/// </summary>
/// <param name="dwFlags">Requested frame sources (color, depth, infrared, pose, body, ...)</param>
/// <param name="bSyncDepthAndColor">Whether to sync depth and color frames</param>
/// <param name="bSyncBodyAndDepth">Whether to sync body and depth frames</param>
/// <returns>The initialized sensor data, or null if the sensor could not be opened</returns>
public override KinectInterop.SensorData OpenSensor(KinectInterop.FrameSource dwFlags, bool bSyncDepthAndColor, bool bSyncBodyAndDepth)
{
    // save initial parameters
    base.OpenSensor(dwFlags, bSyncDepthAndColor, bSyncBodyAndDepth);

    // try to open the sensor or play back the recording
    KinectInterop.SensorData sensorData = new KinectInterop.SensorData();

    if (deviceStreamingMode == KinectInterop.DeviceStreamingMode.PlayRecording)
    {
        // validate the recording-file path before starting playback
        if (string.IsNullOrEmpty(recordingFile))
        {
            Debug.LogError("Playback selected, but the path to recording file is missing.");
            return(null);
        }

        if (!System.IO.File.Exists(recordingFile))
        {
            Debug.LogError("PlayRecording selected, but the recording file cannot be found: " + recordingFile);
            return(null);
        }

        Debug.Log("Playing back: " + recordingFile);
        kinectPlayback = new Playback(recordingFile);

        sensorPlatform = KinectInterop.DepthSensorPlatform.Kinect4Azure;
        sensorDeviceId = KinectInterop.GetFileName(recordingFile, false);  // deviceIndex.ToString();
        //Debug.Log("D" + deviceIndex + " ID: " + sensorDeviceId);

        // take camera modes, sync mode & calibration from the recording itself
        colorCameraMode = (ColorCameraMode)kinectPlayback.playback_config.color_resolution;
        depthCameraMode = (DepthCameraMode)kinectPlayback.playback_config.depth_mode;
        deviceSyncMode = kinectPlayback.playback_config.wired_sync_mode;

        coordMapperCalib = kinectPlayback.playback_calibration;
        playbackStartTime = 0;

        Debug.Log(string.Format("color_track_enabled: {0}, depth_track_enabled: {1}, ir_track_enabled: {2}, imu_track_enabled: {3}, depth_delay_off_color_usec: {4}",
            kinectPlayback.playback_config.color_track_enabled, kinectPlayback.playback_config.depth_track_enabled,
            kinectPlayback.playback_config.ir_track_enabled, kinectPlayback.playback_config.imu_track_enabled,
            kinectPlayback.playback_config.depth_delay_off_color_usec));
    }
    else
    {
        List<KinectInterop.SensorDeviceInfo> alSensors = GetAvailableSensors();
        if (deviceIndex >= alSensors.Count)
        {
            Debug.Log(" D" + deviceIndex + " is not available. You can set the device index to -1, to disable it.");
            return(null);
        }

        kinectSensor = Device.Open(deviceIndex);

        if (kinectSensor == null)
        {
            // fixed: log the device index here (recordingFile is not used in this branch)
            Debug.LogError(" D" + deviceIndex + " cannot be opened.");
            return(null);
        }

        // try to open the sensor
        sensorPlatform = KinectInterop.DepthSensorPlatform.Kinect4Azure;
        sensorDeviceId = kinectSensor.SerialNum;
        //Debug.Log("D" + deviceIndex + " ID: " + sensorDeviceId);

        if (deviceSyncMode == WiredSyncMode.Master && (dwFlags & KinectInterop.FrameSource.TypeColor) == 0)
        {
            // fix by Andreas Pedroni - master requires color camera on
            dwFlags |= KinectInterop.FrameSource.TypeColor;
        }

        DeviceConfiguration kinectConfig = new DeviceConfiguration();
        kinectConfig.SynchronizedImagesOnly = isSyncDepthAndColor;
        kinectConfig.WiredSyncMode = deviceSyncMode;
        // 1 TimeSpan tick = 100 ns, so usec * 10 ticks = the requested delay in microseconds
        kinectConfig.SuboridinateDelayOffMaster = new TimeSpan(subDeviceDelayUsec * 10);

        // color
        kinectConfig.ColorFormat = ImageFormat.ColorBGRA32;
        if ((dwFlags & KinectInterop.FrameSource.TypeColor) != 0)
        {
            kinectConfig.ColorResolution = (ColorResolution)colorCameraMode;
        }
        else
        {
            kinectConfig.ColorResolution = ColorResolution.Off;
        }

        // depth
        if ((dwFlags & KinectInterop.FrameSource.TypeDepth) != 0)
        {
            kinectConfig.DepthMode = (DepthMode)depthCameraMode;
        }
        else
        {
            kinectConfig.DepthMode = DepthMode.Off;
        }

        // fps — the highest color resolution and the wide-FOV depth mode only support 15 fps
        if (colorCameraMode != ColorCameraMode._4096_x_3072_15Fps && depthCameraMode != DepthCameraMode._1024x1024_15Fps_2_21mWfov)
        {
            kinectConfig.CameraFPS = FPS.FPS30;
        }
        else
        {
            kinectConfig.CameraFPS = FPS.FPS15;
        }

        // infrared
        if ((dwFlags & KinectInterop.FrameSource.TypeInfrared) != 0)
        {
            // ??
        }

        // start the cameras
        kinectSensor.StartCameras(kinectConfig);
        isCamerasStarted = true;

        if ((dwFlags & KinectInterop.FrameSource.TypePose) != 0)
        {
            // start the IMU
            kinectSensor.StartImu();
            isImuStarted = true;
        }

        // get reference to the coordinate mapper
        coordMapperCalib = kinectSensor.GetCalibration();
    }

    // reset the frame number
    //currentFrameNumber = 0;

    // flip color & depth image vertically
    sensorData.colorImageScale = new Vector3(-1f, -1f, 1f);
    sensorData.depthImageScale = new Vector3(-1f, -1f, 1f);
    sensorData.infraredImageScale = new Vector3(-1f, -1f, 1f);
    sensorData.sensorSpaceScale = new Vector3(-1f, -1f, 1f);

    // depth camera offset & matrix z-flip
    sensorRotOffset = new Vector3(6f, 0f, 0f);  // the depth camera is tilted 6 degrees downwards
    sensorRotFlipZ = true;
    sensorRotIgnoreY = true;

    // color camera data & intrinsics
    sensorData.colorImageFormat = TextureFormat.BGRA32;
    sensorData.colorImageStride = 4;  // 4 bytes per pixel

    if ((dwFlags & KinectInterop.FrameSource.TypeColor) != 0)
    {
        CameraCalibration colorCamera = coordMapperCalib.ColorCameraCalibration;
        sensorData.colorImageWidth = colorCamera.ResolutionWidth;
        sensorData.colorImageHeight = colorCamera.ResolutionHeight;

        rawColorImage = new byte[sensorData.colorImageWidth * sensorData.colorImageHeight * 4];

        sensorData.colorImageTexture = new Texture2D(sensorData.colorImageWidth, sensorData.colorImageHeight, TextureFormat.BGRA32, false);
        sensorData.colorImageTexture.wrapMode = TextureWrapMode.Clamp;
        sensorData.colorImageTexture.filterMode = FilterMode.Point;
    }

    // depth camera data & intrinsics
    if ((dwFlags & KinectInterop.FrameSource.TypeDepth) != 0)
    {
        CameraCalibration depthCamera = coordMapperCalib.DepthCameraCalibration;
        sensorData.depthImageWidth = depthCamera.ResolutionWidth;
        sensorData.depthImageHeight = depthCamera.ResolutionHeight;

        rawDepthImage = new ushort[sensorData.depthImageWidth * sensorData.depthImageHeight];
        sensorData.depthImage = new ushort[sensorData.depthImageWidth * sensorData.depthImageHeight];
    }

    // infrared data — depth-image dimensions may still be unset, when the depth stream is off
    if ((dwFlags & KinectInterop.FrameSource.TypeInfrared) != 0)
    {
        if (sensorData.depthImageWidth == 0 || sensorData.depthImageHeight == 0)
        {
            CameraCalibration depthCamera = coordMapperCalib.DepthCameraCalibration;
            sensorData.depthImageWidth = depthCamera.ResolutionWidth;
            sensorData.depthImageHeight = depthCamera.ResolutionHeight;
        }

        rawInfraredImage = new ushort[sensorData.depthImageWidth * sensorData.depthImageHeight];
        sensorData.infraredImage = new ushort[sensorData.depthImageWidth * sensorData.depthImageHeight];

        minInfraredValue = 0f;
        maxInfraredValue = 1000f;
    }

    // imu data
    if (kinectPlayback != null || (dwFlags & KinectInterop.FrameSource.TypePose) != 0)
    {
        imuAhrs = new AHRS.MahonyAHRS(0.0006f, 5f);  // 600us
        imuTurnRot1 = Quaternion.Euler(0f, 90f, 90f);
        imuTurnRot2 = Quaternion.Euler(90f, !flipImuRotation ? 90f : -90f, 0f);
        //lastFlipImuRot = flipImuRotation;
    }

    // calibration data
    GetCameraIntrinsics(CalibrationDeviceType.Color, coordMapperCalib.ColorCameraCalibration, ref sensorData.colorCamIntr);
    GetCameraIntrinsics(CalibrationDeviceType.Depth, coordMapperCalib.DepthCameraCalibration, ref sensorData.depthCamIntr);
    GetCameraExtrinsics(coordMapperCalib.ColorCameraCalibration.Extrinsics, ref sensorData.depth2ColorExtr);
    GetCameraExtrinsics(coordMapperCalib.DepthCameraCalibration.Extrinsics, ref sensorData.color2DepthExtr);

    // body & body-index data
    if ((dwFlags & KinectInterop.FrameSource.TypeBody) != 0)
    {
        InitBodyTracking(dwFlags, sensorData, coordMapperCalib, true);

        if (isSyncBodyAndDepth && bodyTracker == null)
        {
            // don't sync body and depth if the body tracker can't be created
            isSyncBodyAndDepth = false;
        }
    }

    Debug.Log("Kinect4Azure-sensor opened.");

    return(sensorData);
}