/// <summary>
/// Opens the RealSense camera on the first call; on subsequent calls grabs one frame set,
/// applies the temporal filter to the depth frame and converts the results to EmguCV images.
/// </summary>
/// <param name="ColorImg">BGR color image wrapping the color frame buffer (null until a frame is grabbed).</param>
/// <param name="DepthImg">Colorized depth image (null until a frame is grabbed).</param>
/// <param name="FilteredImg">Colorized, temporally-filtered depth image (null until a frame is grabbed).</param>
/// <param name="color">Raw color video frame (null until a frame is grabbed).</param>
/// <param name="depth">Raw depth frame (null until a frame is grabbed).</param>
/// <param name="filtered">Filtered depth frame (null until a frame is grabbed).</param>
internal void Open(out Image<Bgr, byte> ColorImg, out Image<Rgb, byte> DepthImg, out Image<Rgb, byte> FilteredImg, out VideoFrame color, out DepthFrame depth, out Frame filtered)
{
    DepthImg = null;
    ColorImg = null;
    FilteredImg = null;
    color = null;
    depth = null;
    filtered = null;

    if (CamState != CameraState.Opened)
    {
        // First call: start streaming with the configured settings and cache the
        // camera parameters. No frames are produced on this call.
        PipelineProfile = Camera.Start(cfg);                                                // start streaming with cfg
        vsp = PipelineProfile.GetStream<VideoStreamProfile>(Intel.RealSense.Stream.Depth);  // depth stream profile
        intrinsics = vsp.GetIntrinsics();                                                   // depth intrinsics
        sp = PipelineProfile.GetStream(Intel.RealSense.Stream.Color);                       // color stream profile
        extrinsics = vsp.GetExtrinsicsTo(sp);                                               // depth -> color extrinsics
        CamState = CameraState.Opened;                                                      // update camera state
    }
    else
    {
        // NOTE(review): the FrameSet itself is never disposed here; the out frames are
        // tied to it via DisposeWith, so frame lifetime effectively passes to the caller.
        FrameSet frames = Camera.WaitForFrames();
        depth = frames.DepthFrame.DisposeWith(frames);
        color = frames.ColorFrame.DisposeWith(frames);
        filtered = depth;

        if (depth != null)
        {
            // Temporal filter: smooth depth over time and fill small holes.
            Temp_Filter.Options[Option.FilterSmoothAlpha].Value = 0.5f;
            Temp_Filter.Options[Option.FilterSmoothDelta].Value = 20;
            Temp_Filter.Options[Option.HolesFill].Value = 2;
            filtered = Temp_Filter.Process(filtered);

            // Colorize depth maps for display.
            depColor = colorizer.Colorize(depth);
            filteredColor = colorizer.Colorize(filtered);

            // Wrap the native frame buffers in EmguCV images (no pixel copy).
            ColorImg = new Image<Bgr, byte>(color.Width, color.Height, color.Stride, color.Data);
            DepthImg = new Image<Rgb, byte>(depColor.Width, depColor.Height, depColor.Stride, depColor.Data);
            FilteredImg = new Image<Rgb, byte>(filteredColor.Width, filteredColor.Height, filteredColor.Stride, filteredColor.Data);
        }
    }
}
/// <summary>
/// Prepares the depth and color preview images and returns the per-frame update callbacks.
/// </summary>
/// <param name="depth">Receives the callback that renders a depth frame into the depth preview.</param>
/// <param name="color">Receives the callback that renders a color frame into the color preview.</param>
/// <param name="pp">The active pipeline profile used to size the preview bitmaps.</param>
private void SetupWindow(out Action<VideoFrame> depth, out Action<VideoFrame> color, PipelineProfile pp)
{
    // Allocate the depth preview bitmap, sized to the depth stream resolution.
    using (VideoStreamProfile profile = pp.GetStream(Intel.RealSense.Stream.Depth).As<VideoStreamProfile>())
    {
        imgDepth.Source = new WriteableBitmap(profile.Width, profile.Height, 96d, 96d, PixelFormats.Rgb24, null);
    }
    depth = UpdateImage(imgDepth);

    // Allocate the color preview bitmap, sized to the color stream resolution.
    using (VideoStreamProfile profile = pp.GetStream(Intel.RealSense.Stream.Color).As<VideoStreamProfile>())
    {
        imgColor.Source = new WriteableBitmap(profile.Width, profile.Height, 96d, 96d, PixelFormats.Rgb24, null);
    }
    color = UpdateImage(imgColor);
}
/// <summary>
/// Creates a new coordinate mapper for the specified pipeline.
/// </summary>
/// <param name="pipeline">The specified pipeline.</param>
/// <param name="colorWidth">The desired color frame width.</param>
/// <param name="colorHeight">The desired color frame height.</param>
/// <param name="depthWidth">The desired depth frame width.</param>
/// <param name="depthHeight">The desired depth frame height.</param>
/// <returns>The color/depth coordinate mapper of the current pipeline, if all of the supported streams were found. Null otherwise.</returns>
public static CoordinateMapper Create(PipelineProfile pipeline, int colorWidth, int colorHeight, int depthWidth, int depthHeight)
{
    // Guard: no pipeline or no streams -> no mapper.
    if (pipeline == null || pipeline.Streams == null || pipeline.Streams.Count == 0)
    {
        return null;
    }

    StreamProfile colorProfile = null;
    StreamProfile depthProfile = null;

    foreach (StreamProfile profile in pipeline.Streams)
    {
        // FIX: the original dereferenced the 'as' cast unconditionally, which throws
        // NullReferenceException for any non-video stream profile (e.g. motion/pose).
        VideoStreamProfile videoProfile = profile as VideoStreamProfile;
        if (videoProfile == null)
        {
            continue;
        }

        if (profile.Stream == Stream.Color && videoProfile.Width == colorWidth && videoProfile.Height == colorHeight)
        {
            colorProfile = profile;
        }
        else if (profile.Stream == Stream.Depth && videoProfile.Width == depthWidth && videoProfile.Height == depthHeight)
        {
            depthProfile = profile;
        }
    }

    // Both streams must match the requested resolutions.
    if (colorProfile == null || depthProfile == null)
    {
        return null;
    }

    Intrinsics colorIntrinsics = (colorProfile as VideoStreamProfile).GetIntrinsics();
    Extrinsics colorExtrinsics = colorProfile.GetExtrinsicsTo(depthProfile);
    Intrinsics depthIntrinsics = (depthProfile as VideoStreamProfile).GetIntrinsics();
    Extrinsics depthExtrinsics = depthProfile.GetExtrinsicsTo(colorProfile);

    return Create(colorIntrinsics, colorExtrinsics, depthIntrinsics, depthExtrinsics);
}
/// <summary>
/// Copies the camera frames (depth, color, infrared, pose) from the given RealSense frame
/// bundle — or from the body-tracker capture, when body/depth sync is enabled — into the
/// sensor's raw image buffers, updating the matching raw timestamps under their locks.
/// </summary>
/// <param name="sensorData">Sensor data holding the last-processed frame times used for gating.</param>
/// <param name="frames">The RealSense frames received from the device.</param>
private void ProcessCameraFrames(KinectInterop.SensorData sensorData, RealSenseFrames frames)
{
    Capture btCapture = null;

    //if (bodyTracker != null)
    //{
    //    // body frame
    //    if (frames.depthFrame != null && frames.infraredFrame != null)
    //    {
    //        Capture capture = GetBodyTrackerCapture(sensorData, frames);
    //        btCapture = PollBodyFrame(sensorData, capture);
    //        capture?.Dispose();
    //    }
    //}

    // check for body & depth sync
    if (!isSyncBodyAndDepth || btCapture != null /**|| bodyTracker == null*/)
    {
        if (isSyncBodyAndDepth && btCapture != null)
        {
            // When depth & color are synced, reuse the depth timestamp for the other streams.
            ulong depthFrameTime = isSyncDepthAndColor && btCapture.Depth != null ? (ulong)btCapture.Depth.DeviceTimestamp.Ticks : 0;

            // body-tracker frame
            // Each copy is gated: run when all frames are requested, or when the previously
            // copied frame appears consumed (raw timestamp equals the last processed time).
            if (btCapture.Depth != null && rawDepthImage != null && (getAllSensorFrames || rawDepthTimestamp == sensorData.lastDepthFrameTime))
            {
                lock (depthFrameLock)
                {
                    //btCapture.Depth.CopyTo(rawDepthImage, 0, 0, rawDepthImage.Length);
                    KinectInterop.CopyBytes(btCapture.Depth.GetBuffer(), (int)btCapture.Depth.Size, rawDepthImage, rawDepthImage.Length, sizeof(ushort));
                    rawDepthTimestamp = (ulong)btCapture.Depth.DeviceTimestamp.Ticks;
                    //Debug.Log("D" + deviceIndex + " RawDepthTimestamp: " + rawDepthTimestamp);
                }
            }

            if (btCapture.Color != null && rawColorImage != null && (getAllSensorFrames || rawColorTimestamp == sensorData.lastColorFrameTime))
            {
                lock (colorFrameLock)
                {
                    //btCapture.Color.CopyBytesTo(rawColorImage, 0, 0, rawColorImage.Length);
                    KinectInterop.CopyBytes(btCapture.Color.GetBuffer(), (int)btCapture.Color.Size, rawColorImage, rawColorImage.Length, sizeof(byte));
                    rawColorTimestamp = depthFrameTime != 0 ? depthFrameTime : (ulong)btCapture.Color.DeviceTimestamp.Ticks;
                    //Debug.Log("D" + deviceIndex + " RawColorTimestamp: " + rawColorTimestamp);
                }
            }

            if (btCapture.IR != null && rawInfraredImage != null && (getAllSensorFrames || rawInfraredTimestamp == sensorData.lastInfraredFrameTime))
            {
                lock (infraredFrameLock)
                {
                    //btCapture.IR.CopyTo(rawInfraredImage, 0, 0, rawInfraredImage.Length);
                    KinectInterop.CopyBytes(btCapture.IR.GetBuffer(), (int)btCapture.IR.Size, rawInfraredImage, rawInfraredImage.Length, sizeof(ushort));
                    rawInfraredTimestamp = depthFrameTime != 0 ? depthFrameTime : (ulong)btCapture.IR.DeviceTimestamp.Ticks;
                    //Debug.Log("D" + deviceIndex + " RawInfraredTimestamp: " + rawInfraredTimestamp);
                }
            }
        }
        else
        {
            // sensor frame
            // Same copy-and-timestamp pattern as above, but sourced directly from the
            // RealSense frames instead of the body-tracker capture.
            ulong depthFrameTime = isSyncDepthAndColor && frames.depthFrame != null ? frames.deviceTimestamp : 0;

            if (frames.depthFrame != null && rawDepthImage != null && (getAllSensorFrames || rawDepthTimestamp == sensorData.lastDepthFrameTime))
            {
                lock (depthFrameLock)
                {
                    frames.depthFrame.CopyTo<ushort>(rawDepthImage);
                    rawDepthTimestamp = frames.deviceTimestamp;
                    //Debug.Log("D" + deviceIndex + " RawDepthTimestamp: " + rawDepthTimestamp);
                }
            }

            if (frames.colorFrame != null && rawColorImage != null && (getAllSensorFrames || rawColorTimestamp == sensorData.lastColorFrameTime))
            {
                lock (colorFrameLock)
                {
                    KinectInterop.CopyBytes(frames.colorFrame.Data, rawColorImage.Length, rawColorImage, rawColorImage.Length, sizeof(byte));
                    rawColorTimestamp = depthFrameTime != 0 ? depthFrameTime : frames.deviceTimestamp;
                    //Debug.Log("D" + deviceIndex + " RawColorTimestamp: " + rawColorTimestamp);
                }
            }

            if (frames.infraredFrame != null && rawInfraredImage != null && (getAllSensorFrames || rawInfraredTimestamp == sensorData.lastInfraredFrameTime))
            {
                lock (infraredFrameLock)
                {
                    // 8-bit IR samples are widened to ushort; << 4 presumably rescales them
                    // toward the 16-bit IR range used elsewhere — TODO confirm intended scale.
                    frames.infraredFrame.CopyTo<byte>(rawInfraredImage1);
                    for (int i = 0; i < rawInfraredImage1.Length; i++)
                    {
                        rawInfraredImage[i] = (ushort)(rawInfraredImage1[i] << 4);
                    }
                    rawInfraredTimestamp = depthFrameTime != 0 ? depthFrameTime : frames.deviceTimestamp;
                    //Debug.Log("D" + deviceIndex + " RawInfraredTimestamp: " + rawInfraredTimestamp);
                }
            }
        }
    }

    // color & depth stream profiles
    if (frames.colorFrame != null)
    {
        colorStreamProfile = frames.colorFrame.Profile.As<VideoStreamProfile>();
    }

    if (frames.depthFrame != null)
    {
        depthStreamProfile = frames.depthFrame.Profile.As<VideoStreamProfile>();
    }

    // dispose body capture
    if (btCapture != null)
    {
        btCapture.Dispose();
    }

    // check for pose frame
    if (frames.poseFrame != null)
    {
        frames.poseFrame.CopyTo(out rsPoseData);

        lock (poseFrameLock)
        {
            // Axis sign flips convert the RealSense pose into this project's coordinate frame.
            rawPosePosition = new Vector3(rsPoseData.translation.x, rsPoseData.translation.y, -rsPoseData.translation.z);  // (1, 1, -1)
            rawPoseRotation = new Quaternion(-rsPoseData.rotation.x, -rsPoseData.rotation.y, rsPoseData.rotation.z, rsPoseData.rotation.w);  // (-1, -1, 1, 1);
            rawPoseTimestamp = frames.deviceTimestamp;
            //Debug.Log("D" + deviceIndex + " RawPoseTimestamp: " + rawPoseTimestamp);
        }
    }
}