/// <summary>
/// Queries the inverse UV map for the given depth frame: one entry per
/// color pixel, mapping color-image coordinates back into depth space.
/// </summary>
/// <param name="color">Color frame whose dimensions size the map.</param>
/// <param name="depth">Depth frame the map is computed against.</param>
/// <returns>Flat array of <c>color.width * color.height</c> map entries.</returns>
private PointF32[] GetInvUVMap(Intel.RealSense.Image color, Intel.RealSense.Image depth)
{
    int pixelCount = color.Info.width * color.Info.height;
    var map = new PointF32[pixelCount];
    projection.QueryInvUVMap(depth, map);
    return map;
}
/// <summary>
/// Handles an updated color image: acquires the frame's pixel data as RGB32,
/// converts it to a <see cref="BitmapSource"/>, and displays it in <c>imageColor</c>.
/// </summary>
/// <param name="colorFrame">The color frame to render; the call is a no-op when null.</param>
/// <exception cref="InvalidOperationException">Thrown when pixel access cannot be acquired.</exception>
private void UpdateColorImage(Intel.RealSense.Image colorFrame)
{
    if (colorFrame == null)
    {
        return;
    }

    // Acquire read access to the pixel data, converted to RGB32.
    ImageData data;
    Status ret = colorFrame.AcquireAccess(ImageAccess.ACCESS_READ, Intel.RealSense.PixelFormat.PIXEL_FORMAT_RGB32, out data);
    if (ret < Status.STATUS_NO_ERROR)
    {
        // FIX: was a bare 'new Exception(...)'; InvalidOperationException is more
        // specific and still caught by any existing 'catch (Exception)' handlers.
        throw new InvalidOperationException("カラー画像の取得に失敗");
    }

    // FIX: ReleaseAccess was leaked if conversion or BitmapSource.Create threw;
    // try/finally guarantees the access is always released.
    try
    {
        // Width, height and format of the frame.
        var info = colorFrame.Info;

        // Bytes per scan line (pitches[0]) times the number of lines.
        // The frame was acquired as RGB32 (4 bytes/pixel); the pitch already
        // reflects the real stride, so no per-pixel math is needed here.
        var length = data.pitches[0] * info.height;

        // Raw pixel bytes for the whole frame.
        var buffer = data.ToByteArray(0, length);

        // Convert the byte array to a WPF bitmap and show it.
        imageColor.Source = BitmapSource.Create(info.width, info.height, 96, 96, PixelFormats.Bgr32, null, buffer, data.pitches[0]);
    }
    finally
    {
        colorFrame.ReleaseAccess(data);
    }
}
/// <summary>
/// Processes the current sample: maps depth into the color camera's space,
/// updates both UI panels, and optionally saves an RGB-D capture to disk.
/// </summary>
private void ProcessImages()
{
    Sample sample = reader.Sample;
    Intel.RealSense.Image color = sample.Color;
    // Create a depth image remapped into the color camera's coordinate space.
    Intel.RealSense.Image depth = projection.CreateDepthImageMappedToColor(sample.Depth, color);

    ImageData colorData;
    ImageData depthData;
    color.AcquireAccess(ImageAccess.ACCESS_READ, colorPixelFormat, out colorData);
    depth.AcquireAccess(ImageAccess.ACCESS_READ, depthPixelFormat, out depthData);

    // FIX: release/dispose was skipped entirely if UpdateUI or a save call threw;
    // try/finally guarantees native resources are freed on every path.
    try
    {
        // Update the user interface.
        UpdateUI(colorData, color.Info, ImageType.COLOR);
        UpdateUI(depthData, depth.Info, ImageType.DEPTH);

        if ((captureImage || captureSeries) && !pauseSave)
        {
            int cwidth = color.Info.width;
            int cheight = color.Info.height;
            int dwidth = depth.Info.width;
            int dheight = depth.Info.height;

            float[] depthPixels = ImageToFloatArray(depth);
            PointF32[] invuvmap = GetInvUVMap(color, depth);
            Point3DF32[] mappedPixels = GetMappedPixels(cwidth, cheight, dwidth, dheight, invuvmap, depthPixels);

            // FIX: originals read "depthBitmap = depthBitmap = ..." — a confusing
            // duplicated assignment; 'using' also guarantees disposal if a save throws.
            using (Bitmap depthBitmap = GetDepthF32Bitmap(depth.Info.width, depth.Info.height, mappedPixels, allLandmarks))
            using (Bitmap colorBitmap = colorData.ToBitmap(0, cwidth, cheight))
            {
                // Save a single frame or one frame of a series, never both.
                if (captureImage)
                {
                    SaveSingleRgbdToDisk(colorBitmap, depthBitmap, mappedPixels);
                }
                else if (captureSeries)
                {
                    SaveSeriesRgbdToDisk(dirName, colorBitmap, depthBitmap, mappedPixels);
                }
            }
        }
    }
    finally
    {
        // Release access and native image resources.
        color.ReleaseAccess(colorData);
        depth.ReleaseAccess(depthData);
        color.Dispose();
        depth.Dispose();
        // NOTE(review): disposing the shared 'projection' every frame means it must
        // be recreated before the next call — confirm against the caller.
        projection.Dispose();
    }
}
/// <summary>
/// Copies a depth frame's pixels out as a flat row-major array of
/// 32-bit floats (one value per pixel, DEPTH_F32 format).
/// </summary>
/// <param name="depth">The depth frame to read.</param>
/// <returns>Array of <c>width * height</c> depth values.</returns>
private float[] ImageToFloatArray(Intel.RealSense.Image depth)
{
    ImageData depthData;
    depth.AcquireAccess(ImageAccess.ACCESS_READ, Intel.RealSense.PixelFormat.PIXEL_FORMAT_DEPTH_F32, out depthData);
    var info = depth.Info;
    float[] pixels = depthData.ToFloatArray(0, info.width * info.height);
    depth.ReleaseAccess(depthData);
    return pixels;
}
/// <summary>
/// Raises <see cref="RenderFrame"/> for the single configured stream type,
/// rendering it into panel 0. Does nothing when no subscriber is attached
/// or no specific stream type is configured.
/// </summary>
/// <param name="sample">The sample to pull the configured stream's image from.</param>
public void RenderStreams(RS.Sample sample)
{
    EventHandler<RenderFrameEventArgs> handler = RenderFrame;
    if (StreamType == RS.StreamType.STREAM_TYPE_ANY || handler == null)
    {
        return;
    }

    // Panel 0 shows the stream this instance was configured for.
    RS.Image image = sample[StreamType];
    handler(this, new RenderFrameEventArgs(0, image));
}
/// <summary>
/// Raises <see cref="RenderFrame"/>: the color stream always goes to panel 0,
/// and the configured secondary stream (if any) goes to panel 1.
/// </summary>
/// <param name="sample">The sample to pull stream images from.</param>
public void RenderStreams(RS.Sample sample)
{
    EventHandler<RenderFrameEventArgs> handler = RenderFrame;
    if (handler == null)
    {
        return;
    }

    // Panel 0 always shows the color stream.
    RS.Image image = sample[RS.StreamType.STREAM_TYPE_COLOR];
    handler(this, new RenderFrameEventArgs(0, image));

    // Panel 1 shows the configured stream, unless none was selected.
    if (StreamType != RS.StreamType.STREAM_TYPE_ANY)
    {
        image = sample[StreamType];
        handler(this, new RenderFrameEventArgs(1, image));
    }
}
/// <summary>Forwards a newly retrieved RGB frame to the RGB render panel.</summary>
/// <param name="handImage">The RGB frame to display.</param>
private void Rm_RGBImageRetreived(RS.Image handImage) => renderRGB.UpdatePanel(handImage);
/// <summary>Forwards a newly retrieved depth frame to the depth render panel.</summary>
/// <param name="handImage">The depth frame to display.</param>
private void Rm_DepthImageRetreived(RS.Image handImage) => renderDepth.UpdatePanel(handImage);
/// <summary>
/// Creates event args pairing a target panel index with the image to render there.
/// </summary>
/// <param name="index">Zero-based panel index (0 = main, 1 = picture-in-picture).</param>
/// <param name="image">The frame to render into that panel.</param>
public RenderFrameEventArgs(int index, RS.Image image)
{
    Index = index;
    Image = image;
}
/// <summary>
/// Streams color and depth frames (synchronously or asynchronously) until
/// <c>Stop</c> is set, rendering each frame via <see cref="RenderFrame"/>.
/// Configuration (playback/record file, device filter, stream profiles,
/// mirroring) is read from the instance's properties.
/// </summary>
public void StreamColorDepth()
{
    /* Stream Color and Depth Synchronously or Asynchronously */
    try
    {
        bool sts = true;

        /* Create an instance of the RS.SenseManager interface */
        RS.SenseManager sm = RS.SenseManager.CreateInstance();
        if (sm == null)
        {
            SetStatus("Failed to create an SDK pipeline object");
            return;
        }

        // FIX: originally sm.Dispose() was skipped whenever an exception escaped
        // the streaming loop (the outer catch ran first); disposing in finally
        // guarantees the pipeline's native resources are always freed.
        try
        {
            /* Optional: if playback or recording */
            if ((Playback || Record) && File != null)
            {
                sm.CaptureManager.SetFileName(File, Record);
            }

            /* Optional: set the input source (live capture only) */
            if (!Playback && DeviceInfo != null)
            {
                sm.CaptureManager.FilterByDeviceInfo(DeviceInfo);
            }

            /* Set color & depth resolution and enable the requested streams */
            if (StreamProfileSet != null)
            {
                /* Optional: filter the data based on the request */
                sm.CaptureManager.FilterByStreamProfiles(StreamProfileSet);

                /* Enable raw data streaming for each configured stream type */
                for (int s = 0; s < RS.Capture.STREAM_LIMIT; s++)
                {
                    RS.StreamType st = RS.Capture.StreamTypeFromIndex(s);
                    RS.StreamProfile info = StreamProfileSet[st];
                    if (info.imageInfo.format != 0)
                    {
                        /* For a simple request, sm.EnableStream(...) would also work */
                        RS.DataDesc desc = new RS.DataDesc();
                        desc.streams[st].frameRate.min = desc.streams[st].frameRate.max = info.frameRate.max;
                        desc.streams[st].sizeMin.height = desc.streams[st].sizeMax.height = info.imageInfo.height;
                        desc.streams[st].sizeMin.width = desc.streams[st].sizeMax.width = info.imageInfo.width;
                        desc.streams[st].options = info.options;
                        desc.receivePartialSample = true;

                        RS.SampleReader sampleReader = RS.SampleReader.Activate(sm);
                        sampleReader.EnableStreams(desc);
                    }
                }
            }

            /* Initialization */
            SetStatus("Init Started");
            if (sm.Init() >= RS.Status.STATUS_NO_ERROR)
            {
                /* Reset all device properties */
                sm.CaptureManager.Device.ResetProperties(RS.StreamType.STREAM_TYPE_ANY);

                /* Set initial mirror mode */
                RS.MirrorMode mirror = Mirror ? RS.MirrorMode.MIRROR_MODE_HORIZONTAL : RS.MirrorMode.MIRROR_MODE_DISABLED;
                sm.CaptureManager.Device.MirrorMode = mirror;

                SetStatus("Streaming");
                while (!Stop)
                {
                    /* Wait until a frame is ready: synchronized or asynchronous */
                    if (sm.AcquireFrame(Synced) < RS.Status.STATUS_NO_ERROR)
                    {
                        break;
                    }

                    /* Render the main panel and (optionally) the PIP panel */
                    RS.Sample sample = sm.Sample;
                    EventHandler<RenderFrameEventArgs> render = RenderFrame;
                    RS.Image image = null;
                    if (MainPanel != RS.StreamType.STREAM_TYPE_ANY && render != null)
                    {
                        image = sample[MainPanel];
                        render(this, new RenderFrameEventArgs(0, image));
                    }
                    if (PIPPanel != RS.StreamType.STREAM_TYPE_ANY && render != null)
                    {
                        render(this, new RenderFrameEventArgs(1, sample[PIPPanel]));
                    }

                    /* Optional: apply a mirror-state change made while streaming */
                    mirror = Mirror ? RS.MirrorMode.MIRROR_MODE_HORIZONTAL : RS.MirrorMode.MIRROR_MODE_DISABLED;
                    if (mirror != sm.CaptureManager.Device.MirrorMode)
                    {
                        sm.CaptureManager.Device.MirrorMode = mirror;
                    }

                    sm.ReleaseFrame();
                }
            }
            else
            {
                SetStatus("Init Failed");
                sts = false;
            }
        }
        finally
        {
            sm.Dispose();
        }

        if (sts)
        {
            SetStatus("Stopped");
        }
    }
    catch (Exception e)
    {
        // Deliberate best-effort: surface the failure type on the status line
        // instead of crashing the streaming thread.
        SetStatus(e.GetType().ToString());
    }
}