void OnEnable()
    {
        m_pipeline = new Pipeline();
        using (var cfg = DeviceConfiguration.ToPipelineConfig())
        {
            // ActiveProfile = m_config.Resolve(m_pipeline);
            ActiveProfile = m_pipeline.Start(cfg);
        }

        DeviceConfiguration.Profiles = new VideoStreamRequest[ActiveProfile.Streams.Count];
        for (int i = 0; i < DeviceConfiguration.Profiles.Length; i++)
        {
            var p = DeviceConfiguration.Profiles[i];
            var s = ActiveProfile.Streams[i];
            p.Stream      = s.Stream;
            p.Format      = s.Format;
            p.Framerate   = s.Framerate;
            p.StreamIndex = s.Index;
            var vs = s as VideoStreamProfile;
            if (vs != null)
            {
                p.Width  = vs.Width;
                p.Height = vs.Height;
            }
            DeviceConfiguration.Profiles[i] = p;
        }


        if (processMode == ProcessMode.Multithread)
        {
            stopEvent.Reset();
            worker = new Thread(WaitForFrames);
            worker.IsBackground = true;
            worker.Start();
        }

        StartCoroutine(WaitAndStart());
    }
    protected override void OnStartStreaming(PipelineProfile activeProfile)
    {
        aligner = new Align(alignTo);

        var profile = activeProfile.Streams.FirstOrDefault(p => p.Stream == alignTo);

        if (profile == null)
        {
            Debug.LogWarningFormat("Stream {0} not in active profile", sourceStreamType);
            return;
        }
        var videoProfile = profile as VideoStreamProfile;

        texture = new Texture2D(videoProfile.Width, videoProfile.Height, textureFormat, false, true)
        {
            wrapMode   = TextureWrapMode.Clamp,
            filterMode = filterMode
        };
        texture.Apply();
        textureBinding.Invoke(texture);

        RealSenseDevice.Instance.onNewSampleSet += OnFrameSet;
    }
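
OnFrameSet itself is not part of this snippet; a hedged sketch of how the handler could consume the aligner and texture created above (assuming the event is raised on the main thread and that textureFormat matches the stream's pixel layout; otherwise the upload must be deferred or converted):

    private void OnFrameSet(FrameSet frames)
    {
        using (FrameSet aligned = aligner.Process<FrameSet>(frames))
        using (VideoFrame frame = aligned.FirstOrDefault<VideoFrame>(alignTo))
        {
            if (frame == null)
                return;
            texture.LoadRawTextureData(frame.Data, frame.Stride * frame.Height);
            texture.Apply();
        }
    }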
        private void SetupWindow(PipelineProfile pipelineProfile, out Action <VideoFrame> depth, out Action <VideoFrame> color, out Action <VideoFrame> ir1, out Action <VideoFrame> ir2)
        {
            using (var p = pipelineProfile.GetStream(Intel.RealSense.Stream.Depth).As <VideoStreamProfile>())
                imgDepth.Source = new WriteableBitmap(p.Width, p.Height, 96d, 96d, PixelFormats.Rgb24, null);
            depth = UpdateImage(imgDepth);

            using (var p = pipelineProfile.GetStream(Intel.RealSense.Stream.Color).As <VideoStreamProfile>())
                imgColor.Source = new WriteableBitmap(p.Width, p.Height, 96d, 96d, PixelFormats.Rgb24, null);
            color = UpdateImage(imgColor);

            // left and right infrared streams (index 1 and 2)
            using (var p = pipelineProfile.GetStream(Intel.RealSense.Stream.Infrared, 1).As <VideoStreamProfile>())
                imgIRleft.Source = new WriteableBitmap(p.Width, p.Height, 96d, 96d, PixelFormats.Rgb24, null);
            ir1 = UpdateImage(imgIRleft);

            using (var p = pipelineProfile.GetStream(Intel.RealSense.Stream.Infrared, 2).As <VideoStreamProfile>())
                imgIRright.Source = new WriteableBitmap(p.Width, p.Height, 96d, 96d, PixelFormats.Rgb24, null);
            ir2 = UpdateImage(imgIRright);


            //using (var p = pipelineProfile.GetStream(Stream.Color).As<VideoStreamProfile>())
            //    ip.Source = new WriteableBitmap(p.Width, p.Height, 96d, 96d, PixelFormats.Rgb24, null);
            //iproc = UpdateImage(ip);
        }
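
The UpdateImage helper used by these WPF examples is not listed; the stock RealSense WPF samples implement it roughly like this (sketch, assuming the Image control already holds a WriteableBitmap of the right size):

    static Action<VideoFrame> UpdateImage(System.Windows.Controls.Image img)
    {
        var wbmp = img.Source as WriteableBitmap;
        return new Action<VideoFrame>(frame =>
        {
            var rect = new Int32Rect(0, 0, frame.Width, frame.Height);
            wbmp.WritePixels(rect, frame.Data, frame.Stride * frame.Height, frame.Stride);
        });
    }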
Example #4
        private void CameraStart()
        {
            // Setup config settings
            var cfg = new Config();

            cfg.EnableStream(Stream.Depth, 1280, 720, Format.Z16, 30);
            cfg.EnableStream(Stream.Color, 1280, 720, Format.Bgr8, 30);

            // Pipeline start
            Pipeline        pipeline = new Pipeline();
            PipelineProfile pp       = pipeline.Start(cfg);

            using (var p = pp.GetStream(Stream.Color) as VideoStreamProfile)
                Img_main.Source = new WriteableBitmap(p.Width, p.Height, 96d, 96d, System.Windows.Media.PixelFormats.Rgb24, null);
            Action <VideoFrame> updateColor = UpdateImage(Img_main);

            // Setup filter / alignment settings
            SetupFilters(out Colorizer colorizer, out DecimationFilter decimate, out SpatialFilter spatial, out TemporalFilter temp, out HoleFillingFilter holeFill, out Align align_to);
            // Setup frame processing
            CustomProcessingBlock processingBlock = SetupProcessingBlock(pipeline, colorizer, decimate, spatial, temp, holeFill, align_to);

            // Start frame processing
            StartProcessingBlock(processingBlock, pp, updateColor, pipeline);
        }
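
SetupFilters is only referenced here; a minimal sketch of such a helper, assuming default filter options and depth alignment as in Example #13:

    private void SetupFilters(out Colorizer colorizer, out DecimationFilter decimate, out SpatialFilter spatial,
                              out TemporalFilter temp, out HoleFillingFilter holeFill, out Align align_to)
    {
        colorizer = new Colorizer();
        decimate  = new DecimationFilter();
        spatial   = new SpatialFilter();
        temp      = new TemporalFilter();
        holeFill  = new HoleFillingFilter();
        align_to  = new Align(Stream.Depth);   // assumption: frames are aligned to the depth stream
    }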
Example #5
    public RealSenseSource()
    {
        using (Context ctx = new Context())
        {
            DeviceList devices = ctx.QueryDevices();

            Console.WriteLine("There are {0} connected RealSense devices.", devices.Count);

            MAssert.Check(devices.Count > 0, "there are no RealSense devices connected");

            for (int i = 0; i < devices.Count; ++i)
            {
                Console.WriteLine(
                    "Device {0}: name {1}, serial {2}, firmware version: {3}",
                    i,
                    devices[i].Info[CameraInfo.Name],
                    devices[i].Info[CameraInfo.SerialNumber],
                    devices[i].Info[CameraInfo.FirmwareVersion]);
            }
        }

        Config config = new Config();

        config.EnableStream(Stream.Depth, 640, 480, Format.Z16);
        config.EnableStream(Stream.Color, 640, 480, Format.Bgr8);

        pipeline = new Pipeline();

        PipelineProfile pipeline_profile = pipeline.Start(config);

        depth_scale = pipeline_profile.Device.Sensors[0].DepthScale * 1000;

        MAssert.Check(depth_scale > 0);

        align_to_color = new Align(Stream.Color);
    }
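
Since depth_scale folds the sensor's metres-per-unit scale together with the *1000 factor, a raw Z16 sample converts directly to millimetres; a small hedged sketch (fragment, requires unsafe code as in Example #9):

    // Sketch: read the centre pixel of a depth frame and convert it to millimetres.
    using (FrameSet frames = pipeline.WaitForFrames())
    using (DepthFrame depth = frames.DepthFrame)
    {
        unsafe
        {
            ushort* data   = (ushort*)depth.Data;
            ushort raw     = data[(depth.Height / 2) * depth.Width + depth.Width / 2];
            float depth_mm = raw * depth_scale;   // depth_scale = DepthScale * 1000
        }
    }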
Example #6
 public void StartWithCfg()
 {
     profile = pipeline.Start(currentConfig);
 }
        private void SetupWindow(out Action <VideoFrame> depth, out Action <VideoFrame> color, PipelineProfile pp)
        {
            //Display Depth
            using (VideoStreamProfile p = pp.GetStream(Intel.RealSense.Stream.Depth).As <VideoStreamProfile>())
                imgDepth.Source = new WriteableBitmap(p.Width, p.Height, 96d, 96d, PixelFormats.Rgb24, null);
            depth = UpdateImage(imgDepth);

            //Display Color
            using (VideoStreamProfile p = pp.GetStream(Intel.RealSense.Stream.Color).As <VideoStreamProfile>())
                imgColor.Source = new WriteableBitmap(p.Width, p.Height, 96d, 96d, PixelFormats.Rgb24, null);
            color = UpdateImage(imgColor);
        }
Example #8
    private void OnStartStreaming(PipelineProfile activeProfile)
    {
        pc = new PointCloud();

        using (var profile = activeProfile.GetStream(stream))
        {
            if (profile == null)
            {
                Debug.LogWarningFormat("Stream {0} not in active profile", stream);
            }
        }

        using (var profile = activeProfile.GetStream(Stream.Depth) as VideoStreamProfile)
        {
            intrinsics = profile.GetIntrinsics();

            Assert.IsTrue(SystemInfo.SupportsTextureFormat(TextureFormat.RGFloat));
            uvmap = new Texture2D(profile.Width, profile.Height, TextureFormat.RGFloat, false, true)
            {
                wrapMode   = TextureWrapMode.Clamp,
                filterMode = FilterMode.Point,
            };
            GetComponent <MeshRenderer>().sharedMaterial.SetTexture("_UVMap", uvmap);

            if (mesh != null)
            {
                Destroy(mesh);
            }

            mesh = new Mesh()
            {
                indexFormat = IndexFormat.UInt32,
            };

            vertices    = new Vector3[profile.Width * profile.Height];
            handle      = GCHandle.Alloc(vertices, GCHandleType.Pinned);
            verticesPtr = handle.AddrOfPinnedObject();

            var indices = new int[vertices.Length];
            for (int i = 0; i < vertices.Length; i++)
            {
                indices[i] = i;
            }

            mesh.MarkDynamic();
            mesh.vertices = vertices;

            var uvs = new Vector2[vertices.Length];
            Array.Clear(uvs, 0, uvs.Length);
            var invSize = new Vector2(1f / profile.Width, 1f / profile.Height);
            for (int j = 0; j < profile.Height; j++)
            {
                for (int i = 0; i < profile.Width; i++)
                {
                    uvs[i + j * profile.Width].x = i * invSize.x;
                    uvs[i + j * profile.Width].y = j * invSize.y;
                }
            }

            mesh.uv = uvs;

            mesh.SetIndices(indices, MeshTopology.Points, 0, false);
            mesh.bounds = new Bounds(Vector3.zero, Vector3.one * 10f);

            GetComponent <MeshFilter>().sharedMesh = mesh;
        }

        RealSenseDevice.Instance.onNewSampleSet += OnFrames;
    }
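
OnFrames is registered above but not shown; a hypothetical sketch of a handler matching this setup (the actual sample may differ), which turns each depth frame into a point cloud and fills the pinned vertex array backing the mesh:

    private void OnFrames(FrameSet frames)
    {
        using (var depthFrame = frames.DepthFrame)
        using (var points = pc.Process<Points>(depthFrame))
        {
            points.CopyVertices(vertices);   // writes into the GCHandle-pinned Vector3[] used by the mesh
        }
    }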
Example #9
        private void StartProcessingBlock(CustomProcessingBlock processingBlock, PipelineProfile pp, Action <VideoFrame> updateColor, Pipeline pipeline)
        {
            float[,,] posMap = new float[1280, 720, 3];

            Size RS_depthSize = new Size(1280, 720);
            Mat  processMat   = new Mat(RS_depthSize, DepthType.Cv8U, 3);

            processingBlock.Start(f =>
            {
                using (var frames = FrameSet.FromFrame(f))
                {
                    //var color_frame = frames.ColorFrame.DisposeWith(frames);
                    //color_frame.CopyTo(processMat.DataPointer);

                    var depthintr   = (pp.GetStream(Stream.Depth) as VideoStreamProfile).GetIntrinsics();
                    var depth_frame = frames.DepthFrame.DisposeWith(frames);

                    //float depth = depth_frame.GetDistance((int)thePoint.X,(int)thePoint.Y); //From
                    //thePos = HelperClass.DeprojectPixelToPoint(depthintr, thePoint, depth);

                    unsafe
                    {
                        Int16 *pixelPtr_byte = (Int16 *)depth_frame.Data;
                        for (int i = 0; i < 1280; i++)
                        {
                            for (int j = 0; j < 720; j++)
                            {
                                var tmpF        = HelperClass.DeprojectPixelToPoint(depthintr, new PointF(i, j), (float)pixelPtr_byte[j * 1280 + i] / 1000f);
                                posMap[i, j, 0] = tmpF[0];
                                posMap[i, j, 1] = tmpF[1];
                                posMap[i, j, 2] = tmpF[2];
                            }
                        }
                    }
                    // Dispatcher.Invoke(DispatcherPriority.Render, updateColor, color_frame); // for display
                }
            });


            //start
            var token = _tokenSource.Token;
            Action <VideoFrame> updateOriginColor = UpdateImage(Img_main);
            var t = Task.Factory.StartNew(() =>
            {
                Mat color_orig   = new Mat(RS_depthSize, DepthType.Cv8U, 3);
                Mat color_resize = new Mat(RS_depthSize, DepthType.Cv8U, 3);

                yoloWrapper = new YoloWrapper("modle\\yolov3-tiny-3obj.cfg", "modle\\yolov3-tiny-3obj_3cup.weights", "modle\\obj.names");
                string detectionSystemDetail = string.Empty;
                if (!string.IsNullOrEmpty(yoloWrapper.EnvironmentReport.GraphicDeviceName))
                {
                    detectionSystemDetail = $"({yoloWrapper.EnvironmentReport.GraphicDeviceName})";
                }
                Console.WriteLine($"Detection System:{yoloWrapper.DetectionSystem}{detectionSystemDetail}");

                while (!token.IsCancellationRequested)
                {
                    using (var frames = pipeline.WaitForFrames())
                    {
                        if (showType == imgType.color)
                        {
                            VideoFrame color_frame = frames.ColorFrame.DisposeWith(frames);
                            color_frame.CopyTo(color_orig.DataPointer);

                            CvInvoke.WarpPerspective(color_orig, color_resize, matrix, new Size(1280, 720));


                            //CvInvoke.Imwrite("yolo1.png", color_resize);
                            //try { items = yoloWrapper.Detect(@"yolo1.png"); }
                            //catch { break; }
                            CvInvoke.Imwrite("yolo2.png", color_orig);
                            try { items = yoloWrapper.Detect(@"yolo2.png"); }
                            catch { break; }

                            CvInvoke.CvtColor(color_resize, color_resize, ColorConversion.Bgr2Rgb);
                            processingBlock.ProcessFrames(frames);

                            foreach (YoloItem item in items)
                            {
                                string name = item.Type;
                                //int x = item.X;
                                //int y = item.Y;
                                mapToRsImg(item.X, item.Y, out int x, out int y);
                                mapToRsImg(item.X + item.Width, item.Y + item.Height, out int x2, out int y2);

                                //int H = item.Height;
                                //int W = item.Width;
                                int H = y2 - y;
                                int W = x2 - x;

                                mapToRsImg(item.Center().X, item.Center().Y, out int Cx, out int Cy);
                                //Point center = item.Center();
                                Point center = new Point(Cx, Cy);

                                int evilLine   = 500;
                                int evilLinex1 = 580;
                                int evilLinex2 = 660;
                                if (showEvilLine)
                                {
                                    CvInvoke.Line(color_resize, new Point(0, evilLine), new Point(1280, evilLine), new MCvScalar(100, 100, 250), 2);         // above this line: likely at the coffee machine
                                    CvInvoke.Line(color_resize, new Point(evilLinex1, 0), new Point(evilLinex1, evilLine), new MCvScalar(100, 100, 250), 2); //
                                    CvInvoke.Line(color_resize, new Point(evilLinex2, 0), new Point(evilLinex2, evilLine), new MCvScalar(100, 100, 250), 2); //
                                }

                                if (y > evilLine || x < evilLinex1 || x > evilLinex2) // i.e. not near the coffee machine
                                {
                                    if (item.Confidence < 0.5)                        // skip low-confidence detections to avoid picking up other objects
                                    {
                                        continue;
                                    }
                                }


                                float[] objPos = new float[] { posMap[center.X, center.Y, 0], posMap[center.X, center.Y, 1], posMap[center.X, center.Y, 2] };

                                if (objPos[0] == 0 || objPos[1] == 0 || objPos[2] == 0) // otherwise zeros would skew the average
                                {
                                    continue;
                                }

                                if (name == "blue cup")//index 0
                                {
                                    //evil color check
                                    MCvScalar clr = MyInvoke.GetColorM(color_resize, center.Y - 5, center.X);
                                    if (clr.V0 > clr.V2) // R > B: YOLO misidentified it
                                    {
                                        continue;
                                    }

                                    //  CvInvoke.PutText(color_resize, "B", new Point(x, y), FontFace.HersheySimplex, 1.2, new MCvScalar(80, 150, 220), 2);
                                    CvInvoke.PutText(color_resize, item.Confidence.ToString("0.0"), new Point(x, y), FontFace.HersheySimplex, 1.2, new MCvScalar(80, 150, 220), 3);
                                    CvInvoke.Rectangle(color_resize, new Rectangle(x, y, W, H), new MCvScalar(80, 150, 220), 3);

                                    process_actionOfCups(cups[0], mat_cup, TB_Bcup_msg, TB_Bcup_state, objPos, 10, 40);
                                    //this.Dispatcher.Invoke((Action)(() => { TB_Bcup.Text = $"({posMap[center.X, center.Y, 0].ToString("0.000")},{posMap[center.X, center.Y, 1].ToString("0.000")},{posMap[center.X, center.Y, 2].ToString("0.000")})"; }));
                                    CvInvoke.Circle(color_resize, center, 10, new MCvScalar(200, 200, 20), -1);
                                }
                                else if (name == "pink cup")//index 1
                                {
                                    //evil color check
                                    MCvScalar clr = MyInvoke.GetColorM(color_resize, center.Y - 5, center.X);
                                    if (clr.V0 < clr.V2) // R < B: YOLO misidentified it
                                    {
                                        continue;
                                    }

                                    // CvInvoke.PutText(color_resize, "P", new Point(x, y), FontFace.HersheySimplex, 1.2, new MCvScalar(250, 80, 80), 2);
                                    CvInvoke.PutText(color_resize, item.Confidence.ToString("0.0"), new Point(x, y), FontFace.HersheySimplex, 1.2, new MCvScalar(250, 80, 80), 3);
                                    CvInvoke.Rectangle(color_resize, new Rectangle(x, y, W, H), new MCvScalar(250, 80, 80), 3);

                                    process_actionOfCups(cups[1], mat_cup, TB_Pcup_msg, TB_Pcup_state, objPos, 60, 90);
                                    //this.Dispatcher.Invoke((Action)(() => { TB_Pcup.Text = $"({posMap[center.X, center.Y, 0].ToString("0.000")},{posMap[center.X, center.Y, 1].ToString("0.000")},{posMap[center.X, center.Y, 2].ToString("0.000")})"; }));
                                    CvInvoke.Circle(color_resize, center, 10, new MCvScalar(200, 200, 20), -1);
                                }
                            }//foreach cups
                            timeTick++;
                            this.Dispatcher.Invoke((Action)(() =>
                            {
                                img_cupState.Source = BitmapSourceConvert.ToBitmapSource(mat_cup);
                            }));
                            color_frame.CopyFrom(color_resize.DataPointer);
                            Dispatcher.Invoke(DispatcherPriority.Render, updateOriginColor, color_frame);
                        }
                        else if (showType == imgType.mix) // show the mixed image
                        {
                            processingBlock.ProcessFrames(frames);
                        }
                    }
                }
            }, token);
        }
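
HelperClass.DeprojectPixelToPoint above mirrors librealsense's rs2_deproject_pixel_to_point; a hypothetical sketch for the no-distortion case (all distortion coefficients assumed zero):

    public static float[] DeprojectPixelToPoint(Intrinsics intrin, PointF pixel, float depth)
    {
        float x = (pixel.X - intrin.ppx) / intrin.fx;   // normalized image coordinates
        float y = (pixel.Y - intrin.ppy) / intrin.fy;
        return new float[] { depth * x, depth * y, depth };
    }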
Example #10
    private void OnStartStreaming(PipelineProfile activeProfile)
    {
        pc          = new PointCloud();
        spatial     = new SpatialFilter();
        temporal    = new TemporalFilter();
        holeFilling = new HoleFillingFilter();

        using (var profile = activeProfile.GetStream(stream))
        {
            if (profile == null)
            {
                Debug.LogWarningFormat("Stream {0} not in active profile", stream);
            }
        }

        using (var profile = activeProfile.GetStream(Stream.Depth) as VideoStreamProfile)
        {
            intrinsics = profile.GetIntrinsics();

            Assert.IsTrue(SystemInfo.SupportsTextureFormat(TextureFormat.RGFloat));


            numParticles = (profile.Width - 1) * (profile.Height - 1) * 2;

            vertices    = new Vector3[profile.Width * profile.Height];
            handle      = GCHandle.Alloc(vertices, GCHandleType.Pinned);
            verticesPtr = handle.AddrOfPinnedObject();

            var indices = new int[(profile.Width - 1) * (profile.Height - 1) * 6];

            var iIdx = 0;
            for (int j = 0; j < profile.Height; j++)
            {
                for (int i = 0; i < profile.Width; i++)
                {
                    if (i < profile.Width - 1 && j < profile.Height - 1)
                    {
                        var idx = i + j * profile.Width;
                        var y   = profile.Width;
                        indices[iIdx++] = idx + 0;
                        indices[iIdx++] = idx + y;
                        indices[iIdx++] = idx + 1;

                        indices[iIdx++] = idx + 1;
                        indices[iIdx++] = idx + y;
                        indices[iIdx++] = idx + y + 1;
                    }
                }
            }

            particleBuffer = new ComputeBuffer(numParticles, Marshal.SizeOf(typeof(VoxelParticle)));
            vertexBuffer   = new ComputeBuffer(vertices.Length, sizeof(float) * 3);
            indicesBuffer  = new ComputeBuffer(indices.Length, sizeof(int));

            vertexBuffer.SetData(vertices);
            indicesBuffer.SetData(indices);
            renderer.SetBuffer("_VoxelBuffer", particleBuffer);

            ResetParticle();

            if (mesh != null)
            {
                Destroy(mesh);
            }

            mesh = new Mesh()
            {
                indexFormat = IndexFormat.UInt32,
            };
            mesh.MarkDynamic();

            mesh.vertices = new Vector3[numParticles];
            var newIndices = Enumerable.Range(0, numParticles).ToArray();

            mesh.SetIndices(newIndices, MeshTopology.Points, 0, false);
            mesh.bounds = new Bounds(Vector3.zero, Vector3.one * 10f);

            GetComponent <MeshFilter>().sharedMesh = mesh;
        }

        RealSenseDevice.Instance.onNewSampleSet += OnFrames;
    }
Example #11
 /// <summary>
 /// Gets the coordinate mapper of the current pipeline profile.
 /// </summary>
 /// <param name="pipeline">The current pipeline.</param>
 /// <param name="colorWidth">The desired color frame width.</param>
 /// <param name="colorHeight">The desired color frame height.</param>
 /// <param name="depthWidth">The desired depth frame width.</param>
 /// <param name="depthHeight">The desired depth frame height.</param>
 /// <returns>The color/depth coordinate mapper of the current pipeline, if all of the supported streams were found. Null otherwise.</returns>
 public static CoordinateMapper GetCoordinateMapper(this PipelineProfile pipeline, int colorWidth, int colorHeight, int depthWidth, int depthHeight)
 {
     return(CoordinateMapper.Create(pipeline, colorWidth, colorHeight, depthWidth, depthHeight));
 }
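
A hedged usage example of this extension (the stream sizes here are assumptions and must match what the pipeline was started with; pipelineProfile is the PipelineProfile returned by pipeline.Start):

    CoordinateMapper mapper = pipelineProfile.GetCoordinateMapper(1280, 720, 640, 480);
    if (mapper == null)
    {
        // at least one of the required color/depth streams is missing from the active profile
    }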
 public void OnStartStreaming(PipelineProfile activeProfile)
 {
     q                   = new FrameQueue(1);
     matcher             = new Predicate <Frame>(Matches);
     Source.OnNewSample += OnNewSample;
 }
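
The Matches predicate and OnNewSample handler wired up here are not shown; a hypothetical sketch of the usual pattern (the stream, format and streamIndex fields are assumptions):

    private bool Matches(Frame f)
    {
        using (var p = f.Profile)
            return p.Stream == stream && p.Format == format && p.Index == streamIndex;
    }

    private void OnNewSample(Frame frame)
    {
        if (matcher(frame))
            q.Enqueue(frame);   // single-slot queue keeps only the most recent matching frame
    }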
Example #13
        /********************************************************************
         * The RealSense control section starts below
         *********************************************************************/


        /** Open the RealSense camera
         * **/
        public void Start()
        {
            profile = pipeline.Start();
        }
        void Init()
        {
            try
            {
                #region FILTERS

                spatialFilter = new SpatialFilter();
                spatialFilter.Options[Option.FilterMagnitude].Value   = 5.0F;
                spatialFilter.Options[Option.FilterSmoothAlpha].Value = 0.25F;
                spatialFilter.Options[Option.FilterSmoothDelta].Value = 50.0F;

                decimationFilter = new DecimationFilter();
                decimationFilter.Options[Option.FilterMagnitude].Value = 2.0F;

                holeFilter = new HoleFillingFilter();

                thresholdFilter = new ThresholdFilter();
                //thresholdFilter.Options[Option.MinDistance].Value = 0.73F;
                //thresholdFilter.Options[Option.MaxDistance].Value = 0.81F;

                #endregion

                align_to  = new Align(Intel.RealSense.Stream.Depth);
                colorizer = new Colorizer();
                pipeline  = new Pipeline();

                //CONFIG SETTINGS
                var cfg = new Config();
                cfg.EnableStream(Intel.RealSense.Stream.Depth, resolutionW, resolutionH, Format.Z16, FPS); // depth resolution: manual change
                cfg.EnableStream(Intel.RealSense.Stream.Color, 640, 480, Format.Rgb8, 30);
                pipelineProfile = pipeline.Start(cfg);                                                     //stream starting with user config

                var advancedDevice = AdvancedDevice.FromDevice(pipelineProfile.Device);                    //connected device
                //read device's configuration settings from json file
                advancedDevice.JsonConfiguration = File.ReadAllText(@"CustomConfig.json");
                selectedDevice = pipelineProfile.Device;

                #region Field Of View Info

                float[] dfov, cfov, irfov;

                var        depth_stream = pipelineProfile.GetStream <VideoStreamProfile>(Intel.RealSense.Stream.Depth);
                Intrinsics depthIntr    = depth_stream.GetIntrinsics();
                dfov = depthIntr.FOV; // float[2] - horizontal and vertical field of view in degrees

                var        color_stream = pipelineProfile.GetStream <VideoStreamProfile>(Intel.RealSense.Stream.Color);
                Intrinsics colorIntr    = color_stream.GetIntrinsics();
                cfov = colorIntr.FOV; // float[2] - horizontal and vertical field of view in degrees

                var        ir_stream = pipelineProfile.GetStream <VideoStreamProfile>(Intel.RealSense.Stream.Infrared);
                Intrinsics irIntr    = ir_stream.GetIntrinsics();
                irfov = irIntr.FOV; // float[2] - horizontal and vertical field of view in degrees

                lblDepthFov.Text    = "Depth FOV : " + "H = " + Convert.ToInt32(dfov[0]).ToString() + "° , " + "V = " + Convert.ToInt32(dfov[1]).ToString() + "°";
                lblColorFov.Text    = "RGB FOV   : " + "H = " + Convert.ToInt32(cfov[0]).ToString() + "° , " + "V = " + Convert.ToInt32(cfov[1]).ToString() + "°";
                lblInfraredFov.Text = "IR FOV   : " + "H = " + Convert.ToInt32(irfov[0]).ToString() + "° , " + "V = " + Convert.ToInt32(irfov[1]).ToString() + "°";


                #endregion


                //get primary screen resolutions
                screenWidth  = Convert.ToInt32(System.Windows.SystemParameters.PrimaryScreenWidth.ToString());
                screenHeight = Convert.ToInt32(System.Windows.SystemParameters.PrimaryScreenHeight.ToString());

                //camera started working. transfer image to interface
                SetupWindow(pipelineProfile, out updateDepth, out updateColor, out updateIR1, out updateIR2);
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
            }
        }
Example #15
    private void OnStartStreaming(PipelineProfile obj)
    {
        q = new FrameQueue(1);

        using (var depth = obj.GetStream(Stream.Depth) as VideoStreamProfile)
        {
            Assert.IsTrue(SystemInfo.SupportsTextureFormat(TextureFormat.RGFloat));


            numParticles = (depth.Width - 1) * (depth.Height - 1) * 2;

            vertices    = new Vector3[depth.Width * depth.Height];
            handle      = GCHandle.Alloc(vertices, GCHandleType.Pinned);
            verticesPtr = handle.AddrOfPinnedObject();

            var indices = new int[(depth.Width - 1) * (depth.Height - 1) * 6];

            var iIdx = 0;
            for (int j = 0; j < depth.Height; j++)
            {
                for (int i = 0; i < depth.Width; i++)
                {
                    if (i < depth.Width - 1 && j < depth.Height - 1)
                    {
                        var idx = i + j * depth.Width;
                        var y   = depth.Width;
                        indices[iIdx++] = idx + 0;
                        indices[iIdx++] = idx + y;
                        indices[iIdx++] = idx + 1;

                        indices[iIdx++] = idx + 1;
                        indices[iIdx++] = idx + y;
                        indices[iIdx++] = idx + y + 1;
                    }
                }
            }

            particleBuffer = new ComputeBuffer(numParticles, Marshal.SizeOf(typeof(VoxelParticle)));
            vertexBuffer   = new ComputeBuffer(vertices.Length, sizeof(float) * 3);
            indicesBuffer  = new ComputeBuffer(indices.Length, sizeof(int));

            vertexBuffer.SetData(vertices);
            indicesBuffer.SetData(indices);
            renderer.SetBuffer("_VoxelBuffer", particleBuffer);

            SetMotionParticle();
            ResetParticle();

            if (mesh != null)
            {
                Destroy(mesh);
            }

            mesh = new Mesh()
            {
                indexFormat = IndexFormat.UInt32,
            };
            mesh.MarkDynamic();

            mesh.vertices = new Vector3[numParticles];
            var newIndices = Enumerable.Range(0, numParticles).ToArray();

            mesh.SetIndices(newIndices, MeshTopology.Points, 0, false);
            mesh.bounds = new Bounds(Vector3.zero, Vector3.one * 10f);

            GetComponent <MeshFilter>().sharedMesh = mesh;
        }

        source.OnNewSample += OnNewSample;
    }
 /// <summary>
 /// Creates a new coordinate mapper for the specified pipeline.
 /// </summary>
 /// <param name="pipeline">The specified pipeline.</param>
 /// <returns>The coordinate mapper of the current pipeline, if all of the supported streams were found. Null otherwise.</returns>
 public static CoordinateMapper Create(PipelineProfile pipeline)
 {
     return(Create(pipeline, DefaultColorWidth, DefaultColorHeight, DefaultDepthWidth, DefaultDepthHeight));
 }
Example #17
        private void StartCapture(int networkHeight, ComputationMode computationMode)
        {
            try {
                bool bDevicesFound = QueryRealSenseDevices();
                if (bDevicesFound == false)
                {
                    Console.WriteLine("Cannot start acquisition as no RealSense is connected.");
                    toggleStartStop.IsChecked    = false;
                    this.toggleStartStop.Content = "\uF5B0";
                    // Enable all controls
                    this.computationBackend.IsEnabled = true;
                    this.networkSlider.IsEnabled      = true;
                    // Stop demo

                    string acq_msg = string.Format("Acquisition Status:\t OFFLINE");
                    acquisition_status.Dispatcher.BeginInvoke((Action) delegate { acquisition_status.Text = acq_msg; });
                    return;
                }

                // get the selected image width and height
                int nImageWidth  = sensorResolutions[resolutionOptionBox.SelectedIndex].Width;
                int nImageHeight = sensorResolutions[resolutionOptionBox.SelectedIndex].Height;

                Console.WriteLine(
                    string.Format("Enabling the {0} S.No: {1}",
                                  availableDevices[camera_source.SelectedIndex].Info[CameraInfo.Name],
                                  availableDevices[camera_source.SelectedIndex].Info[CameraInfo.SerialNumber]));
                Console.WriteLine(
                    string.Format("Selected resolution for the image acquisition is {0}x{1}", nImageWidth, nImageHeight));
                Console.WriteLine(string.Format("Selected network size: {0} along with {1} as the computation device",
                                                networkHeight,
                                                computationMode));
                selectedDeviceSerial = availableDevices[camera_source.SelectedIndex].Info[CameraInfo.SerialNumber];
                // Create and config the pipeline to stream color and depth frames.
                cfg.EnableDevice(availableDevices[camera_source.SelectedIndex].Info[CameraInfo.SerialNumber]);
                cfg.EnableStream(Intel.RealSense.Stream.Color, nImageWidth, nImageHeight, Format.Bgr8, framerate: 30);
                cfg.EnableStream(Intel.RealSense.Stream.Depth, nImageWidth, nImageHeight, framerate: 30);

                Task.Factory.StartNew(() => {
                    try {
                        // Create and config the pipeline to stream color and depth frames.
                        pp = pipeline.Start(cfg);
                        intrinsicsDepthImagerMaster =
                            (pp.GetStream(Intel.RealSense.Stream.Depth).As <VideoStreamProfile>()).GetIntrinsics();

                        // Initialise cubemos DNN framework with the required deep learning model and the target compute
                        // device. Currently CPU and GPU are supported target devices. FP32 model is necessary for the
                        // CPU and FP16 model is required by the Myriad device and GPU

                        Cubemos.SkeletonTracking.Api skeletontrackingApi;

                        String cubemosModelDir = Common.DefaultModelDir();

                        var computeDevice = Cubemos.TargetComputeDevice.CM_CPU;
                        var modelFile     = cubemosModelDir + "\\fp32\\skeleton-tracking.cubemos";

                        if (computationMode == ComputationMode.GPU)
                        {
                            computeDevice = Cubemos.TargetComputeDevice.CM_GPU;
                            modelFile     = cubemosModelDir + "\\fp16\\skeleton-tracking.cubemos";
                        }
                        else if (computationMode == ComputationMode.MYRIAD)
                        {
                            computeDevice = Cubemos.TargetComputeDevice.CM_MYRIAD;
                            modelFile     = cubemosModelDir + "\\fp16\\skeleton-tracking.cubemos";
                        }

                        var licenseFolder = Common.DefaultLicenseDir();
                        try {
                            skeletontrackingApi = new SkeletonTracking.Api(licenseFolder);
                        }
                        catch (Exception ex) {
                            throw new Cubemos.Exception(
                                String.Format("Activation key or license key not found in {0}.\n " +
                                              "If you haven't activated the SDK yet, please run post_installation script as described in the Getting Started Guide to activate your license.",
                                              licenseFolder));
                        }

                        try {
                            skeletontrackingApi.LoadModel(computeDevice, modelFile);
                        }
                        catch (Exception ex) {
                            if (File.Exists(modelFile))
                            {
                                throw new Cubemos.Exception(
                                    "Internal error occured during model initialization. Please make sure your compute device satisfies the hardware system requirements.");
                            }
                            else
                            {
                                throw new Cubemos.Exception(
                                    string.Format("Model file \"{0}\" not found. Details: \"{1}\"", modelFile, ex));
                            }
                        }

                        Console.WriteLine("Finished initialization");

                        Stopwatch fpsStopwatch = new Stopwatch();
                        double fps             = 0.0;
                        int nFrameCnt          = 0;

                        bool firstRun = true;

                        Console.WriteLine("Starting image acquisition and skeleton keypoints");
                        while (!tokenSource.Token.IsCancellationRequested)
                        {
                            int pipelineID = 1;
                            if (bEnableTracking == false)
                            {
                                pipelineID = 0;
                            }

                            fpsStopwatch.Restart();

                            // We wait for the next available FrameSet, using a releaser object that tracks
                            // all newly allocated .NET frames and ensures deterministic finalization at the
                            // end of the scope.
                            using (var releaser = new FramesReleaser())
                            {
                                using (var frames = pipeline.WaitForFrames())
                                {
                                    if (frames.Count != 2)
                                    {
                                        Console.WriteLine("Not all frames are available...");
                                    }

                                    var f = frames.ApplyFilter(align).DisposeWith(releaser).AsFrameSet().DisposeWith(
                                        releaser);

                                    var colorFrame = f.ColorFrame.DisposeWith(releaser);
                                    depthFrame     = f.DepthFrame.DisposeWith(releaser);

                                    var alignedDepthFrame = align.Process <DepthFrame>(depthFrame).DisposeWith(f);

                                    if (temporalFilterEnabled)
                                    {
                                        alignedDepthFrame = temp.Process <DepthFrame>(alignedDepthFrame).DisposeWith(f);
                                    }

                                    // We colorize the depth frame for visualization purposes
                                    var colorizedDepth =
                                        colorizer.Process <VideoFrame>(alignedDepthFrame).DisposeWith(f);

                                    // Preprocess the input image
                                    Bitmap inputImage    = FrameToBitmap(colorFrame);
                                    Bitmap inputDepthMap = FrameToBitmap((VideoFrame)colorizedDepth);

                                    // Run the inference on the preprocessed image
                                    List <SkeletonKeypoints> skeletonKeypoints;
                                    skeletontrackingApi.RunSkeletonTracking(
                                        ref inputImage, networkHeight, out skeletonKeypoints, pipelineID);

                                    if (firstRun)
                                    {
                                        Cnv2.Dispatcher.BeginInvoke((Action) delegate { Panel.SetZIndex(Cnv2, -1); },
                                                                    System.Windows.Threading.DispatcherPriority.Render);

                                        toggleStartStop.Dispatcher.BeginInvoke(
                                            (Action) delegate { toggleStartStop.IsEnabled = true; });

                                        firstRun = false;
                                    }

                                    Bitmap displayImage;
                                    if (bShowOnlySkeletons)
                                    {
                                        displayImage = new Bitmap(inputImage.Width, inputImage.Height);
                                        using (Graphics g = Graphics.FromImage(displayImage))
                                        {
                                            g.Clear(System.Drawing.Color.Black);
                                        }
                                    }
                                    else
                                    {
                                        displayImage = new Bitmap(inputImage);
                                    }

                                    Graphics graphics = Graphics.FromImage(displayImage);

                                    // Render the correct skeletons detected from the inference
                                    if (true == bRenderSkeletons)
                                    {
                                        renderSkeletons(
                                            skeletonKeypoints, nImageWidth, nImageHeight, bEnableTracking, graphics);
                                    }

                                    if (true == bRenderCoordinates)
                                    {
                                        renderCoordinates(skeletonKeypoints, nImageWidth, graphics);
                                    }

                                    if (false == bHideRenderImage)   // Render the final frame onto the display window
                                    {
                                        imgColor.Dispatcher.BeginInvoke(renderDelegate, imgColor, displayImage);
                                    }
                                    if (true == bRenderDepthMap)   // Overlay the depth map onto the display window
                                    {
                                        imgColor.Dispatcher.BeginInvoke(renderDelegate, imgDepth, inputDepthMap);
                                    }

                                    nFrameCnt++;
                                    fps += (double)(1000.0 / (double)fpsStopwatch.ElapsedMilliseconds);

                                    if (nFrameCnt % 25 == 0)
                                    {
                                        string msg = String.Format("FPS:\t\t\t{0:F2}", fps / nFrameCnt);
                                        fps_output.Dispatcher.BeginInvoke((Action) delegate { fps_output.Text = msg; });
                                        fps       = 0;
                                        nFrameCnt = 0;
                                    }

                                    string msg_person_count =
                                        string.Format("Person Count:\t\t{0}", skeletonKeypoints.Count);
                                    person_count.Dispatcher.BeginInvoke(
                                        (Action) delegate { person_count.Text = msg_person_count; });
                                }
                            }
                        }
                    }
                    catch (System.Exception exT) {
                        string errorMsg = string.Format(
                            "Internal Error Occured. Application will now close.\nError Details:\n\n\"{0}\"",
                            exT.Message);
                        Cnv2.Dispatcher.BeginInvoke(
                            new InfoDialog.ShowInfoDialogDelegate(InfoDialog.ShowInfoDialog), "Error", errorMsg);
                    }
                }, tokenSource.Token);
            }
            catch (System.Exception ex) {
                string errorMsg = string.Format(
                    "Internal Error Occured. Application will now close.\nError Details:\n\n\"{0}\"", ex.Message);
                Cnv2.Dispatcher.BeginInvoke(
                    new InfoDialog.ShowInfoDialogDelegate(InfoDialog.ShowInfoDialog), "Error", errorMsg);
            }
        }
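
FrameToBitmap is referenced but not shown; for Bgr8 color frames it can be as simple as wrapping the frame buffer in a 24-bpp bitmap (sketch; no copy is made, so the frame must stay alive while the bitmap is in use):

    private static Bitmap FrameToBitmap(VideoFrame frame)
    {
        return new Bitmap(frame.Width, frame.Height, frame.Stride,
                          System.Drawing.Imaging.PixelFormat.Format24bppRgb, frame.Data);
    }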
Example #18
 /// <summary>
 /// Gets the coordinate mapper of the current pipeline profile.
 /// </summary>
 /// <param name="pipeline">The current pipeline.</param>
 /// <returns>The color/depth coordinate mapper of the current pipeline, if all of the supported streams were found. Null otherwise.</returns>
 public static CoordinateMapper GetCoordinateMapper(this PipelineProfile pipeline)
 {
     return(CoordinateMapper.Create(pipeline));
 }
Example #19
        private void RunThread(CancellationToken token)
        {
            Intel.RealSense.PointCloud pc = new Intel.RealSense.PointCloud();

            DecimationFilter dec_filter  = new DecimationFilter();
            SpatialFilter    spat_filter = new SpatialFilter();
            TemporalFilter   temp_filter = new TemporalFilter();

            dec_filter.Options[Option.FilterMagnitude].Value = DecimationMagnitude;

            spat_filter.Options[Option.FilterMagnitude].Value   = SpatialMagnitude;
            spat_filter.Options[Option.FilterSmoothAlpha].Value = Math.Min(1.0f, (float)SpatialSmoothAlpha);
            spat_filter.Options[Option.FilterSmoothDelta].Value = (float)SpatialSmoothDelta;

            temp_filter.Options[Option.FilterSmoothAlpha].Value = Math.Min(1.0f, (float)TemporalSmoothAlpha);
            temp_filter.Options[Option.FilterSmoothDelta].Value = (float)TemporalSmoothDelta;

            List <ProcessingBlock> filters = new List <ProcessingBlock> {
                dec_filter, spat_filter, temp_filter
            };
            Align align_to_depth = new Align(Stream.Depth);

            var cfg = new Config();

            cfg.EnableStream(Stream.Depth, 640, 480);
            cfg.EnableStream(Stream.Color, 1280, 720, Format.Rgb8);

            var             pipeline = new Pipeline();
            PipelineProfile pp       = null;

            try
            {
                pp = pipeline.Start(cfg);
            }
            catch (Exception e)
            {
                RhinoApp.WriteLine("RsToolkit: " + e.Message);
                return;
            }

            while (!token.IsCancellationRequested)
            {
                try
                {
                    using (var frames = pipeline.WaitForFrames())
                    {
                        var aligned = align_to_depth.Process <FrameSet>(frames).DisposeWith(frames);
                        var color   = aligned.ColorFrame.DisposeWith(frames);

                        pc.MapTexture(color);

                        var filtered = aligned[Stream.Depth].DisposeWith(frames);

                        foreach (var filter in filters)
                        {
                            filtered = filter.Process(filtered).DisposeWith(frames);
                        }

                        Points points = pc.Process <Points>(filtered);

                        var vertices   = new Point3f[points.Count];
                        var tex_coords = new Point2f[points.Count];

                        points.CopyVertices <Point3f>(vertices);
                        points.CopyTextureCoords <Point2f>(tex_coords);

                        Debug.Assert(vertices.Length == tex_coords.Length);

                        // ======== CULL INVALID POINTS ========

                        if (true)
                        {
                            var flags    = new bool[vertices.Length];
                            int new_size = 0;
                            for (int i = 0; i < vertices.Length; ++i)
                            {
                                if (vertices[i].Z > 0.1)
                                {
                                    flags[i] = true;
                                    new_size++;
                                }
                            }

                            var new_vertices   = new Point3f[new_size];
                            var new_tex_coords = new Point2f[new_size];

                            for (int i = 0, j = 0; i < vertices.Length; ++i)
                            {
                                if (flags[i])
                                {
                                    new_vertices[j]   = vertices[i];
                                    new_tex_coords[j] = tex_coords[i];
                                    ++j;
                                }
                            }

                            vertices   = new_vertices;
                            tex_coords = new_tex_coords;
                        }

                        // ======== TRANSFORM ========

                        if (m_xform.IsValid)
                        {
                            Parallel.For(0, vertices.Length, (i) =>   // upper bound is exclusive, so transform every vertex
                            {
                                vertices[i].Transform(m_xform);
                            });
                        }

                        // ======== CLIP TO BOX ========

                        if (m_clipping_box.IsValid &&
                            m_clipping_box.X.Length > 0 &&
                            m_clipping_box.Y.Length > 0 &&
                            m_clipping_box.Z.Length > 0)
                        {
                            Point3d box_centre = m_clipping_box.Plane.Origin;
                            double  minx = m_clipping_box.X.Min + box_centre.X, maxx = m_clipping_box.X.Max + box_centre.X;
                            double  miny = m_clipping_box.Y.Min + box_centre.Y, maxy = m_clipping_box.Y.Max + box_centre.Y;
                            double  minz = m_clipping_box.Z.Min + box_centre.Z, maxz = m_clipping_box.Z.Max + box_centre.Z;

                            var flags    = new bool[vertices.Length];
                            int new_size = 0;
                            for (int i = 0; i < vertices.Length; ++i)
                            {
                                if (
                                    vertices[i].X < maxx && vertices[i].X > minx &&
                                    vertices[i].Y < maxy && vertices[i].Y > miny &&
                                    vertices[i].Z < maxz && vertices[i].Z > minz
                                    )
                                {
                                    flags[i] = true;
                                    new_size++;
                                }
                            }

                            var new_vertices   = new Point3f[new_size];
                            var new_tex_coords = new Point2f[new_size];

                            for (int i = 0, j = 0; i < vertices.Length; ++i)
                            {
                                if (flags[i])
                                {
                                    new_vertices[j]   = vertices[i];
                                    new_tex_coords[j] = tex_coords[i];
                                    ++j;
                                }
                            }

                            vertices   = new_vertices;
                            tex_coords = new_tex_coords;
                        }

                        Debug.Assert(vertices.Length == tex_coords.Length);

                        var point_colors = GetPointColors(color, tex_coords);

                        RPointCloud new_pointcloud = new RPointCloud();
                        new_pointcloud.AddRange(vertices.Select(x => new Point3d(x)), point_colors);

                        lock (m_pointcloud)
                            m_pointcloud = new_pointcloud;
                    }
                }
                catch (Exception e)
                {
                    RhinoApp.WriteLine("RsToolkit: " + e.Message);
                    m_is_on = false;
                    break;
                }
            }

            RhinoApp.WriteLine("RsToolkit: Task cancelled.");

            if (pipeline != null)
            {
                pipeline.Stop();
            }
        }
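
GetPointColors is referenced above without being shown; a hedged sketch that samples the Rgb8 color frame at each texture coordinate (clamping to the frame bounds):

    private static System.Drawing.Color[] GetPointColors(VideoFrame color, Point2f[] tex_coords)
    {
        var data = new byte[color.Stride * color.Height];
        color.CopyTo(data);   // Rgb8 layout: 3 bytes per pixel

        var colors = new System.Drawing.Color[tex_coords.Length];
        for (int i = 0; i < tex_coords.Length; ++i)
        {
            int u   = Math.Min(Math.Max((int)(tex_coords[i].X * color.Width), 0), color.Width - 1);
            int v   = Math.Min(Math.Max((int)(tex_coords[i].Y * color.Height), 0), color.Height - 1);
            int idx = v * color.Stride + u * 3;
            colors[i] = System.Drawing.Color.FromArgb(data[idx], data[idx + 1], data[idx + 2]);
        }
        return colors;
    }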
        public override KinectInterop.SensorData OpenSensor(KinectManager kinectManager, KinectInterop.FrameSource dwFlags, bool bSyncDepthAndColor, bool bSyncBodyAndDepth)
        {
            // save initial parameters
            base.OpenSensor(kinectManager, dwFlags, bSyncDepthAndColor, bSyncBodyAndDepth);

            // color settings
            int colorWidth = 0, colorHeight = 0, colorFps = 0;

            ParseCameraMode(colorCameraMode.ToString(), out colorWidth, out colorHeight, out colorFps);

            // depth settings
            int depthWidth = 0, depthHeight = 0, depthFps = 0;

            ParseCameraMode(depthCameraMode.ToString(), out depthWidth, out depthHeight, out depthFps);

            try
            {
                m_pipeline = new Pipeline();

                using (Config config = new Config())
                {
                    if (deviceStreamingMode == KinectInterop.DeviceStreamingMode.PlayRecording)
                    {
                        if (string.IsNullOrEmpty(recordingFile))
                        {
                            Debug.LogError("PlayRecording selected, but the path to recording file is missing.");
                            return(null);
                        }

                        if (!System.IO.File.Exists(recordingFile))
                        {
                            Debug.LogError("PlayRecording selected, but the recording file cannot be found: " + recordingFile);
                            return(null);
                        }

                        sensorPlatform = KinectInterop.DepthSensorPlatform.RealSense;
                        sensorDeviceId = KinectInterop.GetFileName(recordingFile, false);

                        // playback from file
                        if (consoleLogMessages)
                        {
                            Debug.Log("Playing back: " + recordingFile);
                        }
                        config.EnableDeviceFromFile(recordingFile, false);
                    }
                    else
                    {
                        // get the list of available sensors
                        List <KinectInterop.SensorDeviceInfo> alSensors = GetAvailableSensors();
                        if (deviceIndex >= alSensors.Count)
                        {
                            Debug.LogError("  D" + deviceIndex + " is not available. You can set the device index to -1, to disable it.");
                            return(null);
                        }

                        // sensor serial number
                        sensorPlatform = KinectInterop.DepthSensorPlatform.RealSense;
                        sensorDeviceId = alSensors[deviceIndex].sensorId;
                        config.EnableDevice(sensorDeviceId);

                        // color
                        if ((dwFlags & KinectInterop.FrameSource.TypeColor) != 0)
                        {
                            //Debug.Log(string.Format("Color camera mode: {0} x {1} @ {2} FPS", colorWidth, colorHeight, colorFps));
                            config.EnableStream(Stream.Color, -1, colorWidth, colorHeight, Format.Rgb8, colorFps);
                        }

                        // depth
                        if ((dwFlags & KinectInterop.FrameSource.TypeDepth) != 0)
                        {
                            //Debug.Log(string.Format("Depth camera mode: {0} x {1} @ {2} FPS", depthWidth, depthHeight, depthFps));
                            config.EnableStream(Stream.Depth, -1, depthWidth, depthHeight, Format.Z16, depthFps);
                        }

                        // infrared
                        if ((dwFlags & KinectInterop.FrameSource.TypeInfrared) != 0 /**|| (dwFlags & KinectInterop.FrameSource.TypeBody) != 0*/)
                        {
                            //Debug.Log(string.Format("Infrared camera mode: {0} x {1} @ {2} FPS", depthWidth, depthHeight, depthFps));
                            config.EnableStream(Stream.Infrared, 1, depthWidth, depthHeight, Format.Y8, depthFps);
                            //config.EnableStream(Stream.Infrared, 2, depthWidth, depthHeight, Format.Y8, depthFps);
                        }

                        // pose
                        if ((dwFlags & KinectInterop.FrameSource.TypePose) != 0)
                        {
                            config.EnableStream(Stream.Pose, Format.SixDOF);
                        }

                        //// record to file
                        //if(deviceMode == KinectInterop.DepthSensorMode.CreateRecording && !string.IsNullOrEmpty(deviceFilePath))
                        //{
                        //    if (!string.IsNullOrEmpty(deviceFilePath))
                        //    {
                        //        config.EnableRecordToFile(deviceFilePath);
                        //    }
                        //    else
                        //    {
                        //        Debug.LogError("Record selected, but the path to recording file is missing.");
                        //    }
                        //}
                    }

                    activeProfile = m_pipeline.Start(config);
                }
            }
            catch (Exception ex)
            {
                Debug.LogError("RealSenseInterface: " + ex.ToString());
            }

            // check if the profile was successfully created
            if (activeProfile == null)
            {
                return(null);
            }

            KinectInterop.SensorData sensorData = new KinectInterop.SensorData();
            sensorData.sensorIntPlatform = sensorPlatform;

            // flip color & depth images vertically
            sensorData.colorImageScale    = new Vector3(-1f, -1f, 1f);
            sensorData.depthImageScale    = new Vector3(-1f, -1f, 1f);
            sensorData.infraredImageScale = new Vector3(-1f, -1f, 1f);
            sensorData.sensorSpaceScale   = new Vector3(-1f, -1f, 1f);

            // depth camera offset & matrix z-flip
            sensorRotOffset  = Vector3.zero;  // if for instance the depth camera is tilted downwards
            sensorRotFlipZ   = true;
            sensorRotIgnoreY = false;

            // color
            sensorData.colorImageWidth  = colorWidth;
            sensorData.colorImageHeight = colorHeight;

            sensorData.colorImageFormat = TextureFormat.RGB24;
            sensorData.colorImageStride = 3;  // 3 bytes per pixel

            if ((dwFlags & KinectInterop.FrameSource.TypeColor) != 0)
            {
                rawColorImage = new byte[sensorData.colorImageWidth * sensorData.colorImageHeight * 3];

                sensorData.colorImageTexture            = new Texture2D(sensorData.colorImageWidth, sensorData.colorImageHeight, TextureFormat.RGB24, false);
                sensorData.colorImageTexture.wrapMode   = TextureWrapMode.Clamp;
                sensorData.colorImageTexture.filterMode = FilterMode.Point;
            }

            // depth
            sensorData.depthImageWidth  = depthWidth;
            sensorData.depthImageHeight = depthHeight;

            if ((dwFlags & KinectInterop.FrameSource.TypeDepth) != 0)
            {
                rawDepthImage         = new ushort[sensorData.depthImageWidth * sensorData.depthImageHeight];
                sensorData.depthImage = new ushort[sensorData.depthImageWidth * sensorData.depthImageHeight];
            }

            // infrared
            if ((dwFlags & KinectInterop.FrameSource.TypeInfrared) != 0 || (dwFlags & KinectInterop.FrameSource.TypeBody) != 0)
            {
                rawInfraredImage1  = new byte[sensorData.depthImageWidth * sensorData.depthImageHeight];
                rawInfraredImage2  = new byte[sensorData.depthImageWidth * sensorData.depthImageHeight];
                rawInfraredImageBT = new ushort[sensorData.depthImageWidth * sensorData.depthImageHeight];

                rawInfraredImage         = new ushort[sensorData.depthImageWidth * sensorData.depthImageHeight];
                sensorData.infraredImage = new ushort[sensorData.depthImageWidth * sensorData.depthImageHeight];

                minInfraredValue = 0f;
                maxInfraredValue = 1000f;
            }

            if (consoleLogMessages)
            {
                Debug.Log("RealSense-sensor opened: " + sensorDeviceId);
            }

            return(sensorData);
        }
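The method above only opens the sensor and allocates the raw image buffers. The sketch below shows one way those buffers might be filled each frame; the field names (m_pipeline, rawDepthImage, rawColorImage) follow the code above, while the PollSensorFrames helper and the polling pattern itself are assumptions, not part of the original example.

        // Sketch: poll the started pipeline and copy the latest frames into the raw buffers
        private void PollSensorFrames()
        {
            FrameSet frames;
            if (m_pipeline != null && m_pipeline.PollForFrames(out frames))
            {
                using (frames)
                {
                    using (var depthFrame = frames.DepthFrame)
                    {
                        if (depthFrame != null && rawDepthImage != null)
                            depthFrame.CopyTo(rawDepthImage);   // ushort[] depth buffer (Z16)
                    }

                    using (var colorFrame = frames.ColorFrame)
                    {
                        if (colorFrame != null && rawColorImage != null)
                            colorFrame.CopyTo(rawColorImage);   // byte[] RGB24 buffer
                    }
                }
            }
        }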
 private void OnStartStreaming(PipelineProfile profile)
 {
     // keep only the most recent sample; older frames are dropped
     q = new FrameQueue(1);

     // subscribe to the frame source's new-sample callback
     Source.OnNewSample += OnNewSample;
 }
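A minimal sketch of the matching callback and consumer for the fragment above, assuming q is the FrameQueue created in OnStartStreaming; the Update-side poll follows the common RealSense Unity pattern and is not part of the original example.

 // Sketch: the callback enqueues the newest sample; the main thread polls and processes it
 private void OnNewSample(Frame frame)
 {
     // FrameQueue.Enqueue takes its own reference to the frame,
     // and with a capacity of 1 only the latest sample is kept
     q.Enqueue(frame);
 }

 void Update()
 {
     Frame frame;
     if (q != null && q.PollForFrame(out frame))
     {
         using (frame)
         {
             // process the most recent frame here (e.g. upload it to a texture)
         }
     }
 }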
Example #22
        static void Main(string[] args)
        {
            Console.WriteLine("Initializing console Skeleton Tracking sample with RealSense ... ");

            // Initialize logging to output all messages with severity level ERROR or higher to the console
            Cubemos.Api.InitialiseLogging(Cubemos.LogLevel.CM_LL_ERROR, bWriteToConsole: true);
            Cubemos.SkeletonTracking.Api skeletontrackingApi;

            // Create cubemos Skeleton tracking Api handle and specify the directory containing a cubemos_license.json file
            try
            {
                skeletontrackingApi = new Cubemos.SkeletonTracking.Api(Common.DefaultLicenseDir());
            }
            catch (Cubemos.Exception ex)
            {
                Console.WriteLine("If you haven't activated the SDK yet, please run post_installation script as described in the Getting Started Guide to activate your license.");
                Console.ReadLine();
                return;
            }

            // Initialise cubemos DNN framework with the required model
            try
            {
                skeletontrackingApi.LoadModel(Cubemos.TargetComputeDevice.CM_CPU,
                                              Common.DefaultModelDir() + "\\fp32\\skeleton-tracking.cubemos");
            }
            catch (Cubemos.Exception ex)
            {
                Console.WriteLine(String.Format("Error during model loading. " +
                                                "Please verify the model exists at the path {0}. Details: {1}", Common.DefaultModelDir() + "\\fp32\\skeleton-tracking.cubemos", ex));
                Console.ReadLine();
                return;
            }

            Console.Write("Hold the Intel RealSense with person(s) in scene and hit <ENTER>... ");
            Console.ReadLine();

            // Initialise the Intel RealSense pipeline as the acquisition device
            Pipeline pipeline = new Pipeline();
            Config   cfg      = new Config();
            Context  context  = new Intel.RealSense.Context();

            cfg.EnableStream(Intel.RealSense.Stream.Color, 1280, 720, Format.Bgr8, framerate: 30);
            PipelineProfile pp = pipeline.Start(cfg);

            // Set the network input size to 128 for faster inference
            int networkHeight = 128;

            // Acquire a single color frame and run Skeleton Tracking on it
            using (var frames = pipeline.WaitForFrames())
            {
                var frame = frames.ColorFrame.DisposeWith(frames);
                System.Drawing.Bitmap inputImage =
                    new System.Drawing.Bitmap(frame.Width,
                                              frame.Height,
                                              frame.Stride,
                                              System.Drawing.Imaging.PixelFormat.Format24bppRgb,
                                              frame.Data);

                System.Collections.Generic.List <Cubemos.SkeletonTracking.Api.SkeletonKeypoints> skeletonKeypoints;

                // Send inference request and get the skeletons
                skeletontrackingApi.RunSkeletonTracking(ref inputImage, networkHeight, out skeletonKeypoints);

                // Output detected skeletons
                Console.WriteLine("# Persons detected: " + skeletonKeypoints.Count);
                for (int skeleton_index = 0; skeleton_index < skeletonKeypoints.Count; skeleton_index++)
                {
                    var skeleton = skeletonKeypoints[skeleton_index];
                    Console.WriteLine("Skeleton #" + skeleton_index);
                    for (int joint_index = 0; joint_index < skeleton.listJoints.Count; joint_index++)
                    {
                        Cubemos.SkeletonTracking.Api.Coordinate coordinate = skeleton.listJoints[joint_index];
                        Console.WriteLine("\tJoint coordinate #" + joint_index + ": " + coordinate.x + "; " +
                                          coordinate.y);
                    }
                }
            }
            Console.Write("Press <Enter> to exit... ");
            Console.ReadLine();
        }
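The sample above runs inference on a single color frame. As a sketch only, the single-frame block inside Main could be replaced by a loop along the following lines, reusing the locals defined above (pipeline, skeletontrackingApi, networkHeight); none of this is part of the original sample.

            // Sketch: run skeleton tracking on every incoming color frame until a key is pressed
            while (!Console.KeyAvailable)
            {
                using (var frames = pipeline.WaitForFrames())
                using (var colorFrame = frames.ColorFrame)
                {
                    var image = new System.Drawing.Bitmap(colorFrame.Width,
                                                          colorFrame.Height,
                                                          colorFrame.Stride,
                                                          System.Drawing.Imaging.PixelFormat.Format24bppRgb,
                                                          colorFrame.Data);

                    System.Collections.Generic.List<Cubemos.SkeletonTracking.Api.SkeletonKeypoints> skeletons;
                    skeletontrackingApi.RunSkeletonTracking(ref image, networkHeight, out skeletons);

                    Console.WriteLine("# Persons detected: " + skeletons.Count);
                }
            }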