private void OnStartStreaming(PipelineProfile obj)
    {
        // Previously: obj.Streams.FirstOrDefault(s => s.Stream == Stream.Depth) as VideoStreamProfile
        var ds = obj.GetStream <VideoStreamProfile>(Stream.Depth, -1);

        if (ds != null)
        {
            depthIntrinsics = ds.GetIntrinsics();
            OnDepthCalibrationInit.Invoke(depthIntrinsics);
        }

        // Previously: obj.Streams.FirstOrDefault(s => s.Stream == Stream.Color) as VideoStreamProfile
        var cs = obj.GetStream <VideoStreamProfile>(Stream.Color, -1);

        if (cs != null)
        {
            colorIntrinsics = cs.GetIntrinsics();
            OnColorCalibrationInit.Invoke(colorIntrinsics);
        }

        if (ds != null && cs != null)
        {
            depthToColorExtrinsics = ds.GetExtrinsicsTo(cs);
            OnDepthToColorCalibrationInit.Invoke(depthToColorExtrinsics);
        }

        // Align the Unity camera's vertical FOV with the color stream intrinsics
        if (targetCamera && cs != null)
        {
            // fovY = 2 * atan((height / 2) / fy), converted to degrees
            targetCamera.fieldOfView = Mathf.Rad2Deg * 2 * Mathf.Atan2(colorIntrinsics.height / 2.0f, colorIntrinsics.fy);
        }
    }
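
Note: the field-of-view expression above follows from the pinhole camera model, where the vertical FOV is 2 * atan((height / 2) / fy). A minimal sketch computing both axes from any Intrinsics value (the helper name is illustrative, not part of the sample):

    static void ComputeFov(Intrinsics intr, out float hFovDeg, out float vFovDeg)
    {
        // Pinhole model: half the image extent over the focal length gives the half-angle
        hFovDeg = Mathf.Rad2Deg * 2f * Mathf.Atan2(intr.width / 2.0f, intr.fx);
        vFovDeg = Mathf.Rad2Deg * 2f * Mathf.Atan2(intr.height / 2.0f, intr.fy);
    }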
        private void Init()
        {
            using Context ctx = new Context();
            var devices = ctx.QueryDevices();

            Console.WriteLine($"Found {devices.Count} RealSense devices connected.");
            if (devices.Count == 0)
            {
                throw new Exception("No RealSense device detected!");
            }

            Device dev = devices[0];

            Console.WriteLine($"Using device 0: {dev.Info[CameraInfo.Name]}");
            Console.WriteLine("Device Sources:");

            foreach (Sensor sensor in dev.Sensors)
            {
                Console.WriteLine($"Sensor found: {sensor.Info[CameraInfo.Name]}");
            }
            var cfg = new Config();

            cfg.EnableStream(Stream.Depth);
            cfg.EnableStream(Stream.Color, Format.Bgr8);

            intelPipe = new Intel.RealSense.Pipeline();
            PipelineProfile profileIntelPipe = intelPipe.Start(cfg);
            var             streamDepth      = profileIntelPipe.GetStream <VideoStreamProfile>(Stream.Depth);

            sicsDepth = streamDepth.GetIntrinsics();
            Console.WriteLine($"Depth Stream: {sicsDepth.width}x{sicsDepth.height}");

            var streamRGB = profileIntelPipe.GetStream <VideoStreamProfile>(Stream.Color);

            sicsRBG = streamRGB.GetIntrinsics();
            Console.WriteLine($"RGB Stream: {sicsRBG.width}x{sicsRBG.height}");

            Task.Run(() =>
            {
                while (true)
                {
                    try
                    {
                        using FrameSet frames = intelPipe.WaitForFrames();
                        using Frame frDepth   = frames.FirstOrDefault(Stream.Depth);
                        qDepth.Enqueue(frDepth);
                        using Frame frRBG = frames.FirstOrDefault(Stream.Color);
                        qRBG.Enqueue(frRBG);
                    }
                    catch (Exception e)
                    {
                        Console.WriteLine(e.Message);
                    }
                }
            });
        }
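
Assuming qDepth and qRBG are Intel.RealSense.FrameQueue instances (FrameQueue.Enqueue adds its own reference, which is why enqueueing inside a using block above is safe), a consumer could drain them like this sketch:

            // Consumer-side sketch: poll the queue filled by the capture task above
            if (qDepth.PollForFrame(out Frame depthFrame))
            {
                using (depthFrame)
                {
                    // ... process the depth frame ...
                }
            }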
Example #3
        private void SetupWindow(PipelineProfile pipelineProfile, out Action <VideoFrame> depth, out Action <VideoFrame> color)
        {
            using (var p = pipelineProfile.GetStream(Stream.Depth).As <VideoStreamProfile>())
                imgDepth.Source = new WriteableBitmap(p.Width, p.Height, 96d, 96d, PixelFormats.Rgb24, null);
            depth = UpdateDepthImage(imgDepth);

            using (var p = pipelineProfile.GetStream(Stream.Color).As <VideoStreamProfile>())
                imgColor.Source = new WriteableBitmap(p.Width, p.Height, 96d, 96d, PixelFormats.Rgb24, null);
            color = UpdateImage(imgColor);
        }
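
UpdateDepthImage and UpdateImage are not shown in this example; a plausible implementation, mirroring the librealsense WPF samples, writes each frame's bytes into the WriteableBitmap created above:

        static Action <VideoFrame> UpdateImage(System.Windows.Controls.Image img)
        {
            var wbmp = img.Source as WriteableBitmap;
            return new Action <VideoFrame>(frame =>
            {
                // Copy the raw frame buffer into the bitmap backing the Image control
                var rect = new Int32Rect(0, 0, frame.Width, frame.Height);
                wbmp.WritePixels(rect, frame.Data, frame.Stride * frame.Height, frame.Stride);
            });
        }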
Example #4
        /// <summary>
        /// Open the RealSense camera and start acquiring frames
        /// </summary>
        internal void Open(out Image <Bgr, byte> ColorImg,
                           out Image <Rgb, byte> DepthImg,
                           out Image <Rgb, byte> FilteredImg,
                           out VideoFrame color,
                           out DepthFrame depth,
                           out Frame filtered)
        {
            DepthImg = null; ColorImg = null; FilteredImg = null; color = null; depth = null; filtered = null;
            if (CamState != CameraState.Opened)
            {
                PipelineProfile = Camera.Start(cfg);                                                            // Configure with cfg and start streaming
                vsp             = PipelineProfile.GetStream <VideoStreamProfile>(Intel.RealSense.Stream.Depth); // Get the intrinsic parameters
                intrinsics      = vsp.GetIntrinsics();
                sp         = PipelineProfile.GetStream(Intel.RealSense.Stream.Color);                           // Get the extrinsic parameters
                extrinsics = vsp.GetExtrinsicsTo(sp);
                CamState   = CameraState.Opened;                                                                // Update the camera state
            }
            else
            {
                try
                {
                    FrameSet frames = Camera.WaitForFrames();
                    depth    = frames.DepthFrame.DisposeWith(frames);
                    color    = frames.ColorFrame.DisposeWith(frames);
                    filtered = depth;
                    if (depth != null)
                    {
                        //Thres_Filter.Options[Option.MinDistance].Value = float.Parse(form1.textBox2.Text);
                        //Thres_Filter.Options[Option.MaxDistance].Value = float.Parse(form1.textBox1.Text);
                        //filtered = Thres_Filter.Process(filtered);

                        //Spa_Filter.Options[Option.FilterMagnitude].Value = 1;
                        //Spa_Filter.Options[Option.FilterSmoothAlpha].Value = 0.6f;
                        //Spa_Filter.Options[Option.FilterSmoothDelta].Value = 8;
                        //filtered = Spa_Filter.Process(filtered);

                        Temp_Filter.Options[Option.FilterSmoothAlpha].Value = 0.5f;
                        Temp_Filter.Options[Option.FilterSmoothDelta].Value = 20;
                        Temp_Filter.Options[Option.HolesFill].Value         = 2;
                        filtered = Temp_Filter.Process(filtered);

                        depColor      = colorizer.Colorize(depth);
                        filteredColor = colorizer.Colorize(filtered);

                        ColorImg    = new Image <Bgr, byte>(color.Width, color.Height, color.Stride, color.Data);
                        DepthImg    = new Image <Rgb, byte>(depColor.Width, depColor.Height, depColor.Stride, depColor.Data);
                        FilteredImg = new Image <Rgb, byte>(filteredColor.Width, filteredColor.Height, filteredColor.Stride, filteredColor.Data);
                    }
                }
                catch
                {
                    throw; // rethrow, preserving the original stack trace
                }
            }
        }
        private void SetupWindow(out Action <VideoFrame> depth, out Action <VideoFrame> color, PipelineProfile pp)
        {
            //Display Depth
            using (VideoStreamProfile p = pp.GetStream(Intel.RealSense.Stream.Depth).As <VideoStreamProfile>())
                imgDepth.Source = new WriteableBitmap(p.Width, p.Height, 96d, 96d, PixelFormats.Rgb24, null);
            depth = UpdateImage(imgDepth);

            //Display Color
            using (VideoStreamProfile p = pp.GetStream(Intel.RealSense.Stream.Color).As <VideoStreamProfile>())
                imgColor.Source = new WriteableBitmap(p.Width, p.Height, 96d, 96d, PixelFormats.Rgb24, null);
            color = UpdateImage(imgColor);
        }
        private void StartProcessingBlock(Action <VideoFrame> depth, Action <VideoFrame> color)
        {
            processingBlock.Start(f =>
            {
                using (var frames = FrameSet.FromFrame(f))
                {
                    VideoFrame colorFrame     = frames.ColorFrame.DisposeWith(frames);
                    DepthFrame depthFrame     = frames.DepthFrame.DisposeWith(frames);
                    VideoFrame colorizedDepth = colorizer.Process <VideoFrame>(depthFrame).DisposeWith(frames);

                    Dispatcher.Invoke(DispatcherPriority.Render, depth, colorizedDepth);
                    Dispatcher.Invoke(DispatcherPriority.Render, color, colorFrame);
                }
            });

            Task.Factory.StartNew(() =>
            {
                while (true)
                {
                    using (var frames = pipeline.WaitForFrames())
                    {
                        // Invoke custom processing block
                        processingBlock.Process(frames);
                    }
                }
            });
        }
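
The processingBlock used here is a CustomProcessingBlock created elsewhere; for reference, the wrapper's CustomProcessingBlock is constructed from a callback that receives each frame plus a FrameSource used to emit results. A minimal sketch (the body is illustrative, not the original helper):

            var block = new CustomProcessingBlock((f, src) =>
            {
                using (var frames = FrameSet.FromFrame(f))
                {
                    // apply filters here, then hand the (possibly modified) frameset back to subscribers
                    src.FrameReady(frames);
                }
            });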
    protected virtual void OnStartStreaming(PipelineProfile activeProfile)
    {
        using (var profile = activeProfile.GetStream(sourceStreamType))
        {
            var videoProfile = profile as VideoStreamProfile;
            if (videoProfile == null)
            {
                Debug.LogWarningFormat("{0} not in active profile", sourceStreamType);
                return;
            }

            if (texture != null)
            {
                Destroy(texture);
            }

            texture = new Texture2D(videoProfile.Width, videoProfile.Height, textureFormat, false, true)
            {
                wrapMode   = TextureWrapMode.Clamp,
                filterMode = filterMode
            };
            texture.Apply();
            textureBinding.Invoke(texture);
        }
        if (realSenseDevice.processMode == RealSenseDevice.ProcessMode.UnityThread)
        {
            UnityEngine.Assertions.Assert.AreEqual(threadId, Thread.CurrentThread.ManagedThreadId);
            realSenseDevice.onNewSample += OnNewSampleUnityThread;
        }
        else
        {
            realSenseDevice.onNewSample += OnNewSampleThreading;
        }
    }
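
onNewSample delivers raw frames to the handlers registered above; a minimal upload handler, assuming the incoming frame is a VideoFrame whose size and format match the texture (frame typing varies between wrapper versions):

    private void OnNewSampleUnityThread(Frame frame)
    {
        var videoFrame = frame as VideoFrame;
        if (videoFrame == null)
            return;
        // Copy the raw pixel buffer into the texture and push it to the GPU
        texture.LoadRawTextureData(videoFrame.Data, videoFrame.Stride * videoFrame.Height);
        texture.Apply();
    }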
        private void SetupWindow(PipelineProfile pipelineProfile, out Action <VideoFrame> depth, out Action <VideoFrame> color, out Action <VideoFrame> ir1, out Action <VideoFrame> ir2)
        {
            using (var p = pipelineProfile.GetStream(Intel.RealSense.Stream.Depth).As <VideoStreamProfile>())
                imgDepth.Source = new WriteableBitmap(p.Width, p.Height, 96d, 96d, PixelFormats.Rgb24, null);
            depth = UpdateImage(imgDepth);

            using (var p = pipelineProfile.GetStream(Intel.RealSense.Stream.Color).As <VideoStreamProfile>())
                imgColor.Source = new WriteableBitmap(p.Width, p.Height, 96d, 96d, PixelFormats.Rgb24, null);
            color = UpdateImage(imgColor);

            using (var p = pipelineProfile.GetStream(Intel.RealSense.Stream.Infrared, 1).As <VideoStreamProfile>()) // left IR is index 1
                imgIRleft.Source = new WriteableBitmap(p.Width, p.Height, 96d, 96d, PixelFormats.Rgb24, null);
            ir1 = UpdateImage(imgIRleft);

            using (var p = pipelineProfile.GetStream(Intel.RealSense.Stream.Infrared, 2).As <VideoStreamProfile>()) // right IR is index 2
                imgIRright.Source = new WriteableBitmap(p.Width, p.Height, 96d, 96d, PixelFormats.Rgb24, null);
            ir2 = UpdateImage(imgIRright);


            //using (var p = pipelineProfile.GetStream(Stream.Color).As<VideoStreamProfile>())
            //    ip.Source = new WriteableBitmap(p.Width, p.Height, 96d, 96d, PixelFormats.Rgb24, null);
            //iproc = UpdateImage(ip);
        }
Example #9
        private CustomProcessingBlock StartProcessingBlock(Pipeline pipe, PipelineProfile pp)
        {
            SetupFilters(out Colorizer colorizer, out DecimationFilter decimate, out SpatialFilter spatial, out TemporalFilter temp, out HoleFillingFilter holeFill, out Align align_to);

            CustomProcessingBlock processingBlock = SetupProcessingBlock(pipe, colorizer, decimate, spatial, temp, holeFill, align_to);

            processingBlock.Start(f =>
            {
                using (var frames2 = FrameSet.FromFrame(f))
                {
                    var depthintr   = (pp.GetStream(Stream.Depth) as VideoStreamProfile).GetIntrinsics();
                    var depth_frame = frames2.DepthFrame.DisposeWith(frames2);
                    float udist     = depth_frame.GetDistance(ePoint.X, ePoint.Y); //From
                    var point       = DeprojectPixelToPoint(depthintr, new PointF(ePoint.X, ePoint.Y), udist);
                    Console.WriteLine($"({point[0]},{point[1]},{point[2]})");
                }
            });
            return processingBlock;
        }
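
DeprojectPixelToPoint mirrors librealsense's rs2_deproject_pixel_to_point; for the undistorted case it reduces to the pinhole back-projection below (a sketch ignoring distortion models):

        static float[] DeprojectPixelToPoint(Intrinsics intrin, PointF pixel, float depth)
        {
            // Normalize by the principal point and focal lengths, then scale by depth (meters)
            float x = (pixel.X - intrin.ppx) / intrin.fx;
            float y = (pixel.Y - intrin.ppy) / intrin.fy;
            return new float[] { depth * x, depth * y, depth };
        }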
Example #10
        private void CameraStart()
        {
            // Setup config settings
            var cfg = new Config();

            cfg.EnableStream(Stream.Depth, 1280, 720, Format.Z16, 30);
            cfg.EnableStream(Stream.Color, 1280, 720, Format.Bgr8, 30);

            // Pipeline start
            Pipeline        pipeline = new Pipeline();
            PipelineProfile pp       = pipeline.Start(cfg);

            using (var p = pp.GetStream(Stream.Color) as VideoStreamProfile)
                Img_main.Source = new WriteableBitmap(p.Width, p.Height, 96d, 96d, System.Windows.Media.PixelFormats.Rgb24, null);
            Action <VideoFrame> updateColor = UpdateImage(Img_main);

            // Setup filter / alignment settings
            SetupFilters(out Colorizer colorizer, out DecimationFilter decimate, out SpatialFilter spatial, out TemporalFilter temp, out HoleFillingFilter holeFill, out Align align_to);
            // Setup frame processing
            CustomProcessingBlock processingBlock = SetupProcessingBlock(pipeline, colorizer, decimate, spatial, temp, holeFill, align_to);

            // Start frame processing
            StartProcessingBlock(processingBlock, pp, updateColor, pipeline);
        }
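
SetupFilters is not shown in this example; a plausible body just instantiates the post-processing blocks the later stages expect (the alignment target is an assumption):

        private void SetupFilters(out Colorizer colorizer, out DecimationFilter decimate, out SpatialFilter spatial, out TemporalFilter temp, out HoleFillingFilter holeFill, out Align align_to)
        {
            colorizer = new Colorizer();
            decimate  = new DecimationFilter();
            spatial   = new SpatialFilter();
            temp      = new TemporalFilter();
            holeFill  = new HoleFillingFilter();
            align_to  = new Align(Stream.Color); // align depth onto the color stream
        }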
Example #11
    private void OnStartStreaming(PipelineProfile activeProfile)
    {
        pc = new PointCloud();

        using (var profile = activeProfile.GetStream(stream))
        {
            if (profile == null)
            {
                Debug.LogWarningFormat("Stream {0} not in active profile", stream);
            }
        }

        using (var profile = activeProfile.GetStream(Stream.Depth) as VideoStreamProfile)
        {
            intrinsics = profile.GetIntrinsics();

            Assert.IsTrue(SystemInfo.SupportsTextureFormat(TextureFormat.RGFloat));
            uvmap = new Texture2D(profile.Width, profile.Height, TextureFormat.RGFloat, false, true)
            {
                wrapMode   = TextureWrapMode.Clamp,
                filterMode = FilterMode.Point,
            };
            GetComponent <MeshRenderer>().sharedMaterial.SetTexture("_UVMap", uvmap);

            if (mesh != null)
            {
                Destroy(mesh);
            }

            mesh = new Mesh()
            {
                indexFormat = IndexFormat.UInt32,
            };

            vertices    = new Vector3[profile.Width * profile.Height];
            handle      = GCHandle.Alloc(vertices, GCHandleType.Pinned);
            verticesPtr = handle.AddrOfPinnedObject();

            var indices = new int[vertices.Length];
            for (int i = 0; i < vertices.Length; i++)
            {
                indices[i] = i;
            }

            mesh.MarkDynamic();
            mesh.vertices = vertices;

            var uvs = new Vector2[vertices.Length];
            Array.Clear(uvs, 0, uvs.Length);
            var invSize = new Vector2(1f / profile.Width, 1f / profile.Height);
            for (int j = 0; j < profile.Height; j++)
            {
                for (int i = 0; i < profile.Width; i++)
                {
                    uvs[i + j * profile.Width].x = i * invSize.x;
                    uvs[i + j * profile.Width].y = j * invSize.y;
                }
            }

            mesh.uv = uvs;

            mesh.SetIndices(indices, MeshTopology.Points, 0, false);
            mesh.bounds = new Bounds(Vector3.zero, Vector3.one * 10f);

            GetComponent <MeshFilter>().sharedMesh = mesh;
        }

        RealSenseDevice.Instance.onNewSampleSet += OnFrames;
    }
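
OnFrames is where the PointCloud fills the pinned vertex array; a sketch of the usual pattern from the Unity samples (exact calls vary by wrapper version, and the uvmap texture update is omitted for brevity):

    private void OnFrames(FrameSet frames)
    {
        using (var depthFrame = frames.DepthFrame)
        using (var points = pc.Process(depthFrame).As <Points>())
        {
            // Copy XYZ vertices into the pinned managed array and re-upload the mesh
            points.CopyVertices(vertices);
            mesh.vertices = vertices;
            mesh.UploadMeshData(false);
        }
    }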
Example #12
        private void StartProcessingBlock(CustomProcessingBlock processingBlock, PipelineProfile pp, Action <VideoFrame> updateColor, Pipeline pipeline)
        {
            float[,,] posMap = new float[1280, 720, 3];

            Size RS_depthSize = new Size(1280, 720);
            Mat  processMat   = new Mat(RS_depthSize, DepthType.Cv8U, 3);

            processingBlock.Start(f =>
            {
                using (var frames = FrameSet.FromFrame(f))
                {
                    //var color_frame = frames.ColorFrame.DisposeWith(frames);
                    //color_frame.CopyTo(processMat.DataPointer);

                    var depthintr   = (pp.GetStream(Stream.Depth) as VideoStreamProfile).GetIntrinsics();
                    var depth_frame = frames.DepthFrame.DisposeWith(frames);

                    //float depth = depth_frame.GetDistance((int)thePoint.X,(int)thePoint.Y); //From
                    //thePos = HelperClass.DeprojectPixelToPoint(depthintr, thePoint, depth);

                    unsafe
                    {
                        // Z16 depth: 16-bit depth values in millimeters
                        Int16 *pixelPtr = (Int16 *)depth_frame.Data;
                        for (int i = 0; i < 1280; i++)
                        {
                            for (int j = 0; j < 720; j++)
                            {
                                var tmpF        = HelperClass.DeprojectPixelToPoint(depthintr, new PointF(i, j), (float)pixelPtr[j * 1280 + i] / 1000f);
                                posMap[i, j, 0] = tmpF[0];
                                posMap[i, j, 1] = tmpF[1];
                                posMap[i, j, 2] = tmpF[2];
                            }
                        }
                    }
                    // Dispatcher.Invoke(DispatcherPriority.Render, updateColor, color_frame); // for display
                }
            });


            // start the acquisition and detection loop
            var token = _tokenSource.Token;
            Action <VideoFrame> updateOriginColor = UpdateImage(Img_main);
            var t = Task.Factory.StartNew(() =>
            {
                Mat color_orig   = new Mat(RS_depthSize, DepthType.Cv8U, 3);
                Mat color_resize = new Mat(RS_depthSize, DepthType.Cv8U, 3);

                yoloWrapper = new YoloWrapper("modle\\yolov3-tiny-3obj.cfg", "modle\\yolov3-tiny-3obj_3cup.weights", "modle\\obj.names");
                string detectionSystemDetail = string.Empty;
                if (!string.IsNullOrEmpty(yoloWrapper.EnvironmentReport.GraphicDeviceName))
                {
                    detectionSystemDetail = $"({yoloWrapper.EnvironmentReport.GraphicDeviceName})";
                }
                Console.WriteLine($"Detection System:{yoloWrapper.DetectionSystem}{detectionSystemDetail}");

                while (!token.IsCancellationRequested)
                {
                    using (var frames = pipeline.WaitForFrames())
                    {
                        if (showType == imgType.color)
                        {
                            VideoFrame color_frame = frames.ColorFrame.DisposeWith(frames);
                            color_frame.CopyTo(color_orig.DataPointer);

                            CvInvoke.WarpPerspective(color_orig, color_resize, matrix, new Size(1280, 720));


                            //CvInvoke.Imwrite("yolo1.png", color_resize);
                            //try { items = yoloWrapper.Detect(@"yolo1.png"); }
                            //catch { break; }
                            CvInvoke.Imwrite("yolo2.png", color_orig);
                            try { items = yoloWrapper.Detect(@"yolo2.png"); }
                            catch { break; }

                            CvInvoke.CvtColor(color_resize, color_resize, ColorConversion.Bgr2Rgb);
                            processingBlock.ProcessFrames(frames);

                            foreach (YoloItem item in items)
                            {
                                string name = item.Type;
                                //int x = item.X;
                                //int y = item.Y;
                                mapToRsImg(item.X, item.Y, out int x, out int y);
                                mapToRsImg(item.X + item.Width, item.Y + item.Height, out int x2, out int y2);

                                //int H = item.Height;
                                //int W = item.Width;
                                int H = y2 - y;
                                int W = x2 - x;

                                mapToRsImg(item.Center().X, item.Center().Y, out int Cx, out int Cy);
                                //Point center = item.Center();
                                Point center = new Point(Cx, Cy);

                                int evilLine   = 500;
                                int evilLinex1 = 580;
                                int evilLinex2 = 660;
                                if (showEvilLine)
                                {
                                    CvInvoke.Line(color_resize, new Point(0, evilLine), new Point(1280, evilLine), new MCvScalar(100, 100, 250), 2);         // above this line the object is likely at the coffee machine
                                    CvInvoke.Line(color_resize, new Point(evilLinex1, 0), new Point(evilLinex1, evilLine), new MCvScalar(100, 100, 250), 2);
                                    CvInvoke.Line(color_resize, new Point(evilLinex2, 0), new Point(evilLinex2, evilLine), new MCvScalar(100, 100, 250), 2);
                                }

                                if (y > evilLine || x < evilLinex1 || x > evilLinex2) // i.e. not near the coffee machine
                                {
                                    if (item.Confidence < 0.5) // skip low-confidence detections to avoid picking up other objects
                                    {
                                        continue;
                                    }
                                }


                                float[] objPos = new float[] { posMap[center.X, center.Y, 0], posMap[center.X, center.Y, 1], posMap[center.X, center.Y, 2] };

                                if (objPos[0] == 0 || objPos[1] == 0 || objPos[2] == 0) // zeros would skew the position average
                                {
                                    continue;
                                }

                                if (name == "blue cup")//index 0
                                {
                                    //evil color check
                                    MCvScalar clr = MyInvoke.GetColorM(color_resize, center.Y - 5, center.X);
                                    if (clr.V0 > clr.V2) // R > B means YOLO misclassified the cup
                                    {
                                        continue;
                                    }

                                    //  CvInvoke.PutText(color_resize, "B", new Point(x, y), FontFace.HersheySimplex, 1.2, new MCvScalar(80, 150, 220), 2);
                                    CvInvoke.PutText(color_resize, item.Confidence.ToString("0.0"), new Point(x, y), FontFace.HersheySimplex, 1.2, new MCvScalar(80, 150, 220), 3);
                                    CvInvoke.Rectangle(color_resize, new Rectangle(x, y, W, H), new MCvScalar(80, 150, 220), 3);

                                    process_actionOfCups(cups[0], mat_cup, TB_Bcup_msg, TB_Bcup_state, objPos, 10, 40);
                                    //this.Dispatcher.Invoke((Action)(() => { TB_Bcup.Text = $"({posMap[center.X, center.Y, 0].ToString("0.000")},{posMap[center.X, center.Y, 1].ToString("0.000")},{posMap[center.X, center.Y, 2].ToString("0.000")})"; }));
                                    CvInvoke.Circle(color_resize, center, 10, new MCvScalar(200, 200, 20), -1);
                                }
                                else if (name == "pink cup")//index 1
                                {
                                    //evil color check
                                    MCvScalar clr = MyInvoke.GetColorM(color_resize, center.Y - 5, center.X);
                                    if (clr.V0 < clr.V2) // R < B means YOLO misclassified the cup
                                    {
                                        continue;
                                    }

                                    // CvInvoke.PutText(color_resize, "P", new Point(x, y), FontFace.HersheySimplex, 1.2, new MCvScalar(250, 80, 80), 2);
                                    CvInvoke.PutText(color_resize, item.Confidence.ToString("0.0"), new Point(x, y), FontFace.HersheySimplex, 1.2, new MCvScalar(250, 80, 80), 3);
                                    CvInvoke.Rectangle(color_resize, new Rectangle(x, y, W, H), new MCvScalar(250, 80, 80), 3);

                                    process_actionOfCups(cups[1], mat_cup, TB_Pcup_msg, TB_Pcup_state, objPos, 60, 90);
                                    //this.Dispatcher.Invoke((Action)(() => { TB_Pcup.Text = $"({posMap[center.X, center.Y, 0].ToString("0.000")},{posMap[center.X, center.Y, 1].ToString("0.000")},{posMap[center.X, center.Y, 2].ToString("0.000")})"; }));
                                    CvInvoke.Circle(color_resize, center, 10, new MCvScalar(200, 200, 20), -1);
                                }
                            } // end foreach over detected cups
                            timeTick++;
                            this.Dispatcher.Invoke((Action)(() =>
                            {
                                img_cupState.Source = BitmapSourceConvert.ToBitmapSource(mat_cup);
                            }));
                            color_frame.CopyFrom(color_resize.DataPointer);
                            Dispatcher.Invoke(DispatcherPriority.Render, updateOriginColor, color_frame);
                        }
                        else if (showType == imgType.mix) // show the mixed view
                        {
                            processingBlock.ProcessFrames(frames);
                        }
                    }
                }
            }, token);
        }
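
mapToRsImg is defined elsewhere; since the displayed image is produced by WarpPerspective with the same matrix, a hypothetical implementation applies that homography to a single point (name, direction, and types assumed):

            private void mapToRsImg(int srcX, int srcY, out int dstX, out int dstY)
            {
                // Map a detection coordinate through the same homography used for display
                PointF[] mapped = CvInvoke.PerspectiveTransform(new[] { new PointF(srcX, srcY) }, matrix);
                dstX = (int)mapped[0].X;
                dstY = (int)mapped[0].Y;
            }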
Example #13
    private void OnStartStreaming(PipelineProfile activeProfile)
    {
        pc          = new PointCloud();
        spatial     = new SpatialFilter();
        temporal    = new TemporalFilter();
        holeFilling = new HoleFillingFilter();

        using (var profile = activeProfile.GetStream(stream))
        {
            if (profile == null)
            {
                Debug.LogWarningFormat("Stream {0} not in active profile", stream);
            }
        }

        using (var profile = activeProfile.GetStream(Stream.Depth) as VideoStreamProfile)
        {
            intrinsics = profile.GetIntrinsics();

            Assert.IsTrue(SystemInfo.SupportsTextureFormat(TextureFormat.RGFloat));


            numParticles = (profile.Width - 1) * (profile.Height - 1) * 2;

            vertices    = new Vector3[profile.Width * profile.Height];
            handle      = GCHandle.Alloc(vertices, GCHandleType.Pinned);
            verticesPtr = handle.AddrOfPinnedObject();

            var indices = new int[(profile.Width - 1) * (profile.Height - 1) * 6];

            var iIdx = 0;
            for (int j = 0; j < profile.Height; j++)
            {
                for (int i = 0; i < profile.Width; i++)
                {
                    if (i < profile.Width - 1 && j < profile.Height - 1)
                    {
                        var idx = i + j * profile.Width;
                        var y   = profile.Width; // row stride: offset to the pixel one row down
                        indices[iIdx++] = idx + 0;
                        indices[iIdx++] = idx + y;
                        indices[iIdx++] = idx + 1;

                        indices[iIdx++] = idx + 1;
                        indices[iIdx++] = idx + y;
                        indices[iIdx++] = idx + y + 1;
                    }
                }
            }

            particleBuffer = new ComputeBuffer(numParticles, Marshal.SizeOf(typeof(VoxelParticle)));
            vertexBuffer   = new ComputeBuffer(vertices.Length, sizeof(float) * 3);
            indicesBuffer  = new ComputeBuffer(indices.Length, sizeof(int));

            vertexBuffer.SetData(vertices);
            indicesBuffer.SetData(indices);
            renderer.SetBuffer("_VoxelBuffer", particleBuffer);

            ResetParticle();

            if (mesh != null)
            {
                Destroy(mesh);
            }

            mesh = new Mesh()
            {
                indexFormat = IndexFormat.UInt32,
            };
            mesh.MarkDynamic();

            mesh.vertices = new Vector3[numParticles];
            var newIndices = Enumerable.Range(0, numParticles).ToArray();

            mesh.SetIndices(newIndices, MeshTopology.Points, 0, false);
            mesh.bounds = new Bounds(Vector3.zero, Vector3.one * 10f);

            GetComponent <MeshFilter>().sharedMesh = mesh;
        }

        RealSenseDevice.Instance.onNewSampleSet += OnFrames;
    }
        void Init()
        {
            try
            {
                #region FILTERS

                spatialFilter = new SpatialFilter();
                spatialFilter.Options[Option.FilterMagnitude].Value   = 5.0F;
                spatialFilter.Options[Option.FilterSmoothAlpha].Value = 0.25F;
                spatialFilter.Options[Option.FilterSmoothDelta].Value = 50.0F;

                decimationFilter = new DecimationFilter();
                decimationFilter.Options[Option.FilterMagnitude].Value = 2.0F;

                holeFilter = new HoleFillingFilter();

                thresholdFilter = new ThresholdFilter();
                //thresholdFilter.Options[Option.MinDistance].Value = 0.73F;
                //thresholdFilter.Options[Option.MaxDistance].Value = 0.81F;

                #endregion

                align_to  = new Align(Intel.RealSense.Stream.Depth);
                colorizer = new Colorizer();
                pipeline  = new Pipeline();

                //CONFIG SETTINGS
                var cfg = new Config();
                cfg.EnableStream(Intel.RealSense.Stream.Depth, resolutionW, resolutionH, Format.Z16, FPS); // depth resolution is set manually by the user
                cfg.EnableStream(Intel.RealSense.Stream.Color, 640, 480, Format.Rgb8, 30);
                pipelineProfile = pipeline.Start(cfg);                                                     // start streaming with the user's config

                var advancedDevice = AdvancedDevice.FromDevice(pipelineProfile.Device);                    //connected device
                //read device's configuration settings from json file
                advancedDevice.JsonConfiguration = File.ReadAllText(@"CustomConfig.json");
                selectedDevice = pipelineProfile.Device;

                #region Field Of View Info

                float[] dfov, cfov, irfov;

                var        depth_stream = pipelineProfile.GetStream <VideoStreamProfile>(Intel.RealSense.Stream.Depth);
                Intrinsics depthIntr    = depth_stream.GetIntrinsics();
                dfov = depthIntr.FOV; // float[2] - horizontal and vertical field of view in degrees

                var        color_stream = pipelineProfile.GetStream <VideoStreamProfile>(Intel.RealSense.Stream.Color);
                Intrinsics colorIntr    = color_stream.GetIntrinsics();
                cfov = colorIntr.FOV; // float[2] - horizontal and vertical field of view in degrees

                var        ir_stream = pipelineProfile.GetStream <VideoStreamProfile>(Intel.RealSense.Stream.Infrared);
                Intrinsics irIntr    = ir_stream.GetIntrinsics();
                irfov = irIntr.FOV; // float[2] - horizontal and vertical field of view in degrees

                lblDepthFov.Text    = "Depth FOV : " + "H = " + Convert.ToInt32(dfov[0]).ToString() + "° , " + "V = " + Convert.ToInt32(dfov[1]).ToString() + "°";
                lblColorFov.Text    = "RGB FOV   : " + "H = " + Convert.ToInt32(cfov[0]).ToString() + "° , " + "V = " + Convert.ToInt32(cfov[1]).ToString() + "°";
                lblInfraredFov.Text = "IR FOV   : " + "H = " + Convert.ToInt32(irfov[0]).ToString() + "° , " + "V = " + Convert.ToInt32(irfov[1]).ToString() + "°";


                #endregion


                // get the primary screen resolution
                screenWidth  = (int)System.Windows.SystemParameters.PrimaryScreenWidth;
                screenHeight = (int)System.Windows.SystemParameters.PrimaryScreenHeight;

                // the camera is running; wire the streams up to the UI
                SetupWindow(pipelineProfile, out updateDepth, out updateColor, out updateIR1, out updateIR2);
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
            }
        }
Example #15
    private void OnStartStreaming(PipelineProfile obj)
    {
        q = new FrameQueue(1);

        using (var depth = obj.GetStream(Stream.Depth) as VideoStreamProfile)
        {
            Assert.IsTrue(SystemInfo.SupportsTextureFormat(TextureFormat.RGFloat));


            numParticles = (depth.Width - 1) * (depth.Height - 1) * 2;

            vertices    = new Vector3[depth.Width * depth.Height];
            handle      = GCHandle.Alloc(vertices, GCHandleType.Pinned);
            verticesPtr = handle.AddrOfPinnedObject();

            var indices = new int[(depth.Width - 1) * (depth.Height - 1) * 6];

            var iIdx = 0;
            for (int j = 0; j < depth.Height; j++)
            {
                for (int i = 0; i < depth.Width; i++)
                {
                    if (i < depth.Width - 1 && j < depth.Height - 1)
                    {
                        var idx = i + j * depth.Width;
                        var y   = depth.Width; // row stride: offset to the pixel one row down
                        indices[iIdx++] = idx + 0;
                        indices[iIdx++] = idx + y;
                        indices[iIdx++] = idx + 1;

                        indices[iIdx++] = idx + 1;
                        indices[iIdx++] = idx + y;
                        indices[iIdx++] = idx + y + 1;
                    }
                }
            }

            particleBuffer = new ComputeBuffer(numParticles, Marshal.SizeOf(typeof(VoxelParticle)));
            vertexBuffer   = new ComputeBuffer(vertices.Length, sizeof(float) * 3);
            indicesBuffer  = new ComputeBuffer(indices.Length, sizeof(int));

            vertexBuffer.SetData(vertices);
            indicesBuffer.SetData(indices);
            renderer.SetBuffer("_VoxelBuffer", particleBuffer);

            SetMotionParticle();
            ResetParticle();

            if (mesh != null)
            {
                Destroy(mesh);
            }

            mesh = new Mesh()
            {
                indexFormat = IndexFormat.UInt32,
            };
            mesh.MarkDynamic();

            mesh.vertices = new Vector3[numParticles];
            var newIndices = Enumerable.Range(0, numParticles).ToArray();

            mesh.SetIndices(newIndices, MeshTopology.Points, 0, false);
            mesh.bounds = new Bounds(Vector3.zero, Vector3.one * 10f);

            GetComponent <MeshFilter>().sharedMesh = mesh;
        }

        source.OnNewSample += OnNewSample;
    }
Example #16
        private void StartCapture(int networkHeight, ComputationMode computationMode)
        {
            try {
                bool bDevicesFound = QueryRealSenseDevices();
                if (bDevicesFound == false)
                {
                    Console.WriteLine("Cannot start acquisition as no RealSense is connected.");
                    toggleStartStop.IsChecked    = false;
                    this.toggleStartStop.Content = "\uF5B0";
                    // Enable all controls
                    this.computationBackend.IsEnabled = true;
                    this.networkSlider.IsEnabled      = true;
                    // Stop demo

                    string acq_msg = "Acquisition Status:\t OFFLINE";
                    acquisition_status.Dispatcher.BeginInvoke((Action) delegate { acquisition_status.Text = acq_msg; });
                    return;
                }

                // get the selected image width and height
                int nImageWidth  = sensorResolutions[resolutionOptionBox.SelectedIndex].Width;
                int nImageHeight = sensorResolutions[resolutionOptionBox.SelectedIndex].Height;

                Console.WriteLine(
                    string.Format("Enabling the {0} S.No: {1}",
                                  availableDevices[camera_source.SelectedIndex].Info[CameraInfo.Name],
                                  availableDevices[camera_source.SelectedIndex].Info[CameraInfo.SerialNumber]));
                Console.WriteLine(
                    string.Format("Selected resolution for the image acquisition is {0}x{1}", nImageWidth, nImageHeight));
                Console.WriteLine(string.Format("Selected network size: {0} along with {1} as the computation device",
                                                networkHeight,
                                                computationMode));
                selectedDeviceSerial = availableDevices[camera_source.SelectedIndex].Info[CameraInfo.SerialNumber];
                // Create and config the pipeline to stream color and depth frames.
                cfg.EnableDevice(availableDevices[camera_source.SelectedIndex].Info[CameraInfo.SerialNumber]);
                cfg.EnableStream(Intel.RealSense.Stream.Color, nImageWidth, nImageHeight, Format.Bgr8, framerate: 30);
                cfg.EnableStream(Intel.RealSense.Stream.Depth, nImageWidth, nImageHeight, framerate: 30);

                Task.Factory.StartNew(() => {
                    try {
                        // Create and config the pipeline to stream color and depth frames.
                        pp = pipeline.Start(cfg);
                        intrinsicsDepthImagerMaster =
                            (pp.GetStream(Intel.RealSense.Stream.Depth).As <VideoStreamProfile>()).GetIntrinsics();

                        // Initialise cubemos DNN framework with the required deep learning model and the target compute
                        // device. Currently CPU and GPU are supported target devices. FP32 model is necessary for the
                        // CPU and FP16 model is required by the Myriad device and GPU

                        Cubemos.SkeletonTracking.Api skeletontrackingApi;

                        String cubemosModelDir = Common.DefaultModelDir();

                        var computeDevice = Cubemos.TargetComputeDevice.CM_CPU;
                        var modelFile     = cubemosModelDir + "\\fp32\\skeleton-tracking.cubemos";

                        if (computationMode == ComputationMode.GPU)
                        {
                            computeDevice = Cubemos.TargetComputeDevice.CM_GPU;
                            modelFile     = cubemosModelDir + "\\fp16\\skeleton-tracking.cubemos";
                        }
                        else if (computationMode == ComputationMode.MYRIAD)
                        {
                            computeDevice = Cubemos.TargetComputeDevice.CM_MYRIAD;
                            modelFile     = cubemosModelDir + "\\fp16\\skeleton-tracking.cubemos";
                        }

                        var licenseFolder = Common.DefaultLicenseDir();
                        try {
                            skeletontrackingApi = new SkeletonTracking.Api(licenseFolder);
                        }
                        catch (Exception ex) {
                            throw new Cubemos.Exception(
                                String.Format("Activation key or license key not found in {0}.\n " +
                                              "If you haven't activated the SDK yet, please run post_installation script as described in the Getting Started Guide to activate your license.",
                                              licenseFolder));
                        }

                        try {
                            skeletontrackingApi.LoadModel(computeDevice, modelFile);
                        }
                        catch (Exception ex) {
                            if (File.Exists(modelFile))
                            {
                                throw new Cubemos.Exception(
                                    "Internal error occurred during model initialization. Please make sure your compute device satisfies the hardware system requirements.");
                            }
                            else
                            {
                                throw new Cubemos.Exception(
                                    string.Format("Model file \"{0}\" not found. Details: \"{1}\"", modelFile, ex));
                            }
                        }

                        Console.WriteLine("Finished initialization");

                        Stopwatch fpsStopwatch = new Stopwatch();
                        double fps             = 0.0;
                        int nFrameCnt          = 0;

                        bool firstRun = true;

                        Console.WriteLine("Starting image acquisition and skeleton keypoints");
                        while (!tokenSource.Token.IsCancellationRequested)
                        {
                            int pipelineID = 1;
                            if (bEnableTracking == false)
                            {
                                pipelineID = 0;
                            }

                            fpsStopwatch.Restart();

                            // Wait for the next available FrameSet and use it as a releaser
                            // object that tracks all newly allocated .NET frames, ensuring
                            // deterministic finalization at the end of scope.
                            using (var releaser = new FramesReleaser())
                            {
                                using (var frames = pipeline.WaitForFrames())
                                {
                                    if (frames.Count != 2)
                                    {
                                        Console.WriteLine("Not all frames are available...");
                                    }

                                    var f = frames.ApplyFilter(align).DisposeWith(releaser).AsFrameSet().DisposeWith(
                                        releaser);

                                    var colorFrame = f.ColorFrame.DisposeWith(releaser);
                                    depthFrame     = f.DepthFrame.DisposeWith(releaser);

                                    var alignedDepthFrame = align.Process <DepthFrame>(depthFrame).DisposeWith(f);

                                    if (temporalFilterEnabled)
                                    {
                                        alignedDepthFrame = temp.Process <DepthFrame>(alignedDepthFrame).DisposeWith(f);
                                    }

                                    // We colorize the depth frame for visualization purposes
                                    var colorizedDepth =
                                        colorizer.Process <VideoFrame>(alignedDepthFrame).DisposeWith(f);

                                    // Preprocess the input image
                                    Bitmap inputImage    = FrameToBitmap(colorFrame);
                                    Bitmap inputDepthMap = FrameToBitmap((VideoFrame)colorizedDepth);

                                    // Run the inference on the preprocessed image
                                    List <SkeletonKeypoints> skeletonKeypoints;
                                    skeletontrackingApi.RunSkeletonTracking(
                                        ref inputImage, networkHeight, out skeletonKeypoints, pipelineID);

                                    if (firstRun)
                                    {
                                        Cnv2.Dispatcher.BeginInvoke((Action) delegate { Panel.SetZIndex(Cnv2, -1); },
                                                                    System.Windows.Threading.DispatcherPriority.Render);

                                        toggleStartStop.Dispatcher.BeginInvoke(
                                            (Action) delegate { toggleStartStop.IsEnabled = true; });

                                        firstRun = false;
                                    }

                                    Bitmap displayImage;
                                    if (bShowOnlySkeletons)
                                    {
                                        displayImage = new Bitmap(inputImage.Width, inputImage.Height);
                                        using (Graphics g = Graphics.FromImage(displayImage))
                                        {
                                            g.Clear(System.Drawing.Color.Black);
                                        }
                                    }
                                    else
                                    {
                                        displayImage = new Bitmap(inputImage);
                                    }

                                    Graphics graphics = Graphics.FromImage(displayImage);

                                    // Render the correct skeletons detected from the inference
                                    if (true == bRenderSkeletons)
                                    {
                                        renderSkeletons(
                                            skeletonKeypoints, nImageWidth, nImageHeight, bEnableTracking, graphics);
                                    }

                                    if (true == bRenderCoordinates)
                                    {
                                        renderCoordinates(skeletonKeypoints, nImageWidth, graphics);
                                    }

                                    if (false == bHideRenderImage)   // Render the final frame onto the display window
                                    {
                                        imgColor.Dispatcher.BeginInvoke(renderDelegate, imgColor, displayImage);
                                    }
                                    if (true == bRenderDepthMap)   // Overlay the depth map onto the display window
                                    {
                                        imgColor.Dispatcher.BeginInvoke(renderDelegate, imgDepth, inputDepthMap);
                                    }

                                    nFrameCnt++;
                                    fps += (double)(1000.0 / (double)fpsStopwatch.ElapsedMilliseconds);

                                    if (nFrameCnt % 25 == 0)
                                    {
                                        string msg = String.Format("FPS:\t\t\t{0:F2}", fps / nFrameCnt);
                                        fps_output.Dispatcher.BeginInvoke((Action) delegate { fps_output.Text = msg; });
                                        fps       = 0;
                                        nFrameCnt = 0;
                                    }

                                    string msg_person_count =
                                        string.Format("Person Count:\t\t{0}", skeletonKeypoints.Count);
                                    person_count.Dispatcher.BeginInvoke(
                                        (Action) delegate { person_count.Text = msg_person_count; });
                                }
                            }
                        }
                    }
                    catch (System.Exception exT) {
                        string errorMsg = string.Format(
                            "Internal Error Occurred. Application will now close.\nError Details:\n\n\"{0}\"",
                            exT.Message);
                        Cnv2.Dispatcher.BeginInvoke(
                            new InfoDialog.ShowInfoDialogDelegate(InfoDialog.ShowInfoDialog), "Error", errorMsg);
                    }
                }, tokenSource.Token);
            }
            catch (System.Exception ex) {
                string errorMsg = string.Format(
                    "Internal Error Occurred. Application will now close.\nError Details:\n\n\"{0}\"", ex.Message);
                Cnv2.Dispatcher.BeginInvoke(
                    new InfoDialog.ShowInfoDialogDelegate(InfoDialog.ShowInfoDialog), "Error", errorMsg);
            }
        }
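
FrameToBitmap is referenced above but not shown; one plausible implementation wraps the BGR8 frame buffer directly in a System.Drawing.Bitmap (valid only while the frame is alive):

        static Bitmap FrameToBitmap(VideoFrame frame)
        {
            // Wraps the native buffer without copying; dispose the frame only after the bitmap is used
            return new Bitmap(frame.Width, frame.Height, frame.Stride,
                              System.Drawing.Imaging.PixelFormat.Format24bppRgb, frame.Data);
        }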