コード例 #1
0
    /// <summary>
    /// Frame callback: aligns the incoming frameset, then builds a point
    /// cloud from the depth frame and feeds it to the particle renderer.
    /// </summary>
    /// <param name="frames">Raw frameset delivered by the pipeline.</param>
    private void OnFrames(FrameSet frames)
    {
        using (var aligned = aligner.Process(frames))
        {
            using (var colorFrame = aligned.ColorFrame)
            using (var depthFrame = aligned.DepthFrame)
            {
                if (depthFrame == null)
                {
                    Debug.Log("No depth frame in frameset, can't create point cloud");
                    return;
                }

                if (!UpdateParticleParams(depthFrame.Width, depthFrame.Height))
                {
                    // fixed typo in log message ("craete" -> "create")
                    Debug.Log("Unable to create point cloud");
                    return;
                }

                // Points wraps native memory; dispose as soon as consumed.
                using (var points = pc.Calculate(depthFrame))
                {
                    setParticals(points, colorFrame);
                }
            }
        }
    }
コード例 #2
0
 // Aligns the incoming frameset, extracts the frame of the configured
 // source stream type and forwards it to OnFrame. Both the aligned
 // frameset and the extracted frame are disposed before returning.
 private void OnFrameSet(FrameSet frames)
 {
     using (var aligned = aligner.Process(frames))
     using (var sourceFrame = aligned[sourceStreamType])
     {
         OnFrame(sourceFrame);
     }
 }
コード例 #3
0
 /// <summary>
 /// Aligns the incoming frameset and dispatches the depth frame to
 /// <c>from</c> and the color frame to <c>to</c>.
 /// </summary>
 /// <param name="frames">Raw frameset delivered by the pipeline.</param>
 private void OnFrameSet(FrameSet frames)
 {
     using (var aligned = aligner.Process(frames))
     {
         // The DepthFrame/ColorFrame accessors hand out frame objects that
         // must be disposed by the caller; the original leaked one of each
         // per callback (compare with the disposing variant elsewhere in
         // this codebase).
         using (var depth = aligned.DepthFrame)
             from.OnFrame(depth);

         using (var color = aligned.ColorFrame)
             to.OnFrame(color);
     }
 }
コード例 #4
0
    /// <summary>
    /// Runs the frame through the underlying align processing block,
    /// (re-)initializing it first when it does not exist yet or the align
    /// target has changed since the last call.
    /// </summary>
    /// <param name="frame">Frame to process.</param>
    /// <param name="frameSource">Unused; required by the override signature.</param>
    /// <returns>The processed (aligned) frame.</returns>
    public override Frame Process(Frame frame, FrameSource frameSource)
    {
        bool needsInit = _pb == null || _alignTo != _currAlignTo;

        if (needsInit)
        {
            Init();
        }

        return _pb.Process(frame);
    }
コード例 #5
0
    /// <summary>
    /// Aligns the frameset, then hands the depth frame to <c>from</c> and
    /// the color frame to <c>to</c>, disposing each frame after use.
    /// </summary>
    /// <param name="frames">Raw frameset delivered by the pipeline.</param>
    private void OnFrameSet(FrameSet frames)
    {
        using (var aligned = aligner.Process(frames))
        {
            using (var depthFrame = aligned.DepthFrame)
            {
                from.OnFrame(depthFrame);
            }

            using (var colorFrame = aligned.ColorFrame)
            {
                to.OnFrame(colorFrame);
            }
        }
    }
コード例 #6
0
    /// <summary>
    /// Frame callback: aligns depth to color, converts the depth frame into
    /// point-cloud vertices and colors each particle from the matching
    /// color pixel; particles with invalid depth are hidden.
    /// </summary>
    /// <param name="frameset">Raw frameset delivered by the pipeline.</param>
    private void OnFrame(FrameSet frameset)
    {
        //Align depth frame to color frame
        using (FrameSet aligned = aligner.Process(frameset))
        {
            //DepthFrame
            depthFrame = aligned.Where(f => f.Profile.Stream == Stream.Depth).First() as DepthFrame;

            if (depthFrame == null)
            {
                Debug.Log("depth frame is null");
                return;
            }

            //ColorFrame
            vidFrame = aligned.Where(f => f.Profile.Stream == Stream.Color).First() as VideoFrame;

            if (vidFrame == null)
            {
                Debug.Log("color frame is null");
                return;
            }

            UpdateParticleParams(depthFrame.Width, depthFrame.Height, depthFrame.Profile.Format);

            //CoordinateData — Points wraps native memory; dispose it once the
            //vertices are copied out (previously leaked once per callback).
            using (var points = pc.Calculate(depthFrame))
            {
                vertices = vertices ?? new Points.Vertex[points.Count];
                points.CopyTo(vertices);
            }

            //ColorData
            colorData = colorData ?? new byte[vidFrame.Stride * vidFrame.Height];
            vidFrame.CopyTo(colorData);

            for (int index = 0; index < particleSize; index += skipParticles)
            {
                var v = vertices[index];

                if (v.z > 0)
                {
                    // assumes a tightly packed 3-bytes-per-pixel color format
                    // (e.g. Rgb8) — TODO confirm against the pipeline config
                    particles[index].position   = new Vector3(v.x, v.y, v.z);
                    particles[index].startSize  = pointsSize;
                    particles[index].startColor = new Color32(colorData[index * 3], colorData[index * 3 + 1], colorData[index * 3 + 2], 255);
                }
                else
                {
                    // invalid depth: collapse and hide the particle
                    particles[index].position   = new Vector3(0, 0, 0);
                    particles[index].startSize  = (float)0.0;
                    particles[index].startColor = new Color32(0, 0, 0, 0);
                }
            }
        }
    }
コード例 #7
0
    /// <summary>
    /// Frame callback: aligns the frameset, converts the depth frame into
    /// point-cloud vertices and colors each valid particle from the matching
    /// color pixel. Particles with invalid depth keep their previous state.
    /// </summary>
    /// <param name="frameset">Raw frameset delivered by the pipeline.</param>
    private void OnFrame(FrameSet frameset)
    {
        using (FrameSet aligned = aligner.Process(frameset))
        {
            //Depth
            depthFrame = aligned.Where(f => f.Profile.Stream == Stream.Depth).First() as DepthFrame;

            //Color
            vidFrame = aligned.Where(f => f.Profile.Stream == Stream.Color).First() as VideoFrame;

            if (depthFrame == null || vidFrame == null)
            {
                return;
            }

            UpdateParticleParams(depthFrame.Width, depthFrame.Height, depthFrame.Profile.Format);

            //Depth — Points wraps native memory; dispose it once the vertices
            //are copied out (previously leaked once per callback).
            using (var points = pc.Calculate(depthFrame))
            {
                vertices = vertices ?? new Points.Vertex[points.Count];
                points.CopyTo(vertices);
            }

            //Color
            byteColorData = byteColorData ?? new byte[vidFrame.Stride * vidFrame.Height];
            vidFrame.CopyTo(byteColorData);

            for (int index = 0; index < particleSize; index += skipParticles)
            {
                var v = vertices[index];

                if (v.z > 0)
                {
                    // assumes a tightly packed 3-bytes-per-pixel color format
                    // (e.g. Rgb8) — TODO confirm against the pipeline config
                    particles[index].position  = new Vector3(v.x, v.y, v.z);
                    particles[index].startSize = pointsSize;
                    particles[index].startColor = new Color32(byteColorData[index * 3], byteColorData[index * 3 + 1], byteColorData[index * 3 + 2], 255);
                }

                // Particles with invalid depth (v.z == 0) intentionally keep
                // their previous state — the reset branch was commented out
                // in the original and has been removed as dead code.
            }
        }
    }
コード例 #8
0
        // Waits for the next frameset from the pipeline, aligns it to the
        // color stream, then copies the color and depth pixels into the
        // reusable colorArray / depthArray buffers for later processing.
        public void WaitThenProcessFrame()
        {
            using (var frames = pipeline.WaitForFrames())
            {
                var align = new Align(Stream.Color).DisposeWith(frames);
                var alignedFrame = align.Process(frames).DisposeWith(frames);
                var alignedFrameset = alignedFrame.As <FrameSet>().DisposeWith(frames);

                var colorFrame = alignedFrameset.ColorFrame.DisposeWith(alignedFrameset);
                var depthFrame = alignedFrameset.DepthFrame.DisposeWith(alignedFrameset);

                colorFrame.CopyTo(colorArray);
                depthFrame.CopyTo(depthArray);
            }
        }
コード例 #9
0
 /// <summary>
 /// Aligner entry point: resets the underlying align block whenever the
 /// depth/color stream profiles change, then runs the frameset through it
 /// (or returns the input untouched when disabled).
 /// </summary>
 /// <param name="frameset">Input frameset containing depth and color frames.</param>
 /// <param name="releaser">Releaser that manages the returned frameset's lifetime.</param>
 /// <returns>The aligned frameset, or <paramref name="frameset"/> when disabled.</returns>
 public override FrameSet Process(FrameSet frameset, FramesReleaser releaser)
 {
     lock (_lock)
     {
         // Detect a stream-profile change and reset the aligner before
         // caching the current unique IDs.
         // NOTE(review): the condition parses as
         //   ((_profilesIds.Count == 0) != !ContainsValue(color)) || !ContainsValue(depth)
         // which looks unintentional — it was probably meant to be
         //   Count == 0 || !ContainsValue(color) || !ContainsValue(depth).
         // Left byte-identical here; confirm intent before changing.
         using (var depth = frameset.DepthFrame)
             using (var color = frameset.ColorFrame)
                 if (_profilesIds.Count == 0 != !_profilesIds.ContainsValue(color.Profile.UniqueID) || !_profilesIds.ContainsValue(depth.Profile.UniqueID))
                 {
                     ResetAligner();
                     _profilesIds[Stream.Depth] = depth.Profile.UniqueID;
                     _profilesIds[Stream.Color] = color.Profile.UniqueID;
                 }
         return(_enabled ? _pb.Process(frameset, releaser) : frameset);
     }
 }
コード例 #10
0
    /// <summary>
    /// Threaded sample callback: aligns the frameset, copies the color bytes
    /// (also widened to uints) and the point-cloud vertices into the
    /// reusable instance buffers.
    /// </summary>
    /// <param name="frameSet">Raw frameset delivered by the pipeline.</param>
    void onNewSampleSetThreading(FrameSet frameSet)
    {
        using (FrameSet aligned = aligner.Process(frameSet)){
            VideoFrame vidFrame   = aligned.Where(x => x.Profile.Stream == Stream.Color).First() as VideoFrame;
            Frame      depthFrame = aligned.Where(x => x.Profile.Stream == Stream.Depth).First();

            byteColorData = byteColorData ?? new byte[vidFrame.Stride * vidFrame.Height];
            vidFrame.CopyTo(byteColorData);
            uintColorData = Array.ConvertAll(byteColorData, x => (uint)x);

            // Points wraps native memory; dispose it after copying the
            // vertices out (previously leaked once per callback).
            using (var points = pointCloud.Calculate(depthFrame))
            {
                // NOTE(review): the buffer is sized by dataLength rather than
                // points.Count — confirm dataLength >= points.Count.
                vertices = vertices ?? new Points.Vertex[dataLength];
                points.CopyTo(vertices);
            }
        }
    }
コード例 #11
0
        // Waits for one frameset, aligns it to the color stream, copies the
        // color and depth pixels into fresh buffers, and prints the info for
        // the centre pixel of the image.
        static void runCycle(Pipeline pipe)
        {
            using (var frameSet = pipe.WaitForFrames())
            {
                var align = new Align(Stream.Color).DisposeWith(frameSet);
                var alignedFrame = align.Process(frameSet).DisposeWith(frameSet);
                var alignedSet = alignedFrame.As <FrameSet>().DisposeWith(frameSet);

                var colorFrame = alignedSet.ColorFrame.DisposeWith(alignedSet);
                var depthFrame = alignedSet.DepthFrame.DisposeWith(alignedSet);

                var colorArray = new byte[CAMERA_WIDTH * CAMERA_HEIGHT * 3];
                colorFrame.CopyTo(colorArray);

                var depthArray = new UInt16[CAMERA_WIDTH * CAMERA_HEIGHT];
                depthFrame.CopyTo(depthArray);

                showPixelInfos(CAMERA_WIDTH / 2, CAMERA_HEIGHT / 2, colorArray, depthArray);
            }
        }
コード例 #12
0
        // Aligns the given frameset to the color stream and returns the
        // color/depth pixel buffers plus the vertex and texture-coordinate
        // arrays, bundled into a single tuple.
        private static Tuple <byte[], ushort[], float[], float[]> Frame(FrameSet frames)
        {
            var align = new Align(Intel.RealSense.Stream.Color).DisposeWith(frames);
            var alignedFrame = align.Process(frames).DisposeWith(frames);
            var alignedSet = alignedFrame.As <FrameSet>().DisposeWith(frames);

            VideoFrame colorFrame = alignedSet.ColorFrame.DisposeWith(alignedSet);
            VideoFrame depthFrame = alignedSet.DepthFrame.DisposeWith(alignedSet);

            // Pixel data
            byte[]   colorArray = ColorArray(colorFrame);
            ushort[] depthArray = DepthArray(depthFrame);

            // Point data
            float[] verticesArray    = VerticeArray(depthFrame, colorFrame);
            float[] coordinatesArray = CoordinatesArray(depthFrame, colorFrame);

            return Tuple.Create(colorArray, depthArray, verticesArray, coordinatesArray);
        }
コード例 #13
0
        // Capture loop: until _streamingEvent is signalled, waits for the
        // next frameset, copies the color pixels, aligns the set and copies
        // the depth pixels, then raises OnFrameDataArrived with the shared
        // frame-data buffer.
        private void WaitForFrames()
        {
            while (!_streamingEvent.WaitOne(0))
            {
                using (FrameSet frameSet = _pipeline.WaitForFrames())
                {
                    _frameData.Timestamp = DateTime.Now;

                    using (VideoFrame color = frameSet.ColorFrame)
                    {
                        color.CopyTo(_frameData.ColorData);
                    }

                    using (FrameSet alignedSet = _aligner.Process(frameSet))
                    using (DepthFrame depth = alignedSet.DepthFrame)
                    {
                        depth.CopyTo(_frameData.DepthData);
                    }

                    OnFrameDataArrived?.Invoke(_frameData);
                }
            }
        }
コード例 #14
0
 // Runs the frame of the configured source stream through the decimation,
 // spatial and temporal post-processing filters in order, then forwards
 // the fully filtered frame to OnFrame. The stacked using statements keep
 // the same nesting (and dispose order) as explicitly nested blocks.
 private void OnFrameSet(FrameSet frames)
 {
     using (var aligned = aligner.Process(frames))
     using (var source = aligned[sourceStreamType])
     using (VideoFrame video = source as VideoFrame)
     using (VideoFrame decimated = decimationFilter.ApplyFilter(video))
     using (VideoFrame smoothed = spatialFilter.ApplyFilter(decimated))
     using (VideoFrame filtered = temporalFilter.ApplyFilter(smoothed))
     {
         OnFrame(filtered);
     }
 }
コード例 #15
0
        /// <summary>
        /// Background capture loop: configures a depth+color pipeline, then for
        /// every frameset builds a filtered, textured point cloud, culls invalid
        /// points, applies the transform and clipping box, and publishes the
        /// result into m_pointcloud. Runs until the token is cancelled or the
        /// pipeline throws.
        /// </summary>
        /// <param name="token">Token that requests termination of the loop.</param>
        private void RunThread(CancellationToken token)
        {
            Intel.RealSense.PointCloud pc = new Intel.RealSense.PointCloud();

            // Depth post-processing chain, applied in list order below.
            DecimationFilter dec_filter  = new DecimationFilter();
            SpatialFilter    spat_filter = new SpatialFilter();
            TemporalFilter   temp_filter = new TemporalFilter();

            dec_filter.Options[Option.FilterMagnitude].Value = DecimationMagnitude;

            spat_filter.Options[Option.FilterMagnitude].Value   = SpatialMagnitude;
            spat_filter.Options[Option.FilterSmoothAlpha].Value = Math.Min(1.0f, (float)SpatialSmoothAlpha);
            spat_filter.Options[Option.FilterSmoothDelta].Value = (float)SpatialSmoothDelta;

            temp_filter.Options[Option.FilterSmoothAlpha].Value = Math.Min(1.0f, (float)TemporalSmoothAlpha);
            temp_filter.Options[Option.FilterSmoothDelta].Value = (float)TemporalSmoothDelta;

            List <ProcessingBlock> filters = new List <ProcessingBlock> {
                dec_filter, spat_filter, temp_filter
            };
            Align align_to_depth = new Align(Stream.Depth);

            var cfg = new Config();

            cfg.EnableStream(Stream.Depth, 640, 480);
            cfg.EnableStream(Stream.Color, 1280, 720, Format.Rgb8);

            var             pipeline = new Pipeline();
            PipelineProfile pp       = null;

            try
            {
                pp = pipeline.Start(cfg);
            }
            catch (Exception e)
            {
                RhinoApp.WriteLine("RsToolkit: " + e.Message);
                return;
            }

            while (!token.IsCancellationRequested)
            {
                try
                {
                    using (var frames = pipeline.WaitForFrames())
                    {
                        var aligned = align_to_depth.Process <FrameSet>(frames).DisposeWith(frames);
                        var color   = aligned.ColorFrame.DisposeWith(frames);

                        pc.MapTexture(color);

                        var filtered = aligned[Stream.Depth].DisposeWith(frames);

                        foreach (var filter in filters)
                        {
                            filtered = filter.Process(filtered).DisposeWith(frames);
                        }

                        // Dispose the Points object with the frameset, like every
                        // other frame object above (previously leaked each cycle).
                        Points points = pc.Process <Points>(filtered).DisposeWith(frames);

                        var vertices   = new Point3f[points.Count];
                        var tex_coords = new Point2f[points.Count];

                        points.CopyVertices <Point3f>(vertices);
                        points.CopyTextureCoords <Point2f>(tex_coords);

                        Debug.Assert(vertices.Length == tex_coords.Length);

                        // ======== CULL INVALID POINTS ========
                        // Drop points with (near-)zero depth — pixels the sensor
                        // could not resolve. Scoped block keeps locals contained.
                        {
                            var flags    = new bool[vertices.Length];
                            int new_size = 0;
                            for (int i = 0; i < vertices.Length; ++i)
                            {
                                if (vertices[i].Z > 0.1)
                                {
                                    flags[i] = true;
                                    new_size++;
                                }
                            }

                            var new_vertices   = new Point3f[new_size];
                            var new_tex_coords = new Point2f[new_size];

                            for (int i = 0, j = 0; i < vertices.Length; ++i)
                            {
                                if (flags[i])
                                {
                                    new_vertices[j]   = vertices[i];
                                    new_tex_coords[j] = tex_coords[i];
                                    ++j;
                                }
                            }

                            vertices   = new_vertices;
                            tex_coords = new_tex_coords;
                        }

                        // ======== TRANSFORM ========

                        if (m_xform.IsValid)
                        {
                            // BUG FIX: Parallel.For's upper bound is EXCLUSIVE, so
                            // the original "vertices.Length - 1" silently skipped
                            // the last vertex.
                            Parallel.For(0, vertices.Length, (i) =>
                            {
                                vertices[i].Transform(m_xform);
                            });
                        }

                        // ======== CLIP TO BOX ========

                        if (m_clipping_box.IsValid &&
                            m_clipping_box.X.Length > 0 &&
                            m_clipping_box.Y.Length > 0 &&
                            m_clipping_box.Z.Length > 0)
                        {
                            // Box extents shifted to world coordinates.
                            Point3d box_centre = m_clipping_box.Plane.Origin;
                            double  minx = m_clipping_box.X.Min + box_centre.X, maxx = m_clipping_box.X.Max + box_centre.X;
                            double  miny = m_clipping_box.Y.Min + box_centre.Y, maxy = m_clipping_box.Y.Max + box_centre.Y;
                            double  minz = m_clipping_box.Z.Min + box_centre.Z, maxz = m_clipping_box.Z.Max + box_centre.Z;

                            var flags    = new bool[vertices.Length];
                            int new_size = 0;
                            for (int i = 0; i < vertices.Length; ++i)
                            {
                                if (vertices[i].X < maxx && vertices[i].X > minx &&
                                    vertices[i].Y < maxy && vertices[i].Y > miny &&
                                    vertices[i].Z < maxz && vertices[i].Z > minz)
                                {
                                    flags[i] = true;
                                    new_size++;
                                }
                            }

                            var new_vertices   = new Point3f[new_size];
                            var new_tex_coords = new Point2f[new_size];

                            for (int i = 0, j = 0; i < vertices.Length; ++i)
                            {
                                if (flags[i])
                                {
                                    new_vertices[j]   = vertices[i];
                                    new_tex_coords[j] = tex_coords[i];
                                    ++j;
                                }
                            }

                            vertices   = new_vertices;
                            tex_coords = new_tex_coords;
                        }

                        Debug.Assert(vertices.Length == tex_coords.Length);

                        var point_colors = GetPointColors(color, tex_coords);

                        RPointCloud new_pointcloud = new RPointCloud();
                        new_pointcloud.AddRange(vertices.Select(x => new Point3d(x)), point_colors);

                        // Swap the cloud in under the lock so readers never see a
                        // partially built point cloud.
                        lock (m_pointcloud)
                            m_pointcloud = new_pointcloud;
                    }
                }
                catch (Exception e)
                {
                    RhinoApp.WriteLine("RsToolkit: " + e.Message);
                    m_is_on = false;
                    break;
                }
            }

            RhinoApp.WriteLine("RsToolkit: Task cancelled.");

            if (pipeline != null)
            {
                pipeline.Stop();
            }
        }
コード例 #16
0
ファイル: RealSenseSource.cs プロジェクト: Tott11/face-demo
    /// <summary>
    /// Grabs the next frameset and returns an ImageAndDepth holding any NEW
    /// depth map (aligned to the color stream) and any NEW color image, each
    /// tagged with its timestamp. Frames already seen (tracked by frame
    /// number) are skipped, leaving the corresponding field untouched.
    /// </summary>
    public override ImageAndDepth Get()
    {
        ImageAndDepth res = new ImageAndDepth();

        using (FrameSet frameset = pipeline.WaitForFrames())
        {
            DepthFrame depth_frame = frameset.FirstOrDefault <DepthFrame>(Stream.Depth, Format.Z16).DisposeWith(frameset);

            // Only process the depth frame when it is newer than the last one seen.
            if (depth_frame != null && ((long)depth_frame.Number) > prev_depth_frame_number)
            {
                prev_depth_frame_number = (long)depth_frame.Number;

                // Align depth to color; the registered frameset and its depth
                // frame are tied to the outer frameset's lifetime via DisposeWith.
                FrameSet registered = align_to_color.Process(frameset).As <FrameSet>().DisposeWith(frameset);

                depth_frame = registered.FirstOrDefault <DepthFrame>(Stream.Depth, Format.Z16).DisposeWith(frameset);

                MAssert.Check(depth_frame != null);
                MAssert.Check(depth_frame.BitsPerPixel == 16);
                MAssert.Check(depth_frame.Stride >= depth_frame.Width * 2);

                // Field of view from the depth stream intrinsics.
                float[] fov = depth_frame.Profile.As <VideoStreamProfile>().GetIntrinsics().FOV;

                res.depth_opts.horizontal_fov            = fov[0];
                res.depth_opts.vertical_fov              = fov[1];
                res.depth_opts.depth_unit_in_millimeters = depth_scale;

                // Depth is registered to the color image, so the depth-to-image
                // mapping is the identity.
                res.depth_opts.depth_map_2_image_offset_x = 0;
                res.depth_opts.depth_map_2_image_offset_y = 0;
                res.depth_opts.depth_map_2_image_scale_x  = 1;
                res.depth_opts.depth_map_2_image_scale_y  = 1;

                res.depth_opts.depth_map_rows = depth_frame.Height;
                res.depth_opts.depth_map_cols = depth_frame.Width;

                // NOTE(review): Timestamp is cast to ulong BEFORE the *1000, so
                // fractional milliseconds are discarded — confirm this is intended.
                res.depth_timestamp_microsec = (ulong)depth_frame.Timestamp * 1000;

                // Clone so the Mat owns its pixels after the frame is disposed.
                res.depth = new OpenCvSharp.MatOfUShort(
                    depth_frame.Height,
                    depth_frame.Width,
                    depth_frame.Data,
                    depth_frame.Stride).Clone();
            }

            VideoFrame video_frame = frameset.FirstOrDefault <VideoFrame>(Stream.Color, Format.Bgr8).DisposeWith(frameset);

            // Only process the color frame when it is newer than the last one seen.
            if (video_frame != null && ((long)video_frame.Number > prev_video_frame_number))
            {
                prev_video_frame_number = (long)video_frame.Number;

                MAssert.Check(video_frame.BitsPerPixel == 24);
                MAssert.Check(video_frame.Stride >= video_frame.Width * 3);

                // Clone so the Mat owns its pixels after the frame is disposed.
                res.image = new OpenCvSharp.Mat(
                    video_frame.Height,
                    video_frame.Width,
                    OpenCvSharp.MatType.CV_8UC3,
                    video_frame.Data,
                    video_frame.Stride).Clone();

                res.image_timestamp_microsec = (ulong)video_frame.Timestamp * 1000;
            }
        }

        return(res);
    }