Code example #1
0
    /// <summary>
    /// Grabs the next frame from the underlying capturer.
    /// Only the color image is filled; the depth part stays empty for this source.
    /// </summary>
    public override ImageAndDepth Get()
    {
        ImageAndDepth result = new ImageAndDepth();
        result.image = this.capturer.RetrieveMat();
        return result;
    }
Code example #2
0
    // Frame-grabbing thread body: pulls frames from _source at a capped rate
    // and submits them to the VideoWorker, keeping a copy in _frames so the
    // tracking callback can match results back to the originating frame.
    void CameraThreadFunc()
    {
        try
        {
            TimeSpan prev_capture_time = new TimeSpan(DateTime.Now.Ticks);

            // loop until Worker signals shutdown
            while (!_shutdown)
            {
                // sleep some time to get a stable capture frequency
                if (_frame_min_delay_ms > 0)
                {
                    TimeSpan now = new TimeSpan(DateTime.Now.Ticks);

                    // e.g. 40ms min delay - for 24 fps
                    int sleep_time =
                        (int)(_frame_min_delay_ms - (now - prev_capture_time).TotalMilliseconds);

                    if (sleep_time > 0)
                    {
                        Thread.Sleep(sleep_time);
                    }

                    prev_capture_time = new TimeSpan(DateTime.Now.Ticks);
                }

                ImageAndDepth frame = _source.Get();

                // skip empty frames (source may temporarily have no data)
                if (frame.image.Empty())
                {
                    continue;
                }

                // copy the raw BGR pixels out of the OpenCV Mat
                byte[] data = new byte[frame.image.Total() * frame.image.Type().Channels];
                Marshal.Copy(frame.image.DataStart, data, 0, data.Length);
                RawImage raw_image = new RawImage(frame.image.Width, frame.image.Height, RawImage.Format.FORMAT_BGR, data);

                _frames_mutex.WaitOne();
                try
                {
                    // submit the frame for tracking
                    int frame_id = _video_worker.addVideoFrame(raw_image, _stream_id);

                    // and store it here for further drawing
                    _frames.Enqueue(new Tuple<int, ImageAndDepth>(frame_id, frame));
                }
                finally
                {
                    // FIX: release in finally — if addVideoFrame throws, the old
                    // code abandoned the mutex and deadlocked the tracking callback.
                    _frames_mutex.ReleaseMutex();
                }
            }
        }
        catch (Exception e)
        {
            Console.WriteLine("\n WCameraThreadFunc exception: '{0}'\n", e.ToString());
        }
    }
Code example #3
0
    // VideoWorker tracking callback: matches the tracked samples to the queued
    // frame they were computed on, then publishes frame + per-face data into
    // the Worker's drawing data for the render thread.
    // userdata is supposed to be a reference to the owning Worker.
    void TrackingCallback(
        VideoWorker.TrackingCallbackData data,
        Object userdata)
    {
        // check arguments
        MAssert.Check(data != null);
        MAssert.Check(userdata != null);

        int stream_id = (int)data.stream_id;
        int frame_id  = (int)data.frame_id;

        // parallel arrays: one entry per tracked face
        RawSample[] samples         = data.samples;
        bool[]      samples_weak    = data.samples_weak;
        float[]     samples_quality = data.samples_quality;

        MAssert.Check(samples.Length == samples_weak.Length);
        MAssert.Check(samples.Length == samples_quality.Length);

        // cast the userdata back to the Worker
        Worker worker = (Worker)(userdata);

        // we care only about the worker._stream_id source,
        // so just ignore any others
        if (stream_id != worker._stream_id)
        {
            return;
        }

        // find the frame with id == frame_id in the camera thread's queue
        ImageAndDepth frame = new ImageAndDepth();

        worker._frames_mutex.WaitOne();
        try
        {
            // frames were enqueued in order, so dequeue until we reach frame_id
            for (;;)
            {
                // there already must be some frames
                MAssert.Check(worker._frames.Count > 0);

                // this frame_id can't be from the future
                MAssert.Check(worker._frames.Peek().Item1 <= frame_id);

                if (worker._frames.Peek().Item1 == frame_id)
                {
                    // we found it
                    frame = worker._frames.Dequeue().Item2;
                    break;
                }

                // this (i.e. the front) frame was skipped by tracking
                Console.WriteLine("skiped {0}:{1}", stream_id, worker._frames.Dequeue().Item1);
            }
        }
        finally
        {
            // FIX: release in finally — a failing MAssert.Check inside the loop
            // previously abandoned the mutex and deadlocked the camera thread.
            worker._frames_mutex.ReleaseMutex();
        }

        // update the drawing data under its own lock
        worker._drawing_data_mutex.WaitOne();
        try
        {
            // frame
            worker._drawing_data.frame    = frame.image;
            worker._drawing_data.depth    = frame.depth;
            worker._drawing_data.frame_id = frame_id;
            worker._drawing_data.updated  = true;

            // and samples
            for (int i = 0; i < samples.Length; ++i)
            {
                if (!worker._drawing_data.faces.ContainsKey(samples[i].getID()))
                {
                    worker._drawing_data.faces[samples[i].getID()] = new FaceData();
                }
                FaceData face = worker._drawing_data.faces[samples[i].getID()];
                face.frame_id = samples[i].getFrameID();  // it can differ from the frame_id
                face.lost     = false;
                face.weak     = samples_weak[i];
                face.sample   = samples[i];
            }
        }
        finally
        {
            // FIX: same abandoned-mutex hazard as above
            worker._drawing_data_mutex.ReleaseMutex();
        }
    }
Code example #4
0
File: RealSenseSource.cs  Project: Tott11/face-demo
    /// <summary>
    /// Grabs the next color + depth frame pair from the RealSense pipeline.
    /// Either part may be left empty when the device produced no new frame of
    /// that kind since the previous call (frame Number did not advance).
    /// Pixel data is Clone()d so the result outlives the disposed FrameSet.
    /// </summary>
    public override ImageAndDepth Get()
    {
        ImageAndDepth res = new ImageAndDepth();

        using (FrameSet frameset = pipeline.WaitForFrames())
        {
            // Depth stream, 16-bit Z values; tied to frameset's lifetime via DisposeWith.
            DepthFrame depth_frame = frameset.FirstOrDefault <DepthFrame>(Stream.Depth, Format.Z16).DisposeWith(frameset);

            // Process only if this is a new depth frame (Number advanced).
            if (depth_frame != null && ((long)depth_frame.Number) > prev_depth_frame_number)
            {
                prev_depth_frame_number = (long)depth_frame.Number;

                // Re-project depth into the color camera's viewpoint so depth
                // and color pixels line up 1:1 (hence the identity mapping below).
                FrameSet registered = align_to_color.Process(frameset).As <FrameSet>().DisposeWith(frameset);

                depth_frame = registered.FirstOrDefault <DepthFrame>(Stream.Depth, Format.Z16).DisposeWith(frameset);

                MAssert.Check(depth_frame != null);
                MAssert.Check(depth_frame.BitsPerPixel == 16);               // Z16 = 2 bytes/pixel
                MAssert.Check(depth_frame.Stride >= depth_frame.Width * 2);

                // [horizontal, vertical] field of view from the stream intrinsics.
                float[] fov = depth_frame.Profile.As <VideoStreamProfile>().GetIntrinsics().FOV;

                res.depth_opts.horizontal_fov            = fov[0];
                res.depth_opts.vertical_fov              = fov[1];
                res.depth_opts.depth_unit_in_millimeters = depth_scale;

                // Depth is already registered to color, so offset 0 / scale 1.
                res.depth_opts.depth_map_2_image_offset_x = 0;
                res.depth_opts.depth_map_2_image_offset_y = 0;
                res.depth_opts.depth_map_2_image_scale_x  = 1;
                res.depth_opts.depth_map_2_image_scale_y  = 1;

                res.depth_opts.depth_map_rows = depth_frame.Height;
                res.depth_opts.depth_map_cols = depth_frame.Width;

                // Timestamp appears to be in milliseconds; *1000 -> microseconds.
                res.depth_timestamp_microsec = (ulong)depth_frame.Timestamp * 1000;

                // Clone() copies the pixels out of the SDK-owned buffer.
                res.depth = new OpenCvSharp.MatOfUShort(
                    depth_frame.Height,
                    depth_frame.Width,
                    depth_frame.Data,
                    depth_frame.Stride).Clone();
            }


            // Color stream: 8-bit 3-channel BGR.
            VideoFrame video_frame = frameset.FirstOrDefault <VideoFrame>(Stream.Color, Format.Bgr8).DisposeWith(frameset);

            // Same "only if new" gating as for depth.
            if (video_frame != null && ((long)video_frame.Number > prev_video_frame_number))
            {
                prev_video_frame_number = (long)video_frame.Number;

                MAssert.Check(video_frame.BitsPerPixel == 24);               // Bgr8 = 3 bytes/pixel
                MAssert.Check(video_frame.Stride >= video_frame.Width * 3);

                // Clone() copies the pixels before the frameset is disposed.
                res.image = new OpenCvSharp.Mat(
                    video_frame.Height,
                    video_frame.Width,
                    OpenCvSharp.MatType.CV_8UC3,
                    video_frame.Data,
                    video_frame.Stride).Clone();

                res.image_timestamp_microsec = (ulong)video_frame.Timestamp * 1000;
            }
        }

        return(res);
    }