Example #1
 /// <summary>
 /// load a new image from the camera
 /// </summary>
 /// <param name="imageptr">mono camera image</param>
 /// <param name="imageptr_colour">colour camera image</param>
 /// <param name="outputimageptr">image into which output is drawn</param>
 /// <param name="show_ellipses">whether ellipses should be drawn on the output image</param>
 public virtual void load_new_image(classimage_mono imageptr, classimage imageptr_colour, classimage_mono outputimageptr, bool show_ellipses)
 {
     image              = imageptr;
     image_colour       = imageptr_colour;
     outputimage        = outputimageptr;
     this.show_ellipses = show_ellipses;
 }
Example #2
        //------------------------------------------------------------------------------------------------------------------------
        //update from another image: nearest-neighbour resample to this image's dimensions, taking the first channel of the source
        //------------------------------------------------------------------------------------------------------------------------
        public void updateFromImage(classimage img)
        {
            int x, y, xx, yy;

            for (x = 0; x < width; x++)
            {
                xx = (x * img.width) / width;
                for (y = 0; y < height; y++)
                {
                    yy          = (y * img.height) / height;
                    image[x, y] = img.image[xx, yy, 0];
                }
            }
        }
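The loop above maps each destination pixel back to a source pixel with integer arithmetic, i.e. nearest-neighbour resampling. A minimal standalone sketch of the same mapping over plain 2D byte arrays (the method name and arrays are illustrative, not part of the codebase):

        // nearest-neighbour resample of a greyscale array to a new size,
        // using the same integer index mapping as updateFromImage
        static byte[,] ResampleNearest(byte[,] src, int newWidth, int newHeight)
        {
            int srcWidth  = src.GetLength(0);
            int srcHeight = src.GetLength(1);
            byte[,] dst   = new byte[newWidth, newHeight];

            for (int x = 0; x < newWidth; x++)
            {
                int xx = (x * srcWidth) / newWidth;
                for (int y = 0; y < newHeight; y++)
                {
                    int yy = (y * srcHeight) / newHeight;
                    dst[x, y] = src[xx, yy];
                }
            }
            return dst;
        }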
Example #3
        //---------------------------------------------------------------------------------------------
        //copy from a colour image, converting to greyscale by averaging the RGB channels
        //---------------------------------------------------------------------------------------------
        public void copyImage(classimage source_img)
        {
            int  x, y;
            Byte v;

            for (x = 0; x < width; x++)
            {
                for (y = 0; y < height; y++)
                {
                    v = (Byte)((source_img.image[x, y, 0] +
                                source_img.image[x, y, 1] +
                                source_img.image[x, y, 2]) / 3);
                    image[x, y] = v;
                }
            }
        }
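copyImage indexes the source at the destination's own coordinates, so unlike updateFromImage and sampleFromImage it assumes both images share the same dimensions. A short usage sketch, assuming the classimage/classimage_mono types and the createImage allocator seen in Example #6 (sizes are illustrative):

        classimage colour = new classimage();
        colour.createImage(320, 240);          // allocate a 320x240 colour image
        // ... fill colour.image with pixel data ...

        classimage_mono grey = new classimage_mono();
        grey.createImage(320, 240);            // must match the source dimensions
        grey.copyImage(colour);                // each pixel becomes the average of R, G and B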
Example #4
        /// <summary>
        /// show the histogram in an image object
        /// </summary>
        /// <param name="img">image to draw the histogram into</param>
        public void Show(classimage img)
        {
            int prev_x = 0, prev_y = 0;

            img.clear();
            for (int i = 0; i < no_of_levels; i++)
            {
                int x = i * img.width / no_of_levels;
                // scale bar heights so that the tallest bin occupies 80% of the image height
                int y = img.height - (level[i] * (img.height * 8 / 10) / maxval);
                if (i > 0)
                {
                    img.drawLine(prev_x, prev_y, x, y, 0, 255, 0, 0);
                }
                prev_x = x;
                prev_y = y;
            }
        }
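Show only draws the histogram; level[] and maxval must already be populated. A minimal sketch of how such a histogram might be filled from a mono image (illustrative only, assuming an image[x, y] byte array and the no_of_levels/level/maxval fields used above):

        int[] level = new int[no_of_levels];
        int maxval  = 1;                                     // avoid dividing by zero in Show
        for (int x = 0; x < width; x++)
        {
            for (int y = 0; y < height; y++)
            {
                int bin = image[x, y] * no_of_levels / 256;  // map intensity 0..255 to a bin
                level[bin]++;
                if (level[bin] > maxval) maxval = level[bin];
            }
        }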
Example #5
        //------------------------------------------------------------------------------------------------------------------------
        //  sample the given image within the bounding box (tx,ty)-(bx,by), resampling it to this image's dimensions
        //------------------------------------------------------------------------------------------------------------------------
        public void sampleFromImage(classimage example_img, int tx, int ty, int bx, int by)
        {
            int x, y, xx, yy, dx, dy;

            dx = bx - tx;
            dy = by - ty;

            for (x = 0; x < width; x++)
            {
                xx = tx + ((x * dx) / width);

                for (y = 0; y < height; y++)
                {
                    yy = ty + ((y * dy) / height);

                    image[x, y] = example_img.image[xx, yy, 0];
                }
            }
            //updateIntegralImage();
        }
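A hedged usage sketch: cropping a region of a larger colour image into a fixed-size patch, assuming sampleFromImage is available on the mono image class as shown above and createImage behaves as in Example #6 (the sizes and the full_image variable are illustrative):

        classimage_mono patch = new classimage_mono();
        patch.createImage(32, 32);                        // fixed-size patch
        // resample the region with top-left (50, 40) and bottom-right (150, 120)
        // of full_image into the 32x32 patch
        patch.sampleFromImage(full_image, 50, 40, 150, 120);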
Example #6
        private void timUpdate_Tick(object sender, EventArgs e)
        {
            bool image_loaded;
            int wdth;
            int hght;

            if ((left_imageloaded) || (left_camera_running))
            {
                //get images from the two cameras
                captureCameraImages();

                //record depth images if necessary
                //recordDepthImages();

                if ((!test.frame_rate_warning) || (!test.enable_mapping))
                {
                    if (!test.enable_mapping)
                        cmdBeginTracking.Visible = true;
                    else
                        cmdBeginTracking.Visible = false;

                    lblTracking.Visible = !cmdBeginTracking.Visible;
                    lblFrameRateWarning.Visible = false;
                }
                else
                {
                    lblTracking.Visible = false;
                    lblFrameRateWarning.Visible = true;
                }


                if (simulation_mode)
                {
                    //update from loaded images
                    image_loaded = loadVideoFrame(video_path, video_frame_number);
                    if (image_loaded)
                    {
                        wdth = picLeftImage.Image.Width;
                        hght = picLeftImage.Image.Height;

                        if (global_variables.left_bmp == null)
                        {
                            global_variables.left_bmp = new Byte[wdth * hght * 3];
                            raw_image = new classimage_mono();
                            raw_image.createImage(wdth, hght);
                            raw_image_colour = new classimage();
                            raw_image_colour.createImage(wdth, hght);
                        }

                        updatebitmap((Bitmap)picLeftImage.Image, global_variables.left_bmp);

                        raw_image_colour.updateFromBitmap(global_variables.left_bmp, 1, wdth, hght);
                        raw_image.copyImage(raw_image_colour);

                        if (!outputInitialised)
                        {
                            picOutput1.Image = new Bitmap(global_variables.standard_width, global_variables.standard_height, PixelFormat.Format24bppRgb);
                            background_bitmap = new Bitmap(global_variables.standard_width, global_variables.standard_height, PixelFormat.Format24bppRgb);
                            disp_bmp_data = new Byte[global_variables.standard_width * global_variables.standard_height * 3];
                            output_image = new classimage_mono();
                            output_image.createImage(global_variables.standard_width, global_variables.standard_height);
                            outputInitialised = true;
                        }
                        output_image.updateFromImage(raw_image);

                        txtCaptureTime.Text = Convert.ToString(endStopWatch());
                        beginStopWatch();

                        test.GoOneStep(raw_image, raw_image_colour, output_image);

                        txtProcessingTime.Text = Convert.ToString(endStopWatch());
                        txtfps.Text = Convert.ToString(test.frames_per_second);
                        txtSpeed.Text = Convert.ToString(test.speed);

                        beginStopWatch();

                        test.ShowFeatures(output_image, selected_display);
                        output_image.saveToBitmap(disp_bmp_data, global_variables.standard_width, global_variables.standard_height);

                        // either show the output image in the picturebox, or transfer it
                        // to a separate background bitmap for use during 3D rendering
                        if ((selected_display != MonoSLAM.DISPLAY_AUGMENTED_REALITY) ||
                            ((selected_display == MonoSLAM.DISPLAY_AUGMENTED_REALITY) && (!(initialised_3D && test.enable_mapping))))
                        {
                            updatebitmap(disp_bmp_data, (Bitmap)picOutput1.Image);
                            picOutput1.Refresh();
                        }
                        else
                        {
                            updatebitmap(disp_bmp_data, background_bitmap);
                        }

                        // render 3D objects if augmented reality is enabled
                        render3D(d3dDevice);


                        if (!single_step) video_frame_number++;
                    }
                    else
                    {
                        video_frame_number = 1;
                        test.init();
                    }
                }
                else
                {
                    //update from cameras
                    if (captureState[0] == 2)
                    {
                        captureState[0] = 0;
                        captureState[1] = 0;

                        wdth = picLeftImage.Image.Width;
                        hght = picLeftImage.Image.Height;

                        if (raw_image == null)
                        {
                            // initialise raw image object
                            raw_image = new classimage_mono();
                            raw_image.createImage(wdth, hght);
                            raw_image_colour = new classimage();
                            raw_image_colour.createImage(wdth, hght);
                        }
                        
                        // shove the bitmap from the camera into an image object
                        raw_image_colour.updateFromBitmap(global_variables.left_bmp, 1, wdth, hght);
                        raw_image.copyImage(raw_image_colour);

                        if (!outputInitialised)
                        {
                            // initialise image bitmaps
                            picOutput1.Image = new Bitmap(global_variables.standard_width, global_variables.standard_height, PixelFormat.Format24bppRgb);
                            background_bitmap = new Bitmap(global_variables.standard_width, global_variables.standard_height, PixelFormat.Format24bppRgb);
                            disp_bmp_data = new Byte[global_variables.standard_width * global_variables.standard_height * 3];
                            output_image = new classimage_mono();
                            output_image.createImage(global_variables.standard_width, global_variables.standard_height);
                            outputInitialised = true;
                        }
                        output_image.updateFromImage(raw_image);

                        txtCaptureTime.Text = Convert.ToString(endStopWatch());
                        beginStopWatch();

                        // one processing cycle update
                        test.GoOneStep(raw_image, raw_image_colour, output_image);

                        //show info
                        txtProcessingTime.Text = Convert.ToString(endStopWatch());
                        txtfps.Text = Convert.ToString(test.frames_per_second);
                        txtSpeed.Text = Convert.ToString(test.speed);

                        beginStopWatch();

                        // show crosshair features or overhead map
                        test.ShowFeatures(output_image, selected_display);
                        output_image.saveToBitmap(disp_bmp_data, global_variables.standard_width, global_variables.standard_height);

                        // either show the output image in the picturebox, or transfer it
                        // to a separate background bitmap for use during 3D rendering
                        if ((selected_display != MonoSLAM.DISPLAY_AUGMENTED_REALITY) ||
                            ((selected_display == MonoSLAM.DISPLAY_AUGMENTED_REALITY) && (!(initialised_3D && test.enable_mapping))))
                        {
                            updatebitmap(disp_bmp_data, (Bitmap)picOutput1.Image);
                            picOutput1.Refresh();
                        }
                        else
                        {
                            updatebitmap(disp_bmp_data, background_bitmap);
                        }

                        // render 3D objects if augmented reality is enabled
                        render3D(d3dDevice);

                        if (reset)
                        {
                            test.init();
                            reset = false;
                        }
                    }
                }

            }

        }
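The handler brackets capture and processing with beginStopWatch()/endStopWatch(), whose implementations are not shown in these examples. A minimal sketch of how such helpers could be written around System.Diagnostics.Stopwatch (illustrative only, not the project's actual code):

        private Stopwatch frame_timer = new Stopwatch();   // System.Diagnostics.Stopwatch

        // restart the timer at the start of a measured section
        private void beginStopWatch()
        {
            frame_timer.Reset();
            frame_timer.Start();
        }

        // stop the timer and return the elapsed time in milliseconds
        private long endStopWatch()
        {
            frame_timer.Stop();
            return frame_timer.ElapsedMilliseconds;
        }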
Example #7
        /// <summary>
        /// one processing cycle update
        /// </summary>
        /// <param name="img">mono camera image</param>
        /// <param name="img_colour">colour camera image</param>
        /// <param name="output_img">image into which results are drawn</param>
        public void GoOneStep(classimage_mono img, classimage img_colour, classimage_mono output_img)
        {
            float delta_t;

            // half the field of view, converted from degrees to radians
            MAXIMUM_ANGLE_DIFFERENCE = 3.1415927f * (field_of_vision / 2) / 180.0f;

            //set calibration parameters
            if (calibrating)
            {
                monoslaminterface.set_camera_calibration(9e-06f, lens_distortion, lens_distortion, centre_of__distortion_x, centre_of__distortion_y, 1);
            }

            //load in the current raw image
            Robot r = monoslaminterface.GetRobot();

            r.calibrating = (!enable_mapping) | calibrating;
            r.load_new_image(img, img_colour, output_img, show_ellipses);

            stopwatch.Stop();

            //get the elapsed time in seconds
            delta_t = stopwatch.ElapsedMilliseconds / 1000.0f;

            //if using the simulation set the frame rate at a fixed value
            if (simulation_mode)
            {
                delta_t = 1.0f / 20.0f;
            }

            if (delta_t > 0)
            {
                // frame rate, truncated to one decimal place
                frames_per_second = (int)((1.0f / delta_t) * 10) / 10.0f;

                //report if it's taking a long time
                frame_rate_warning = false;
                if (delta_t > 0.2)
                {
                    frame_rate_warning = true;
                    Debug.WriteLine("Time between frames (sec): " + Convert.ToString(delta_t));
                }

                //update the state of the system
                monoslaminterface.GoOneStep(delta_t, enable_mapping);
            }

            stopwatch.Reset();
            stopwatch.Start();

            // speed, truncated to two decimal places
            speed = (int)(monoslaminterface.Speed() * 100) / 100.0f;

            if ((enable_mapping) && (monoslaminterface.number_of_matched_features < 2))
            {
                enable_mapping = false;
                init();
            }

            if ((!enable_mapping) && (monoslaminterface.number_of_matched_features >= 3) && (!calibrating))
            {
                registered_seconds += delta_t;
                if (registered_seconds > 1)
                {
                    Debug.WriteLine("Ready to go");
                    enable_mapping     = true;
                    registered_seconds = 0;
                }
            }
            else
            {
                if (registered_seconds > 0)
                {
                    Debug.WriteLine("Waiting for initial registration");
                }
                registered_seconds = 0;
            }
        }
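frames_per_second and speed use the same truncate-and-divide idiom to limit the displayed precision. A small illustrative helper expressing the same idea (hypothetical, not part of the codebase; assumes System.Math is available):

        // truncate a value to a fixed number of decimal places, as done above for
        // frames_per_second (1 place) and speed (2 places)
        static float TruncateTo(float value, int decimalPlaces)
        {
            float scale = (float)Math.Pow(10, decimalPlaces);
            return (int)(value * scale) / scale;
        }

        // e.g. TruncateTo(1.0f / delta_t, 1) == (int)((1.0f / delta_t) * 10) / 10.0f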