Example #1
        private void timUpdate_Tick(object sender, EventArgs e)
        {
            bool image_loaded;
            int wdth;
            int hght;

            if ((left_imageloaded) || (left_camera_running))
            {
                //get images from the two cameras
                captureCameraImages();

                //record depth images if necessary
                //recordDepthImages();

                // show or hide the tracking controls depending upon whether
                // mapping is enabled and the frame rate is adequate
                if ((!test.frame_rate_warning) || (!test.enable_mapping))
                {
                    if (!test.enable_mapping)
                        cmdBeginTracking.Visible = true;
                    else
                        cmdBeginTracking.Visible = false;

                    lblTracking.Visible = !cmdBeginTracking.Visible;
                    lblFrameRateWarning.Visible = false;
                }
                else
                {
                    lblTracking.Visible = false;
                    lblFrameRateWarning.Visible = true;
                }

                if (simulation_mode)
                {
                    //update from loaded images
                    image_loaded = loadVideoFrame(video_path, video_frame_number);
                    if (image_loaded)
                    {
                        wdth = picLeftImage.Image.Width;
                        hght = picLeftImage.Image.Height;

                        // allocate the image buffer and create the image objects on the first frame
                        if (global_variables.left_bmp == null)
                        {
                            global_variables.left_bmp = new Byte[wdth * hght * 3];
                            raw_image = new classimage_mono();
                            raw_image.createImage(wdth, hght);
                            raw_image_colour = new classimage();
                            raw_image_colour.createImage(wdth, hght);
                        }

                        // copy the loaded bitmap into the left image byte buffer
                        updatebitmap((Bitmap)picLeftImage.Image, global_variables.left_bmp);

                        raw_image_colour.updateFromBitmap(global_variables.left_bmp, 1, wdth, hght);
                        raw_image.copyImage(raw_image_colour);

                        if (!outputInitialised)
                        {
                            // initialise image bitmaps
                            picOutput1.Image = new Bitmap(global_variables.standard_width, global_variables.standard_height, PixelFormat.Format24bppRgb);
                            background_bitmap = new Bitmap(global_variables.standard_width, global_variables.standard_height, PixelFormat.Format24bppRgb);
                            disp_bmp_data = new Byte[global_variables.standard_width * global_variables.standard_height * 3];
                            output_image = new classimage_mono();
                            output_image.createImage(global_variables.standard_width, global_variables.standard_height);
                            outputInitialised = true;
                        }
                        output_image.updateFromImage(raw_image);

                        // show the frame capture time, then restart the stopwatch
                        // to time the processing step
                        txtCaptureTime.Text = Convert.ToString(endStopWatch());
                        beginStopWatch();

                        // one processing cycle update
                        test.GoOneStep(raw_image, raw_image_colour, output_image);

                        // show timing, frame rate and speed info
                        txtProcessingTime.Text = Convert.ToString(endStopWatch());
                        txtfps.Text = Convert.ToString(test.frames_per_second);
                        txtSpeed.Text = Convert.ToString(test.speed);

                        beginStopWatch();

                        // show crosshair features or overhead map
                        test.ShowFeatures(output_image, selected_display);
                        output_image.saveToBitmap(disp_bmp_data, global_variables.standard_width, global_variables.standard_height);

                        // either show the output image in the picturebox, or transfer it
                        // to a separate background bitmap for use during 3D rendering
                        if ((selected_display != MonoSLAM.DISPLAY_AUGMENTED_REALITY) ||
                            ((selected_display == MonoSLAM.DISPLAY_AUGMENTED_REALITY) && (!(initialised_3D && test.enable_mapping))))
                        {
                            updatebitmap(disp_bmp_data, (Bitmap)picOutput1.Image);
                            picOutput1.Refresh();
                        }
                        else
                        {
                            updatebitmap(disp_bmp_data, background_bitmap);
                        }

                        // render 3D objects if augmented reality is enabled
                        render3D(d3dDevice);

                        // move on to the next frame unless single stepping
                        if (!single_step) video_frame_number++;
                    }
                    else
                    {
                        // frame could not be loaded (end of the sequence): rewind and reinitialise
                        video_frame_number = 1;
                        test.init();
                    }
                }
                else
                {
                    //update from cameras
                    if (captureState[0] == 2)
                    {
                        // reset the capture state ready for the next frame grab
                        captureState[0] = 0;
                        captureState[1] = 0;

                        wdth = picLeftImage.Image.Width;
                        hght = picLeftImage.Image.Height;

                        if (raw_image == null)
                        {
                            // initialise raw image object
                            raw_image = new classimage_mono();
                            raw_image.createImage(wdth, hght);
                            raw_image_colour = new classimage();
                            raw_image_colour.createImage(wdth, hght);
                        }
                        
                        // shove the bitmap from the camera into an image object
                        raw_image_colour.updateFromBitmap(global_variables.left_bmp, 1, wdth, hght);
                        raw_image.copyImage(raw_image_colour);

                        if (!outputInitialised)
                        {
                            // initialise image bitmaps
                            picOutput1.Image = new Bitmap(global_variables.standard_width, global_variables.standard_height, PixelFormat.Format24bppRgb);
                            background_bitmap = new Bitmap(global_variables.standard_width, global_variables.standard_height, PixelFormat.Format24bppRgb);
                            disp_bmp_data = new Byte[global_variables.standard_width * global_variables.standard_height * 3];
                            output_image = new classimage_mono();
                            output_image.createImage(global_variables.standard_width, global_variables.standard_height);
                            outputInitialised = true;
                        }
                        output_image.updateFromImage(raw_image);

                        // show the frame capture time, then restart the stopwatch
                        // to time the processing step
                        txtCaptureTime.Text = Convert.ToString(endStopWatch());
                        beginStopWatch();

                        // one processing cycle update
                        test.GoOneStep(raw_image, raw_image_colour, output_image);

                        //show info
                        txtProcessingTime.Text = Convert.ToString(endStopWatch());
                        txtfps.Text = Convert.ToString(test.frames_per_second);
                        txtSpeed.Text = Convert.ToString(test.speed);

                        beginStopWatch();

                        // show crosshair features or overhead map
                        test.ShowFeatures(output_image, selected_display);
                        output_image.saveToBitmap(disp_bmp_data, global_variables.standard_width, global_variables.standard_height);

                        // either show the output image in the picturebox, or transfer it
                        // to a separate background bitmap for use during 3D rendering
                        if ((selected_display != MonoSLAM.DISPLAY_AUGMENTED_REALITY) ||
                            ((selected_display == MonoSLAM.DISPLAY_AUGMENTED_REALITY) && (!(initialised_3D && test.enable_mapping))))
                        {
                            updatebitmap(disp_bmp_data, (Bitmap)picOutput1.Image);
                            picOutput1.Refresh();
                        }
                        else
                        {
                            updatebitmap(disp_bmp_data, background_bitmap);
                        }

                        // render 3D objects if augmented reality is enabled
                        render3D(d3dDevice);

                        if (reset)
                        {
                            test.init();
                            reset = false;
                        }
                    }
                }

            }

        }
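
Both the simulation branch and the live-camera branch of timUpdate_Tick above repeat the same per-frame sequence (image conversion, GoOneStep, feature display, 2D/3D output). As a minimal sketch of how that duplicated sequence could be factored out once the image buffers have been initialised, assuming only the fields and helper methods already visible in the handler (the helper name processFrame is hypothetical, not project code):

        // Hypothetical helper (a sketch, not project code): the per-frame
        // pipeline shared by the simulation and live-camera branches above.
        private void processFrame(int wdth, int hght)
        {
            // convert the captured bitmap data into colour and mono image objects
            raw_image_colour.updateFromBitmap(global_variables.left_bmp, 1, wdth, hght);
            raw_image.copyImage(raw_image_colour);
            output_image.updateFromImage(raw_image);

            // show the capture time, then restart the stopwatch to time processing
            txtCaptureTime.Text = Convert.ToString(endStopWatch());
            beginStopWatch();

            // one processing cycle update
            test.GoOneStep(raw_image, raw_image_colour, output_image);

            // show processing time, frame rate and speed
            txtProcessingTime.Text = Convert.ToString(endStopWatch());
            txtfps.Text = Convert.ToString(test.frames_per_second);
            txtSpeed.Text = Convert.ToString(test.speed);
            beginStopWatch();

            // show crosshair features or overhead map
            test.ShowFeatures(output_image, selected_display);
            output_image.saveToBitmap(disp_bmp_data, global_variables.standard_width, global_variables.standard_height);

            // either show the output image in the picturebox, or transfer it to the
            // background bitmap used during 3D rendering (same condition as above, simplified)
            if ((selected_display != MonoSLAM.DISPLAY_AUGMENTED_REALITY) ||
                !(initialised_3D && test.enable_mapping))
            {
                updatebitmap(disp_bmp_data, (Bitmap)picOutput1.Image);
                picOutput1.Refresh();
            }
            else
            {
                updatebitmap(disp_bmp_data, background_bitmap);
            }

            // render 3D objects if augmented reality is enabled
            render3D(d3dDevice);
        }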
Example #2
        /// <summary>
        /// Creates a new classimage_mono to represent the currently-selected image
        /// patch, and also returns its location in the parameter z. The current
        /// image patch is set using set_image_selection_automatically() or manually by the
        /// user using set_image_selection().
        /// </summary>
        /// <param name="z">The measurement vector to be filled in</param>
        /// <returns>The classimage_mono holding this image patch information (allocated with new, so the caller takes ownership), or null if no patch is currently selected.</returns>
        public classimage_mono partially_initialise_point_feature(Vector z)
        {
            if (location_selected_flag) // Patch selected
            {
                // Go and initialise it in scene + 3D  
                classimage_mono hip = new classimage_mono();
                hip.createImage((int)Camera_Constants.BOXSIZE, (int)Camera_Constants.BOXSIZE);

                // Final save of fixated image patch
                copy_into_patch(image, hip, uu, vv);

                // And set measurement
                z.Put(0, uu);
                z.Put(1, vv);

                // return the patch
                return hip;
            }
            else
            {
                // No patch selected
                return null;
            }
        }
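
A hedged usage sketch for the method above (the caller below is hypothetical and not part of the project): the returned patch must be checked against null, and the measurement vector z receives the location of the selected patch.

            // Hypothetical caller (a sketch): 'feature_model' and 'new Vector(2)'
            // are assumptions, not names taken from the project code above.
            Vector z = new Vector(2);   // two-element measurement (u, v)
            classimage_mono patch = feature_model.partially_initialise_point_feature(z);
            if (patch != null)
            {
                // z now holds the selected patch centre (uu, vv)
                patch.SaveAsBitmapMono("new_feature_patch.bmp");
            }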
Example #3
        /// <summary>
        /// Create some default known feature patches for use with a target image,
        /// and save them as bitmaps
        /// </summary>
        /// <param name="path">path prefix used when saving the patch bitmaps</param>
        private void createDefaultKnownFeatures(String path)
        {
            Byte value = 0;
            Byte high_value = 180;
            Byte low_value = 60;
            classimage_mono known_feature = new classimage_mono();
            known_feature.createImage((int)Camera_Constants.BOXSIZE, (int)Camera_Constants.BOXSIZE);

            for (int i = 0; i < 4; i++)
            {
                for (int x = 0; x < Camera_Constants.BOXSIZE; x++)
                {
                    for (int y = 0; y < Camera_Constants.BOXSIZE; y++)
                    {
                        switch (i)
                        {
                            case 0:
                                {
                                    if ((x > Camera_Constants.BOXSIZE / 2) &&
                                        (y > Camera_Constants.BOXSIZE / 2))
                                        value = low_value;
                                    else
                                        value = high_value;
                                    break;
                                }
                            case 1:
                                {
                                    if ((x < Camera_Constants.BOXSIZE / 2) &&
                                        (y > Camera_Constants.BOXSIZE / 2))
                                        value = low_value;
                                    else
                                        value = high_value;
                                    break;
                                }
                            case 2:
                                {
                                    if ((x > Camera_Constants.BOXSIZE / 2) &&
                                        (y < Camera_Constants.BOXSIZE / 2))
                                        value = low_value;
                                    else
                                        value = high_value;
                                    break;
                                }
                            case 3:
                                {
                                    if ((x < Camera_Constants.BOXSIZE / 2) &&
                                        (y < Camera_Constants.BOXSIZE / 2))
                                        value = low_value;
                                    else
                                        value = high_value;
                                    break;
                                }
                        }

                        known_feature.image[x, y] = value;
                    }
                }
                known_feature.SaveAsBitmapMono(path + "known_patch" + Convert.ToString(i) + ".bmp");
            }
        }
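
The four switch cases above differ only in which quadrant of the box receives low_value. As an equivalent, more compact formulation (a sketch rather than project code, reusing the loop variables i, x and y from the method above), the switch body could be written as:

                        // Equivalent quadrant selection (a sketch): bit 0 of i
                        // picks the x half and bit 1 picks the y half that
                        // receives low_value; all other pixels get high_value.
                        bool low = ((i & 1) == 0 ? x > Camera_Constants.BOXSIZE / 2
                                                 : x < Camera_Constants.BOXSIZE / 2) &&
                                   ((i & 2) == 0 ? y > Camera_Constants.BOXSIZE / 2
                                                 : y < Camera_Constants.BOXSIZE / 2);
                        value = low ? low_value : high_value;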
Example #4
        //**************************Write Image Patch to Disk**************************

        /// <summary>
        /// Save the currently-selected patch to a file.
        /// </summary>
        /// <param name="name">filename to save the patch bitmap as</param>
        public void write_patch(String name)
        {
            classimage_mono hip = new classimage_mono();
            hip.createImage((int)Camera_Constants.BOXSIZE, (int)Camera_Constants.BOXSIZE);

            if (location_selected_flag)
            {
                // Copy the selected patch to the save space patch 
                copy_into_patch(image, hip, uu, vv);
                hip.SaveAsBitmapMono(name);
            }
        }
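
A hedged usage sketch (the caller and filename below are hypothetical): write_patch only produces a file when a patch has previously been selected, for example via set_image_selection() as mentioned in Example #2. Note that hip is allocated before the location_selected_flag check, so when nothing is selected the allocation is simply unused.

            // Hypothetical caller (a sketch): 'feature_model' and the filename
            // are assumptions, not taken from the project code above.
            feature_model.write_patch("selected_patch.bmp");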