Example #1
0
		public void CreateRay()
		{
			// Throw a single evidence ray from a stereo feature at image
			// position (20,10) with disparity 5 and white colour, then
			// verify that the ray and both of its vertices were created.
			stereoModel sensor_model = new stereoModel();
			evidenceRay ray = sensor_model.createRay(20, 10, 5, 0, 255, 255, 255);
			Assert.IsNotNull(ray.vertices);
			Assert.IsNotNull(ray.vertices[0]);
			Assert.IsNotNull(ray.vertices[1]);
		}
Example #2
0
        /// <summary>
        /// Initialise the mapping components for the given number of stereo cameras:
        /// creates the stereo head, the inverse sensor model, the local occupancy
        /// grid and the motion model, and zeroes the stored wheel encoder positions.
        /// </summary>
        /// <param name="no_of_stereo_cameras">the number of stereo cameras on the robot (not the total number of cameras)</param>
        /// <param name="rays_per_stereo_camera">the number of rays which will be thrown from each stereo camera per time step</param>
        private void init(
		    int no_of_stereo_cameras, 
            int rays_per_stereo_camera)
        {
            this.no_of_stereo_cameras = no_of_stereo_cameras;

            // head and shoulders: per-camera geometry container for the stereo pairs
            head = new stereoHead(no_of_stereo_cameras);

            // inverse sensor model used for mapping (turns stereo observations
            // into occupancy evidence)
            inverseSensorModel = new stereoModel();

            // set the number of stereo features to be detected and inserted into the grid
            // on each time step.  This figure should be large enough to get reasonable
            // detail, but not so large that the mapping consumes a huge amount of 
            // processing resource
            inverseSensorModel.no_of_stereo_features = rays_per_stereo_camera;

            // add local occupancy grid
            // (must happen before the motion model below, which is given
            // a reference to LocalGrid)
            createLocalGrid();
	
            // create a motion model for each possible grid
            // NOTE(review): the third argument (100) is presumably the number of
            // pose hypotheses/particles maintained — confirm against the
            // motionModel constructor
            motion = new motionModel(this, LocalGrid, 100);
            
            // zero encoder positions so the first odometry update has a baseline
            prev_left_wheel_encoder = 0;
            prev_right_wheel_encoder = 0;
        }
Example #3
0
		public void CreateObservation()
		{
			// stereo geometry for the synthetic observation
		    float baseline = 120;
		    int image_width = 320;
		    int image_height = 240;
		    float FOV_degrees = 68;
			int no_of_stereo_features = 10;
			bool translate = false;

			// build a set of features spaced evenly across the centre row of
			// the image, all with disparity 1 and a uniform grey colour
			float[] stereo_features = new float[no_of_stereo_features * 4];
			byte[] stereo_features_colour = new byte[no_of_stereo_features * 3];
			for (int f = 0; f < no_of_stereo_features; f++)
			{
				stereo_features[f * 4] = 1;
				stereo_features[f * 4 + 1] = f * image_width / no_of_stereo_features;
				stereo_features[f * 4 + 2] = image_height / 2;
				stereo_features[f * 4 + 3] = 1;
				stereo_features_colour[f * 3] = 200;
				stereo_features_colour[f * 3 + 1] = 200;
				stereo_features_colour[f * 3 + 2] = 200;
			}

			// create the observation at four observer headings and render the
			// resulting evidence rays to a bitmap for visual inspection
			for (int rotation_degrees = 0; rotation_degrees < 360; rotation_degrees += 90)
			{
				stereoModel model = new stereoModel();
				pos3D observer = new pos3D(0, 0, 0);
				observer = observer.rotate(rotation_degrees / 180.0f * (float)Math.PI, 0, 0);
	            List<evidenceRay> rays = model.createObservation(
				    observer,
				    baseline,
				    image_width,
				    image_height,
				    FOV_degrees,
				    stereo_features,
				    stereo_features_colour,
				    translate);

				// compute the bounding box of all ray vertices in the x/y plane
				float min_x = float.MaxValue;
				float min_y = float.MaxValue;
				float max_x = float.MinValue;
				float max_y = float.MinValue;
				for (int f = 0; f < no_of_stereo_features; f++)
				{
					for (int v = 0; v < rays[f].vertices.Length; v++)
					{
					    Console.WriteLine("Vertex " + v.ToString());
					    Console.WriteLine("xyz: " + rays[f].vertices[v].x.ToString() + " " + rays[f].vertices[v].y.ToString() + " " + rays[f].vertices[v].z.ToString());

						min_x = Math.Min(min_x, rays[f].vertices[v].x);
						max_x = Math.Max(max_x, rays[f].vertices[v].x);
						min_y = Math.Min(min_y, rays[f].vertices[v].y);
						max_y = Math.Max(max_y, rays[f].vertices[v].y);
					}
				}

				// render each ray as a black line on a white square bitmap,
				// scaled so the bounding box fills the image
				int img_width = 640;
				Bitmap bmp = new Bitmap(img_width, img_width, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
				byte[] img = new byte[img_width * img_width * 3];
				for (int n = img.Length - 1; n >= 0; n--) img[n] = 255;

				for (int f = 0; f < no_of_stereo_features; f++)
				{
					int x0 = (int)((rays[f].vertices[0].x - min_x) * img_width / (max_x - min_x));
					int y0 = (int)((rays[f].vertices[0].y - min_y) * img_width / (max_y - min_y));
					int x1 = (int)((rays[f].vertices[1].x - min_x) * img_width / (max_x - min_x));
					int y1 = (int)((rays[f].vertices[1].y - min_y) * img_width / (max_y - min_y));
					drawing.drawLine(img, img_width, img_width, x0, y0, x1, y1, 0, 0, 0, 0, false);
				}

				// save the rendering so the result can be inspected by eye
				BitmapArrayConversions.updatebitmap_unsafe(img, bmp);
				bmp.Save("dpslam_tests_createobservation_" + rotation_degrees.ToString() + ".bmp", System.Drawing.Imaging.ImageFormat.Bmp);
				Console.WriteLine("dpslam_tests_createobservation_" + rotation_degrees.ToString() + ".bmp");
			}
		}