Example #1
0
 /// <summary>
 /// Builds a RotationPacket from the unrotated width, height, and corner coordinates, since static images
 /// never rotate. Keeping this override around spares SensorField.Update() from needing special-case code
 /// for static images.
 /// </summary>
 /// <param name="includeRotatedPixels">If true, the packet also carries the image's pixel data.</param>
 /// <returns>A RotationPacket describing this image's (unrotated) bounds, optionally with pixel contents.</returns>
 public override RotationPacket getRotationPacketWithoutSensors(bool includeRotatedPixels = false)
 {
     // NW corner is the image position; SE corner is offset by the full texture size.
     var nwCorner = VectorHelpers.asXNAPoint(Position);
     var seCorner = VectorHelpers.asXNAPoint(Position + new Vector2(Texture.Width, Texture.Height));

     if (includeRotatedPixels)
     {
         return new RotationPacket(Texture.Width, Texture.Height, nwCorner, seCorner, TextureAsColorArray);
     }

     return new RotationPacket(Texture.Width, Texture.Height, nwCorner, seCorner);
 }
Example #2
0
        /// <summary>
        /// Returns the color contents of the area defined by the sensor field. This output will feed into the neural controller inputs.
        /// The neural controller has 3 input layers, which are arranged on the same horizontal level. The current version of this function
        /// assumes the sensor field is a square so that the sensor data can meaningfully be stored in a 2D array. A better version of this
        /// function will be made later so that sensor fields with different shapes are supported.
        /// </summary>
        /// <param name="creature">The creature whose sensor field is being sampled; its Position is measured from the upper left corner of the world.</param>
        /// <param name="creaturePacket">The creature's current rotation packet, used when computing which images intersect the field.</param>
        /// <exception cref="Exception">Thrown when no non-transparent pixel was found under any sensor texel.</exception>
        public void Update(Creature creature, RotationPacket creaturePacket)
        {
            // (These two variables don't factor into the actual sensing, but they are necessary if we want to visualize the sensor contents.)
            Color[] sensorContentsPixels = new Color[Simulator.FieldTexture.Width * Simulator.FieldTexture.Height];
            for (int row = 0; row < Simulator.FieldTexture.Height; row++)
            {
                for (int col = 0; col < Simulator.FieldTexture.Width; col++)
                {
                    sensorContentsPixels[col + (row * Simulator.FieldTexture.Width)] = Color.Transparent;
                }
            }

            // Reset the sensors
            resetArrays();

            // Find the new bounding box coordinates using the new creatureCenter.
            // Blind creatures only ever "see" the initial background; universal planters see the live
            // background; everyone else gets the full set of images intersecting their field.
            List <Image> intersectingImages;

            if (Simulator.blindCreatures)
            {
                intersectingImages = new List <Image>();
                intersectingImages.Add(Simulator.initialBackground);
            }
            else if (Simulator.everyoneCanPlant)
            {
                intersectingImages = new List <Image>();
                intersectingImages.Add(Simulator.backgroundImage);
            }
            else
            {
                intersectingImages = updateIntersectingImages(creature, creaturePacket);
            }

            // Grab the rotation packets (with pixel data) for each of the images in intersectingImages
            List <RotationPacket> rotationPackets = new List <RotationPacket>();

            for (int i = 0; i < intersectingImages.Count; i++)
            {
                rotationPackets.Add(intersectingImages[i].getRotationPacketWithoutSensors(true));
            }


            // Find the global coordinates of the creature's position and the sensor field's NW coordinate,
            // along with the local coordinates of the creature's center
            Point   creaturePosition = VectorHelpers.asXNAPoint(creature.Position);
            Point   sensorNWCoord    = new Point(creaturePosition.X + creature.SensorFieldAnchorPoint.X - (SegmentLength * ResolutionX) / 2, creaturePosition.Y + creature.SensorFieldAnchorPoint.Y - (SegmentLength * ResolutionY) / 2);
            Vector2 creatureCenter   = new Vector2(creature.Texture.Width / 2, creature.Texture.Height / 2);

            // Iterate through the sensor field texel positions and query for non-transparent pixels underneath the field
            Color worldTexel;
            int   indexIntoArray;

            for (int x = 0; x < SegmentLength; x++)
            {
                for (int y = 0; y < SegmentLength; y++)
                {
                    // Get the global position that corresponds to the internal (x,y) coordinates,
                    // rotated around the creature's center by its current heading
                    Point globalSensorCoord = MathHelpers.rotateAroundPoint(sensorNWCoord.X + SensorArray[x, y].X, sensorNWCoord.Y + SensorArray[x, y].Y, Convert.ToInt32(creature.Position.X + creatureCenter.X), Convert.ToInt32(creature.Position.Y + creatureCenter.Y), creature.XNAHeading);

                    // Loop through each image, starting with the one that's closest to (yet still underneath) the sensor field
                    for (int i = 0; i < intersectingImages.Count; i++)
                    {
                        // Check to make sure the image actually has a pixel located at the (global) x,y we're checking
                        Point imageNW = rotationPackets[i].NWCoord;
                        Point imageSE = rotationPackets[i].SECoord;
                        if (globalSensorCoord.X >= imageNW.X && globalSensorCoord.X < imageSE.X && globalSensorCoord.Y >= imageNW.Y && globalSensorCoord.Y < imageSE.Y)
                        {
                            // Row-major index into the image's rotated pixel array
                            indexIntoArray = (globalSensorCoord.X - imageNW.X + ((globalSensorCoord.Y - imageNW.Y) * rotationPackets[i].NewWidth));
                            worldTexel     = rotationPackets[i].RotatedPixels[indexIntoArray];
                            if (worldTexel.A != 0)
                            {
                                // If we get here, the sensor field is working as it should!
                                sensedSomething = true;

                                // Store the sensed RGB values in an ANN-friendly format (scaled from [0,255] to [-1,1])
                                R[x, y] = MathHelpers.Scale(worldTexel.R, 0, 255, -1, 1);
                                G[x, y] = MathHelpers.Scale(worldTexel.G, 0, 255, -1, 1);
                                B[x, y] = MathHelpers.Scale(worldTexel.B, 0, 255, -1, 1);

                                // If we're visualizing the sensor field contents, we need to update the texture-to-be-drawn also
                                if (Simulator.drawSensorField && (!Simulator.depthTest || creature.ID == Simulator.manuallyControlledCreatureID))
                                {
                                    // NOTE(review): this index assumes FieldTexture.Width == ResolutionX * SegmentLength
                                    // (and that the y term is the row stride) — confirm against FieldTexture's dimensions
                                    sensorContentsPixels[x + ((y * ResolutionX) * SegmentLength)] = worldTexel;
                                }

                                // Then stop looping through the images and move on to the next texel location
                                break;
                            }
                        }
                    }
                }
            }

            // If we want to visualize the sensor field, we have to do it before we dispose of the temporary texture
            if (Simulator.drawSensorField)
            {
                Simulator.sensorContentsTexture.SetData(sensorContentsPixels);
            }

            // Raise an exception if the sensors are broken
            if (!sensedSomething)
            {
                throw new Exception("Problem: SensorField.Update() looped through the pixels in the sensor field without finding any pixels with a nonzero alpha value.");
            }

            // Reset the exception flag
            sensedSomething = false;
        }
Example #3
0
        /// <summary>
        /// Returns the regular old width, height, etc. because static images do not rotate. However, leaving this
        /// version of the function out would require some hacky coding over in the SensorField.Update() function.
        /// </summary>
        /// <returns>A RotationPacket holding the unrotated texture dimensions and bounding-box corners.</returns>
        public override RotationPacket getRotationPacket()
        {
            // The bounding box runs from the image position (NW) to position + texture size (SE).
            var nwCorner = VectorHelpers.asXNAPoint(Position);
            var seCorner = new Point(nwCorner.X + Texture.Width, nwCorner.Y + Texture.Height);

            return new RotationPacket(Texture.Width, Texture.Height, nwCorner, seCorner);
        }