Example #1
        public void forward()
        {
            // in the forward pass the agent simply behaves in the environment
            // build the input to the brain from the eye sensors
            int num_eyes = this.eyes.Count;

            double[] input_array = new double[num_eyes * 3];
            for (int i = 0; i < num_eyes; i++)
            {
                Eye e = this.eyes[i];
                // default every input to 1.0, i.e. "nothing sensed" (proximity at max range)
                input_array[i * 3]     = 1.0;
                input_array[i * 3 + 1] = 1.0;
                input_array[i * 3 + 2] = 1.0;
                if (e.sensed_type != -1)
                {
                    // sensed_type is 0 for wall, 1 for food and 2 for poison.
                    // let's do a 1-of-k encoding into the input array
                    input_array[i * 3 + e.sensed_type] = e.sensed_proximity / e.max_range; // normalize to [0,1]
                }
            }

            // wrap the encoded sensors in a num_eyes x 3 x 1 volume for the network
            Volume input = new Volume(num_eyes, 3, 1);

            input.w = input_array;

            // get action from brain
            int actionix = this.brain.forward(input);
            int action   = this.actions[actionix];

            this.actionix = actionix; // back this up

            // demultiplex into behavior variables
            //this.rot1 = action[0] * 1;
            //this.rot2 = action[1] * 1;

            // apply the chosen action to the game
            SuperHexagon.GetInstance().setMovement(action);
        }
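
The forward pass builds a flat sensor vector with a 1-of-k encoding: each eye owns three slots (wall, food, poison), every slot defaults to 1.0, and only the slot matching the sensed type is overwritten with the normalized proximity. The helper below is a minimal, self-contained sketch of that encoding; EyeReading and EncodeEyes are hypothetical names introduced here for illustration and are not part of the original code.

        // Stand-in for the example's Eye, reduced to the fields the encoder needs.
        public class EyeReading
        {
            public int sensed_type;         // -1 = nothing, 0 = wall, 1 = food, 2 = poison
            public double sensed_proximity; // distance to the sensed object
            public double max_range;        // maximum sensing distance
        }

        // Hypothetical helper mirroring the encoding done inline in forward().
        public static double[] EncodeEyes(EyeReading[] eyes)
        {
            double[] input = new double[eyes.Length * 3];
            for (int k = 0; k < input.Length; k++)
            {
                input[k] = 1.0; // 1.0 means "nothing sensed within range"
            }
            for (int i = 0; i < eyes.Length; i++)
            {
                EyeReading e = eyes[i];
                if (e.sensed_type != -1)
                {
                    // only the slot of the sensed type carries the normalized proximity in [0,1]
                    input[i * 3 + e.sensed_type] = e.sensed_proximity / e.max_range;
                }
            }
            return input;
        }

forward() above does the same thing inline before wrapping the result in a Volume and handing it to brain.forward().
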
Example #2
        public void tick()
        {
            // tick the environment
            this.clock++;

            // fix the input to all agents based on the environment: process the eyes
            this.collpoints = new List<Intersect>();
            for (int i = 0, n = this.agents.Count; i < n; i++)
            {
                Agent a = this.agents[i];
                a.angle = SuperHexagon.GetInstance().getPlayerAngle();
                if (SuperHexagon.GetInstance().isDead())
                {
                    // the player died, so reset the game state before sensing again
                    SuperHexagon.GetInstance().resetState();
                }
                for (int ei = 0, ne = a.eyes.Count; ei < ne; ei++)
                {
                    Eye e = a.eyes[ei];
                    Dictionary<int, HyperHexagon.Wall> walls = SuperHexagon.GetInstance().getForeseeableWalls();
                    HyperHexagon.Wall wall;
                    if (walls.TryGetValue(ei, out wall))
                    {
                        // a wall is approaching in this eye's slot
                        e.sensed_proximity = wall.distance;
                        e.sensed_type      = 2; // 2 for poison
                    }
                    else
                    {
                        // nothing in range: report max range and no sensed type
                        e.sensed_type      = -1;
                        e.sensed_proximity = e.max_range;
                    }
                    // we have a line from p to p->eyep
                    //Vec eyep = new Vec(a.p.x + e.max_range * Math.Sin(a.angle + e.slot * 60), a.p.y + e.max_range * Math.Cos(a.angle + e.slot * 60));
                    //Intersect res = this.stuff_collide_(a.p, eyep, true, true);
                    //if (res.intersect) {
                    //    // eye collided with wall
                    //    e.sensed_proximity = res.up.dist_from(a.p);
                    //    e.sensed_type = res.type;
                    //} else {
                    //    e.sensed_proximity = e.max_range;
                    //    e.sensed_type = -1;
                    //}
                }
            }

            // let the agents behave in the world based on their input
            for (int i = 0, n = this.agents.Count; i < n; i++)
            {
                this.agents[i].forward();
            }

            // apply the outputs of the agents to the environment
            for (int i = 0, n = this.agents.Count; i < n; i++)
            {
                Agent a = this.agents[i];
                //a.oldPosition = a.position; // back up old position
                a.oldAngle = a.angle; // back up the old angle

                // steer the agent according to outputs of wheel velocities

                //Vec v = new Vec(0, a.rad / 2.0);
                //v = v.rotate(a.angle + Math.PI / 2);
                //Vec w1p = a.p.add(v); // positions of wheel 1 and 2
                //Vec w2p = a.p.sub(v);
                //Vec vv = a.p.sub(w2p);
                //vv = vv.rotate(-a.rot1);
                //Vec vv2 = a.p.sub(w1p);
                //vv2 = vv2.rotate(a.rot2);
                //Vec np = w2p.add(vv);
                //np.scale(0.5);
                //Vec np2 = w1p.add(vv2);
                //np2.scale(0.5);
                //a.p = np.add(np2);

                //a.angle -= a.rot1;
                //if (a.angle < 0)
                //    a.angle += 2 * Math.PI;
                //a.angle += a.rot2;
                //if (a.angle > 2 * Math.PI)
                //    a.angle -= 2 * Math.PI;



                // agent is trying to move from p to op. Check walls
                //Intersect res = this.stuff_collide_(a.oldPosition, a.position, true, false);
                //if (res.intersect) {
                //    // wall collision! reset position
                //    a.position = a.oldPosition;
                //}

                //// handle boundary conditions
                //if (a.position.x < 0)
                //    a.position.x = 0;
                //if (a.position.x > this.W)
                //    a.position.x = this.W;
                //if (a.position.y < 0)
                //    a.position.y = 0;
                //if (a.position.y > this.H)
                //    a.position.y = this.H;
            }

            // tick all items
            //bool update_items = false;
            //for (int i = 0, n = this.items.Count; i < n; i++) {
            //    Item it = this.items[i];
            //    it.age += 1;

            //    // see if some agent gets lunch
            //    for (int j = 0, m = this.agents.Count; j < m; j++) {
            //        Agent a = this.agents[j];
            //        double d = a.position.dist_from(it.p);
            //        if (d < it.rad + a.rad) {
            //            // wait lets just make sure that this isn't through a wall
            //            Intersect rescheck = this.stuff_collide_(a.position, it.p, true, false);
            //            if (!rescheck.intersect) {
            //                // ding! nom nom nom
            //                if (it.type == 1)
            //                    a.digestion_signal += 5.0; // mmm delicious apple
            //                if (it.type == 2)
            //                    a.digestion_signal += -6.0; // ewww poison
            //                it.cleanup_ = true;
            //                update_items = true;
            //                break; // break out of loop, item was consumed
            //            }
            //        }
            //    }

            //    if (it.age > 5000 && this.clock % 100 == 0 && util.randf(0, 1) < 0.1) {
            //        it.cleanup_ = true; // replace this one, has been around too long
            //        update_items = true;
            //    }
            //}
            //if (update_items) {
            //    List<Item> nt = new List<Item>();
            //    for (int i = 0, n = this.items.Count; i < n; i++) {
            //        Item it = this.items[i];
            //        if (!it.cleanup_)
            //            nt.Add(it);
            //    }
            //    this.items = nt; // swap
            //}
            //if (this.items.Count < 30 && this.clock % 10 == 0 && util.randf(0, 1) < 0.25) {
            //    double newitx = util.randf(20, this.W - 20);
            //    double newity = util.randf(20, this.H - 20);
            //    int newitt = (int)util.randi(1, 3); // food or poison (1 and 2)
            //    Item newit = new Item(newitx, newity, newitt);
            //    this.items.Add(newit);
            //}

            // agents are given the opportunity to learn based on the feedback their action produced in the environment
            for (int i = 0, n = this.agents.Count; i < n; i++)
            {
                this.agents[i].backward();
            }
        }
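
tick() drives one sense-act-learn cycle per frame: read the eyes, let each agent's forward() pick an action, apply it to the game, and finally let backward() learn from the outcome. The skeleton below sketches that control flow with hypothetical minimal types (IBrain, SimpleAgent, SimpleWorld); it only illustrates the cycle and is not the project's actual API.

        // Hypothetical brain interface: pick an action, then learn from its reward.
        public interface IBrain
        {
            int forward(double[] state);  // choose an action index for the current state
            void backward(double reward); // learn from the reward of the last action
        }

        // Hypothetical agent that simply forwards both calls to its brain.
        public class SimpleAgent
        {
            private readonly IBrain brain;
            public int lastAction;

            public SimpleAgent(IBrain brain) { this.brain = brain; }

            public void forward(double[] state) { lastAction = brain.forward(state); }
            public void backward(double reward) { brain.backward(reward); }
        }

        // Hypothetical environment step mirroring the structure of tick() above.
        public class SimpleWorld
        {
            private readonly SimpleAgent agent;
            public SimpleWorld(SimpleAgent agent) { this.agent = agent; }

            public void tick(double[] sensors, System.Action<int> applyAction, System.Func<double> measureReward)
            {
                agent.forward(sensors);          // sense and choose an action
                applyAction(agent.lastAction);   // apply the action to the game
                agent.backward(measureReward()); // learn from the resulting reward
            }
        }

In Example #2 the same roles are filled by the eye-processing loop, Agent.forward() (which calls SuperHexagon.GetInstance().setMovement()), and Agent.backward().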