// Example 1
        /**
         * <summary>Sets the preferred velocity of every agent to a unit-speed
         * vector toward its goal, with a tiny random perturbation to break
         * symmetry deadlocks.</summary>
         */
        public void setPreferredVelocities()
        {
            /*
             * Set the preferred velocity to be a vector of unit magnitude
             * (speed) in the direction of the goal.
             */
            for (int i = 0; i < sim_.getNumAgents(); ++i)
            {
                RVO.Vector2 goalVector = sim_.getAgentGoal(i) - sim_.getAgentPosition(i);

                /* Normalize only when the goal is more than one unit away, so
                 * agents naturally slow down as they arrive at the goal. */
                if (RVO.Vector2.absSq(goalVector) > 1.0f)
                {
                    goalVector = RVO.Vector2.normalize(goalVector);
                }

                /* Perturb a little to avoid deadlocks due to perfect symmetry. */
                float angle = (float)random.NextDouble() * 2.0f * (float)Math.PI;
                float dist  = (float)random.NextDouble() * 0.0001f;

                /* Apply the perturbed preferred velocity with a single setter
                 * call instead of set + read-back + set (assumes
                 * setAgentPrefVelocity stores the value verbatim — confirm). */
                sim_.setAgentPrefVelocity(i, goalVector +
                                          dist * new RVO.Vector2((float)Math.Cos(angle), (float)Math.Sin(angle)));
            }
        }
// Example 2
        /// <summary>
        /// Appends one tab-separated row per agent (position, velocity,
        /// acceleration, distance to the closest agent, local densities and the
        /// agent's leader) to "Data/n{N}[f]_agents_data.csv", writing the
        /// header line first when the file does not exist yet.
        /// </summary>
        /// <param name="sim">Simulator whose agents are dumped.</param>
        /// <param name="looped">Forwarded to the distance/density queries.</param>
        /// <param name="follow">When true an "f" is inserted in the file name.</param>
        internal static void saveData(RVOSimulator sim, bool looped, bool follow)
        {
            string name = "Data/n";
            string num  = sim.getNumAgents().ToString();

            name += num;
            if (follow)
            {
                name += "f";
            }
            name += "_agents_data.csv";
            System.IO.Directory.CreateDirectory("Data/");

            /* The append-mode StreamWriter creates the file if needed, so no
             * separate File.Create is required; we only need to know whether
             * the header must be written. */
            bool writeHeader = !File.Exists(name);

            /* Open the writer once for the whole dump: the original reopened
             * the file for every agent, costing N open/close cycles per call. */
            using (TextWriter tw = new StreamWriter(name, true))
            {
                if (writeHeader)
                {
                    tw.WriteLine("Agent \t Position X \t  Position Y \t Velocity X \t Velocity Y \t  Acceleration X + \t  Acceleration Y + \t Closer Agent Distance  \t LocalDensity 2  \t  LocalDensity 9  \t AgentLeaderNo \t Heure");
                }

                for (int i = 0; i < sim.getNumAgents(); i++)
                {
                    tw.WriteLine(i + "\t" + sim.getAgentPosition(i).x() + "\t" + sim.getAgentPosition(i).y() + "\t" + sim.getAgentVelocity(i).x() + "\t" + sim.getAgentVelocity(i).y() + "\t" + sim.getAgentAcceleration(i).x() + "\t" + sim.getAgentAcceleration(i).y() + "\t"
                                 + sim.getAgentDistanceWithCloserAgent(i, looped) + "\t"
                                 + sim.getAgentLocalDensity(i, looped, 2) + "\t" + sim.getAgentLocalDensity(i, looped, 9) + "\t"
                                 + sim.getAgentLeaderNo(i) + "\t" + DateTime.Now);
                }
            }
        }
// Example 3
        /// <summary>
        /// Writes (overwriting any previous content) one agent's travelled
        /// distance and elapsed time to "Data/n{N}[f]_end_data.csv" when it
        /// reaches the end of the corridor. Currently unused.
        /// </summary>
        /// <param name="sim">Simulator the agent belongs to.</param>
        /// <param name="agentNo">Index of the agent being recorded.</param>
        /// <param name="follow">When true an "f" is inserted in the file name.</param>
        static void saveAgentData(RVOSimulator sim, int agentNo, bool follow)
        {
            string name = "Data/n";
            string num  = sim.getNumAgents().ToString();

            name += num;
            name += follow ? "f" : "";
            name += "_end_data.csv";

            string row = agentNo.ToString() + '\t' + sim.getAgentCptDist(agentNo).ToString() + '\t' + sim.getAgentCptTime(agentNo);
            System.IO.File.WriteAllText(name, row);
        }
// Example 4
 /**
  * <summary>Constructs a k-D tree for the given simulator, sizing the
  * agent-node array to the simulator's current agent count.</summary>
  */
 internal KdTree(RVOSimulator sim)
 {
     sim_ = sim;
     agentTree_ = new AgentTreeNode[sim.getNumAgents()];
 }