Example #1
        private void EditObjectives()
        {
            var vm = new CustomObjectivesViewModel(PrimaryAgent.Environment);

            vm.InitializeObjectives();

            Views.CustomObjectivesView dialog = new Views.CustomObjectivesView();
            dialog.DataContext = vm;

            var result = dialog.ShowDialog();

            if (result.HasValue &&
                result.Value)
            {
                if (PrimaryAgent.Environment != MazeVm.Maze)
                {
                    PrimaryAgent.Environment = (MazeBase)MazeVm.Maze;
                }

                PrimaryAgent.Environment.AdditionalRewards.Clear();

                foreach (var objective in vm.Objectives)
                {
                    PrimaryAgent.Environment.AddReward(objective);
                }

                if (SecondaryAgent != null)
                {
                    SecondaryAgent.Environment = MazeUtilities.CopyEnvironment(PrimaryAgent.Environment);
                }

                MazeVm.InitializeMaze();
            }
        }
Example #2
        private void OpenMenuItem_Click(object sender, EventArgs e)
        {
            OpenFileDialog dlg = new OpenFileDialog();

            dlg.Filter = "Maze Files (*.maze)|*.maze";

            if (dlg.ShowDialog() == DialogResult.OK)
            {
                _walls.Clear();
                mazeSpace.Enabled = false;

                var loaded = MazeUtilities.LoadObject<MazeAgent>(dlg.FileName);
                _agentPrimary                 = MazeUtilities.ConvertLoadedAgent(loaded);
                _agentSecondary               = MazeUtilities.ConvertLoadedAgent(loaded);
                _agentSecondary.Environment   = MazeUtilities.CopyEnvironment(loaded.Environment);
                _agentSecondary.StartPosition = _agentPrimary.StartPosition;

                AgentSubscribeEvents(_agentPrimary);
                AgentSubscribeEvents(_agentSecondary);

                SetFormValuesFromMaze();

                _needsRetrain     = true;
                mazeSpace.Enabled = true;
            }
        }
Example #3
        private void LoadAgent(string path)
        {
            var loaded = MazeUtilities.LoadObject<MazeAgent>(path);

            loaded.Environment.QualitySaveFrequency = 100;
            PrimaryAgent                 = MazeUtilities.ConvertLoadedAgent(loaded);
            SecondaryAgent               = MazeUtilities.ConvertLoadedAgent(loaded);
            SecondaryAgent.Environment   = MazeUtilities.CopyEnvironment(loaded.Environment);
            SecondaryAgent.StartPosition = PrimaryAgent.StartPosition;

            if (PrimaryAgent.LearningStyle == QLearning.Core.LearningStyles.QLearning)
            {
                SecondaryAgent.LearningStyle = QLearning.Core.LearningStyles.SARSA;
            }
            else
            {
                SecondaryAgent.LearningStyle = QLearning.Core.LearningStyles.QLearning;
            }

            OnPropertyChanged(nameof(SelectedLearningStyle));
            OnPropertyChanged(nameof(PrimaryAgent));
            OnPropertyChanged(nameof(SecondaryAgent));
            OnPropertyChanged(nameof(GoalPosition));
            OnPropertyChanged(nameof(MazeVm));
        }
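
A hedged usage sketch: LoadAgent only takes a path, so in the WPF view model it would presumably be fed from an open-file dialog, mirroring the WinForms handler in Example #2. The OpenAgentMaze name below is assumed, and the filter string is borrowed from Example #6.

        // Hypothetical sketch: feeding LoadAgent from a WPF open-file dialog.
        private void OpenAgentMaze()
        {
            var dialog = new Microsoft.Win32.OpenFileDialog();

            dialog.Filter = "Agent Maze File|*.maze";

            if (dialog.ShowDialog() == true)
            {
                LoadAgent(dialog.FileName);
            }
        }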
Example #4
        private void ChangeGoalPosition(int newGoal)
        {
            var space = MazeVm.GetSpaceByPosition(MazeVm.Maze.GoalPosition);

            if (space != null)
            {
                space.IsGoal = false;
            }

            space                    = MazeVm.GetSpaceByPosition(newGoal);
            space.IsGoal             = true;
            MazeVm.Maze.GoalPosition = newGoal;

            if (PrimaryAgent.Environment == null)
            {
                PrimaryAgent.Environment = new MazeBase(MazeVm.Columns, MazeVm.Rows, PrimaryAgent.StartPosition, newGoal, 200);
                PrimaryAgent.Environment.Initialize();
                SecondaryAgent.Environment = MazeUtilities.CopyEnvironment(PrimaryAgent.Environment);
            }
            else
            {
                PrimaryAgent.Environment.GoalPosition   = newGoal;
                SecondaryAgent.Environment.GoalPosition = newGoal;
            }
        }
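
ChangeGoalPosition is presumably driven by a bound GoalPosition property (Examples #3 and #7 raise OnPropertyChanged(nameof(GoalPosition))). A minimal sketch of such a property, with the shape of the setter assumed rather than taken from the source:

        // Hypothetical sketch: a bound property delegating to ChangeGoalPosition.
        public int GoalPosition
        {
            get => MazeVm.Maze.GoalPosition;
            set
            {
                ChangeGoalPosition(value);
                OnPropertyChanged(nameof(GoalPosition));
            }
        }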
Example #5
        private void SaveMenuItem_Click(object sender, EventArgs e)
        {
            SaveFileDialog dlg = new SaveFileDialog();

            dlg.Filter = "Maze Files (*.maze)|*.maze";

            if (dlg.ShowDialog() == DialogResult.OK)
            {
                MazeUtilities.SaveObject(dlg.FileName, _agentPrimary);
            }
        }
Example #6
        private void SaveAgentMaze()
        {
            var dialog = new SaveFileDialog();

            dialog.Filter = "Agent Maze File|*.maze";
            bool? result = dialog.ShowDialog();

            if (result.HasValue && result.Value)
            {
                MazeUtilities.SaveObject(dialog.FileName, PrimaryAgent);
            }
        }
Example #7
        private void NewAgentMaze()
        {
            PrimaryAgent = new MazeAgent(0.5, 0.5, 1000, 1000, 3);
            PrimaryAgent.MaximumAllowedBacktracks = 3;
            PrimaryAgent.MaximumAllowedMoves      = 1000;

            PrimaryAgent.Environment = new MazeBase(1, 1, 0, 0, 200);

            SecondaryAgent             = MazeUtilities.ConvertLoadedAgent(PrimaryAgent);
            SecondaryAgent.Environment = MazeUtilities.CopyEnvironment(PrimaryAgent.Environment);

            MazeVm = new MazeViewModel(PrimaryAgent.Environment);

            OnPropertyChanged(nameof(PrimaryAgent));
            OnPropertyChanged(nameof(SecondaryAgent));
            OnPropertyChanged(nameof(GoalPosition));
        }
Example #8
        static void Main(string[] args)
        {
            string filePath = "";

            if (args.Length > 0)
            {
                filePath = args[0];
            }

            if (!File.Exists(filePath))
            {
                Console.WriteLine("File Not Found");
                filePath = Path.Combine(Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location), @"MazeImages\braid200.bmp");
            }
            Console.WriteLine($"Using maze image file: {Path.GetFileName(filePath)}");

            Stopwatch s = new Stopwatch();

            s.Start();
            var maze = new Maze(filePath);

            Solver.SolveAStar(maze);
            if (maze.SolutionFound)
            {
                Console.WriteLine($"Solution found in {maze.Solution.Count} steps.");
            }
            else
            {
                Console.WriteLine($"No solution found.");
            }

            MazeUtilities.SaveSolution(maze);
            s.Stop();
            Console.WriteLine("Time Elapsed: " + s.Elapsed.ToString(@"mm\:ss\.fff"));

            Console.WriteLine("Press Enter to close...");
            Console.ReadLine();
        }
Example #9
        public Task Train()
        {
            List <Task>      tasks  = new List <Task>();
            List <MazeAgent> agents = new List <MazeAgent>();

            if (PrimaryAgent.MaximumAllowedMoves == 0)
            {
                PrimaryAgent.MaximumAllowedMoves = MazeVm.Rows * MazeVm.Columns * 100;
            }

            if (PrimaryAgent.MaximumAllowedBacktracks == 0)
            {
                PrimaryAgent.MaximumAllowedBacktracks = 3;
            }

            if (PrimaryAgent.Environment != MazeVm.Maze)
            {
                PrimaryAgent.Environment = (MazeBase)MazeVm.Maze;
            }

            agents.Add(PrimaryAgent);
            PrimaryAgent.TrainingEpisodeCompleted += AgentTrainingEpisodeCompleted;

            if (UseSecondAgent &&
                SecondaryAgent != null)
            {
                SecondaryAgent.LearningRate             = PrimaryAgent.LearningRate;
                SecondaryAgent.DiscountFactor           = PrimaryAgent.DiscountFactor;
                SecondaryAgent.NumberOfTrainingEpisodes = PrimaryAgent.NumberOfTrainingEpisodes;

                if (PrimaryAgent.LearningStyle == QLearning.Core.LearningStyles.QLearning)
                {
                    SecondaryAgent.LearningStyle = QLearning.Core.LearningStyles.SARSA;
                }
                else
                {
                    SecondaryAgent.LearningStyle = QLearning.Core.LearningStyles.QLearning;
                }

                if (SecondaryAgent.MaximumAllowedMoves == 0)
                {
                    SecondaryAgent.MaximumAllowedMoves = MazeVm.Rows * MazeVm.Columns * 100;
                }

                if (SecondaryAgent.MaximumAllowedBacktracks == 0)
                {
                    SecondaryAgent.MaximumAllowedBacktracks = 3;
                }

                SecondaryAgent.Environment = MazeUtilities.CopyEnvironment((MazeBase)MazeVm.Maze);
                agents.Add(SecondaryAgent);
                SecondaryAgent.TrainingEpisodeCompleted += AgentTrainingEpisodeCompleted;

                SecondaryAgent.Environment.SaveQualityToDisk = false;
            }

            _sessionsVm = null;

            TrainingEpisodesCompleted  = 0;
            TrainingProgressVisibility = Visibility.Visible;

            IsTraining = true;

            // Parallel.ForEach blocks until every agent has finished training,
            // so no completion polling is needed afterwards.
            Parallel.ForEach(agents, agent => TrainAgent(agent));

            if (_sessionsVm == null)
            {
                _sessionsVm = new TrainingSessionsViewModel(PrimaryAgent);

                Task.Run(() => _sessionsVm.InitializeSessions());
            }

            IsTraining = false;
            TrainingProgressVisibility = Visibility.Hidden;

            Application.Current.Dispatcher.Invoke(() => GetQualityFromTraining());

            foreach (var agent in agents)
            {
                agent.TrainingEpisodeCompleted -= AgentTrainingEpisodeCompleted;
            }

            return Task.CompletedTask;
        }
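
Train does its work synchronously and finishes with Task.CompletedTask, and its body calls Application.Current.Dispatcher.Invoke, so it expects to run off the UI thread. A minimal sketch of how a caller might invoke it, with the handler name assumed:

        // Hypothetical sketch: running Train off the dispatcher thread so the
        // IsTraining / TrainingProgressVisibility bindings can update during training.
        private async void TrainCommand_Executed()
        {
            await Task.Run(() => Train());
        }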
Example #10
        public void InitializeSessions()
        {
            var trainingSessions = new List<TrainingSessionEx>();
            var sessions         = _agent.TrainingSessions.OrderBy(e => e.Episode).ToList();
            var agent            = MazeUtilities.ConvertLoadedAgent(_agent);

            agent.Environment     = MazeUtilities.CopyEnvironment(_agent.Environment);
            agent.AgentCompleted += Agent_AgentCompleted;

            for (int i = sessions.Count - 1; i >= 0; --i)
            {
                var session = new TrainingSessionEx
                {
                    Episode = sessions[i].Episode,
                    Moves   = sessions[i].Moves,
                    Quality = sessions[i].Quality,
                    Score   = sessions[i].Score
                };

                _moves = 0;
                _score = 0;

                agent.Environment.QualityTable = session.Quality;

                try
                {
                    agent.Run(agent.StartPosition);
                    session.Succeeded = true;
                }
                catch
                {
                    session.Succeeded = false;
                }

                session.Moves = _moves;
                session.Score = _score;

                trainingSessions.Add(session);
            }

            var selection = trainingSessions
                            .GroupBy(g => new { g.Moves, g.Score, g.Succeeded })
                            .Select(t => new TrainingSessionEx
                            {
                                MinEpisode = t.Last().Episode,
                                MaxEpisode = t.First().Episode,
                                Episode    = t.Last().Episode,
                                Moves      = t.Key.Moves,
                                Score      = t.Key.Score,
                                Succeeded  = t.Key.Succeeded,
                                Quality    = t.First().Quality
                            });

            trainingSessions = selection
                               .OrderByDescending(s => s.Succeeded)
                               .ThenByDescending(m => m.Moves)
                               .ThenByDescending(e => e.MinEpisode).ToList();

            TrainingSessions = trainingSessions;
            SelectedSession  = TrainingSessions.FirstOrDefault();

            agent.AgentCompleted -= Agent_AgentCompleted;
        }