/// <summary>
        /// Create a genetic algorithm trainer. (BUG FIX: the previous summary
        /// incorrectly described this as an annealing trainer — the method
        /// constructs an <see cref="MLMethodGeneticAlgorithm"/>.)
        /// </summary>
        /// <param name="method">The method to use; must be a BasicNetwork.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        /// <exception cref="TrainingError">If method is not a BasicNetwork.</exception>
        public IMLTrain Create(IMLMethod method,
                              IMLDataSet training, String argsStr)
        {
            if (!(method is BasicNetwork))
            {
                throw new TrainingError(
                    "Invalid method type, requires BasicNetwork");
            }

            // Candidate networks are scored by their error on the training set.
            ICalculateScore score = new TrainingSetScore(training);

            // Population size is configurable through the architecture args;
            // falls back to 5000 when not supplied.
            IDictionary<String, String> args = ArchitectureParse.ParseParams(argsStr);
            var holder = new ParamsHolder(args);
            int populationSize = holder.GetInt(
                MLTrainFactory.PropertyPopulationSize, false, 5000);

            // Each population member starts as a reset deep copy of the
            // template method, so members do not share weight state.
            IMLTrain train = new MLMethodGeneticAlgorithm(() =>
            {
                IMLMethod result = (IMLMethod) ObjectCloner.DeepCopy(method);
                ((IMLResettable) result).Reset();
                return result;
            }, score, populationSize);

            return train;
        }
        /// <summary>
        /// Program entry point.
        /// </summary>
        /// <param name="app">Holds arguments and other info.</param>
        public void Execute(IExampleInterface app)
        {
            BasicNetwork network = CreateNetwork();

            IMLTrain train;

            // "anneal" as the first argument selects simulated annealing;
            // anything else (or no argument) selects the genetic algorithm.
            if (app.Args.Length > 0 && String.Compare(app.Args[0], "anneal", true) == 0)
            {
                train = new NeuralSimulatedAnnealing(
                    network, new PilotScore(), 10, 2, 100);
            }
            else
            {
                // Population members are freshly created, reset networks.
                train = new MLMethodGeneticAlgorithm(() =>
                {
                    BasicNetwork result = CreateNetwork();
                    ((IMLResettable) result).Reset();
                    return result;
                }, new PilotScore(), 500);
            }

            // Fixed training budget of 50 iterations; the separate i/epoch
            // counters of the original were redundant and have been merged.
            for (int epoch = 1; epoch <= 50; epoch++)
            {
                train.Iteration();
                Console.WriteLine(@"Epoch #" + epoch + @" Score:" + train.Error);
            }

            // BUG FIX: the original used a verbatim string (@"\n..."), which
            // prints the two characters '\' and 'n' literally instead of a
            // newline. A regular string literal restores the intended newline.
            Console.WriteLine("\nHow the winning network landed:");
            network = (BasicNetwork) train.Method;
            var pilot = new NeuralPilot(network, true);
            Console.WriteLine(pilot.ScorePilot());
            EncogFramework.Instance.Shutdown();
        }
        /// <summary>
        /// Train an XOR network with a genetic algorithm and verify that the
        /// resulting error falls below the target threshold.
        /// </summary>
        public void TestGenetic()
        {
            IMLDataSet trainingData = new BasicMLDataSet(XOR.XORInput, XOR.XORIdeal);
            ICalculateScore score = new TrainingSetScore(trainingData);

            // Every population member is a freshly built, untrained XOR network.
            var genetic = new MLMethodGeneticAlgorithm(
                () => NetworkUtil.CreateXORNetworkUntrained(), score, 500);

            NetworkUtil.TestTraining(genetic, 0.00001);
        }
// Example #4
        /// <summary>
        /// Train a navigation network between two geographic positions with a
        /// genetic algorithm, persisting the best-so-far network to MongoDB
        /// whenever the trainer reports a positive score. The population is
        /// doubled and training restarted when no positive score appears by
        /// the halfway point, capped at 4096 chromosomes.
        /// </summary>
        /// <param name="slat">Source latitude.</param>
        /// <param name="slon">Source longitude.</param>
        /// <param name="lat">Destination latitude.</param>
        /// <param name="lon">Destination longitude.</param>
        public void BuildNetwork(double slat, double slon, double lat, double lon)
        {
            // Host name is included in the final progress message.
            string hostname = Dns.GetHostName();
            SourceLocation = new Position(slat, slon);
            DestLocation = new Position(lat, lon);
            // NOTE(review): this local is never used below — the trained
            // network is taken from train.Method instead. Candidate dead code.
            BasicNetwork network = CreateNetwork();
            // Temporary file used to hand the serialized network to Mds.SaveFile.
            string fn = @"Robot_From" + slat + "_" + slon + "_To_" + lat + "_" + lon + ".net";
            int chromosomes = 128;
            IMLTrain train;
            // Each population member is a freshly created, reset network.
            train = new MLMethodGeneticAlgorithm(() =>
                {
                    BasicNetwork result = CreateNetwork();
                    ((IMLResettable) result).Reset();
                    return result;
                }, new RobotScore(), chromosomes);
            int epoch = 1;
            var scoresAverage = new List<double>();
            // NOTE(review): netavg is assigned every pass but never read after
            // the loop — appears to be dead state.
            double netavg = 0;

            // Keep iterating while we are within the epoch budget OR no
            // positive score has been achieved yet.
            while (epoch <= chromosomes || train.Error <= 0)
            {
                // NOTE(review): forcing a full GC every iteration is usually a
                // performance anti-pattern — confirm it is actually needed here.
                GC.Collect();
                Scores = new List<double>();
                train.Iteration();
                double average = GetitterationAverage();
                scoresAverage.Add(average);
                double avg = GetTrainAverage(scoresAverage);
                //_ProgressQueue.Publish(new TaskProgressMessage
                //    {
                //        CurrentTime = DateTime.Now,
                //        PercentComplete = epoch,
                //        StartTime = _starttime,
                //        Status = TaskStatus.InProcess,
                //        TaskId = _taskid,
                //        MessageId = Guid.NewGuid(),
                //        TransmisionDateTime = DateTime.Now,
                //        Details =
                //            hostname + @" Epoch #" + epoch + @" Score:" + train.Error + @" Chromosomes: " + chromosomes
                //    }
                //    );

                // Serialize access to the persistence path; presumably other
                // threads/tasks share NetworkLock — TODO confirm.
                lock (NetworkLock)
                {
                    // Only persist once the trainer has found a positive score.
                    if (train.Error > 0)
                    {
                        // Look up an existing record for this start/end pair,
                        // or create a new one if none exists yet.
                        NeuralNetwork net =
                            Mds.GetCollectionQueryModel<NeuralNetwork>(Query.And(Query.EQ("StartPosition.X", slat),
                                                                                 Query.EQ("StartPosition.Y", slon),
                                                                                 Query.EQ("EndPosition.X", lat),
                                                                                 Query.EQ("EndPosition.Y", lon)))
                               .FirstOrDefault();
                        if (net == null)
                            net = new NeuralNetwork
                                {
                                    EndPosition = new Position(lat, lon),
                                    Id = Guid.NewGuid(),
                                    StartPosition = new Position(slat, slon)
                                };

                        // Write the network to a temp file, upload it, then
                        // clean the file up again.
                        FileStream fs = File.Create(fn);
                        EncogDirectoryPersistence.SaveObject(fs, train.Method);
                        fs.Close();
                        MongoCollection<NeuralNetwork> col = Mds.DataBase.GetCollection<NeuralNetwork>("NeuralNetwork");
                        col.Save(net);
                        Mds.SaveFile(fn, net.Id);
                        File.Delete(fn);
                    }
                }
                epoch++;

                // If we are past the halfway point with no positive score,
                // double the population and restart the epoch counter.
                if (epoch > (chromosomes/2) && train.Error <= 0)
                {
                    chromosomes *= 2;

                    train = new MLMethodGeneticAlgorithm(() =>
                        {
                            BasicNetwork result = CreateNetwork();
                            ((IMLResettable) result).Reset();
                            return result;
                        }, new RobotScore(), chromosomes);

                    epoch = 1;
                }

                netavg = avg;
                // Hard cap: give up once the population has grown to 4096.
                if (chromosomes == 4096)
                    break;
            }
            // Publish a final "complete" progress message before shutting down.
            _ProgressQueue.Publish(new TaskProgressMessage
                {
                    CurrentTime = DateTime.Now,
                    PercentComplete = 100,
                    StartTime = _starttime,
                    Status = TaskStatus.Complete,
                    TaskId = _taskid,
                    MessageId = Guid.NewGuid(),
                    TransmisionDateTime = DateTime.Now,
                    Details = hostname
                }
                );
            EncogFramework.Instance.Shutdown();
        }
// Example #5
        /// <summary>
        /// Train player1's neural network with a genetic algorithm until the
        /// configured training time (NeuralTicTacToe.TRAIN_MINUTES) elapses,
        /// then save the best network found.
        /// </summary>
        public void geneticNeural()
        {
            // NOTE(review): this factory returns the SAME network instance on
            // every call (merely reset), so all population members may share
            // one object — confirm MLMethodGeneticAlgorithm copies it.
            IMLTrain train = new MLMethodGeneticAlgorithm(() =>
            {
                BasicNetwork result = ((PlayerNeural) player1).network;
                ((IMLResettable) result).Reset();
                return result;
            }, new ScorePlayer(player1, player2, false), POPULATION_SIZE);

            int epoch = 1;

            DateTime started = DateTime.Now;

            double minutes = 0;
            do
            {
                train.Iteration();

                // BUG FIX: the original read TimeSpan.Minutes, which is only
                // the minutes *component* (0-59) and wraps back to 0 after an
                // hour — so TRAIN_MINUTES >= 60 could never be reached and
                // training could loop far past the intended budget.
                // TotalMinutes is the full elapsed time.
                TimeSpan span = DateTime.Now - started;
                minutes = span.TotalMinutes;

                Console.WriteLine("Epoch #" + epoch + " Error:" + train.Error);
                epoch++;

            } while (minutes <= NeuralTicTacToe.TRAIN_MINUTES);
            SaveNetwork((BasicNetwork) train.Method);
        }