/// <summary>
/// Verifies that scaled conjugate gradient training converges on the XOR
/// problem within the tolerance accepted by <see cref="NetworkUtil.TestTraining"/>.
/// </summary>
public void TestSCG()
{
    IMLDataSet xorData = new BasicMLDataSet(XOR.XORInput, XOR.XORIdeal);
    BasicNetwork untrained = NetworkUtil.CreateXORNetworkUntrained();

    IMLTrain scgTrainer = new ScaledConjugateGradient(untrained, xorData);
    NetworkUtil.TestTraining(scgTrainer, 0.04);
}
/// <summary>
/// Builds a network from the training set, trains it with the selected algorithm,
/// logs the per-epoch error to <paramref name="trainingHistoryLocation"/>, and
/// serializes the trained model to <paramref name="path2SaveModel"/>.
/// </summary>
/// <param name="trainSetLocation">Path to the training-set file.</param>
/// <param name="path2SaveModel">Path where the trained model is written.</param>
/// <param name="trainingHistoryLocation">Path of the per-epoch error log.</param>
/// <param name="numberOfEpochs">Maximum number of training iterations.</param>
/// <param name="errorThreshold">Stop early once the training error drops to this value or below.</param>
/// <param name="trainer">Which training algorithm to use.</param>
/// <returns>The trained network.</returns>
/// <exception cref="ArgumentOutOfRangeException">Thrown for an unsupported <paramref name="trainer"/> value.</exception>
public static BasicNetwork TrainNetwork(string trainSetLocation, string path2SaveModel, string trainingHistoryLocation, int numberOfEpochs, double errorThreshold, NetworkTrainer trainer)
{
    IMLDataSet trainingDataSet = null;
    List<NeuralLayerDescriptor> descriptor = NetworkTrainingInitializer(trainSetLocation, out trainingDataSet);
    var network = NetworkCreator.CreateNetwork(descriptor);

    ITrain train;
    switch (trainer)
    {
        case NetworkTrainer.BackProp:
            train = new Backpropagation(network, trainingDataSet);
            break;
        case NetworkTrainer.ResilientBackProp:
            train = new ResilientPropagation(network, trainingDataSet);
            break;
        case NetworkTrainer.Quick:
            train = new QuickPropagation(network, trainingDataSet);
            break;
        case NetworkTrainer.ScaledConjugateGradient:
            train = new ScaledConjugateGradient(network, trainingDataSet);
            break;
        default:
            // BUG FIX: previously an unhandled enum value left 'train' null and the
            // method crashed with a NullReferenceException on the first Iteration() call.
            throw new ArgumentOutOfRangeException(nameof(trainer), trainer, "Unsupported trainer.");
    }

    int epoch = 0;
    using (System.IO.StreamWriter file = new System.IO.StreamWriter(trainingHistoryLocation))
    {
        // Train until the epoch budget is exhausted or the error threshold is reached,
        // recording one error value per line in the history file.
        do
        {
            train.Iteration();
            Console.WriteLine("Epoch #" + epoch + " Error:" + train.Error);
            file.WriteLine(train.Error);
            epoch++;
        } while ((epoch < numberOfEpochs) && (train.Error > errorThreshold));
    }

    // SECURITY NOTE(review): BinaryFormatter is insecure for untrusted data and is
    // removed in .NET 9; kept here only for compatibility with models already saved
    // in this format. Consider migrating to Encog's own persistence or a safe serializer.
    var serializer = new BinaryFormatter();
    using (var s = new FileStream(path2SaveModel, FileMode.Create))
    {
        serializer.Serialize(s, network);
    }
    return network;
}
/// <summary>
/// Trains the network with scaled conjugate gradient for the configured number of
/// iterations, printing a progress dot roughly every 10% of the run.
/// </summary>
/// <param name="training">The data set to train against.</param>
public void Train(IMLDataSet training)
{
    //SVDTraining train = new SVDTraining(network, training);
    ITrain train = new ScaledConjugateGradient(network, training);

    // BUG FIX: 'iterations / 10' is 0 when iterations < 10, which made the original
    // 'epoch % (iterations / 10)' throw DivideByZeroException. Clamp to at least 1.
    int progressInterval = Math.Max(1, iterations / 10);

    int epoch = 1;
    do
    {
        train.Iteration();
        if (epoch % progressInterval == 0)
        {
            Console.Write(".");
        }
        epoch++;
    } while (epoch < iterations);
}
/// <summary>
/// Trains the network with scaled conjugate gradient, periodically interleaving
/// simulated-annealing restarts when progress stalls. Stops when the error drops
/// below MAX_RMS_TOTAL_NETWORK_ERROR, the iteration budget is spent, or annealing
/// has repeatedly started from the same error (training is considered stuck).
/// </summary>
public virtual void trainNetwork()
{
    INeuralDataSet trainingSet = new BasicNeuralDataSet(networkInput, networkIdealOutput);
    // Alternative trainer kept for reference:
    //ITrain trainBackProp = new Backpropagation(network, trainingSet, BACKPROP_LEARN_RATE, BACKPROP_MOMENTUM);
    ITrain trainBackProp = new ScaledConjugateGradient(network, trainingSet);

    double error = Double.MaxValue;
    double lastError = Double.MaxValue;
    int epoch = 1;
    int lastAnneal = 0;                 // iterations since the last annealing pass
    int errorExit = 0;                  // consecutive iterations with negligible improvement
    double errorOnLastAnnealStart = double.MaxValue;
    int sameErrorOnLastAnnealStartCount = 0;   // counter; was a double in the original
    double currentAnnealInterval = MIN_BACKPROP_ITERATIONS_ANNEAL_START;
    double annealStartError = 0;

    do
    {
        trainBackProp.Iteration();
        error = trainBackProp.Error;

        if (lastError - error < MAX_RMS_ITERATION_NETWORK_ERROR)
            errorExit++;
        else
            errorExit = 0;

        Console.WriteLine("Iteration(SC) #{0} Error: {1}", epoch, error.ToString("0.00000000"));

        if (error > ANNEAL_MIN_ERROR)
        {
            // Anneal only after enough SCG iterations have elapsed AND progress has stalled.
            if ((lastAnneal > currentAnnealInterval) && (lastError - error < MAX_ANNEAL_START_ERROR))
            {
                // Exact float equality is intentional here: it detects a bit-for-bit
                // identical stall point across anneal starts, not numeric closeness.
                if (error == errorOnLastAnnealStart)
                    sameErrorOnLastAnnealStartCount++;
                else if (error < errorOnLastAnnealStart)
                {
                    sameErrorOnLastAnnealStartCount = 0;
                    errorOnLastAnnealStart = error;
                }

                ICalculateScore score = new TrainingSetScore(trainingSet);
                NeuralSimulatedAnnealing trainAnneal = new NeuralSimulatedAnnealing(network, score, ANNEAL_STARTTEMP, ANNEAL_ENDTEMP, ANNEAL_ITERATIONS);

                for (int i = 1; i <= ANNEAL_ATTEMPTS; i++)
                {
                    trainAnneal.Iteration();
                    if (i == 1)
                        annealStartError = trainAnneal.Error;
                    Console.WriteLine("Iteration(Anneal) #{0}-{1} Error: {2}", epoch, i, trainAnneal.Error.ToString("0.00000000"));
                }

                // If annealing produced no improvement, widen the interval between anneals.
                if (annealStartError == trainAnneal.Error)
                {
                    if (currentAnnealInterval < 200)
                    {
                        currentAnnealInterval *= 1.5;
                        // BUG FIX: this literal was broken across a physical line break
                        // (invalid C#); rejoined into a single valid string literal.
                        Console.WriteLine("Iteration(Anneal) # No improvment. Increasing anneal interval to " + currentAnnealInterval);
                    }
                    else
                        Console.WriteLine("Iteration(Anneal) # No improvment. Anneal interval at max.");
                }

                lastAnneal = 0;
                // Restart SCG from the (possibly annealed) weights.
                trainBackProp = new ScaledConjugateGradient(network, trainingSet);
                trainBackProp.Iteration();
                error = trainBackProp.Error;
                //saveNetwork(correctPredictions.ToString("##0.0")+ "_" + epoch.ToString() + "_nerualPokerAI_LA.nnDAT");
            }
        }

        lastError = trainBackProp.Error;
        epoch++;
        lastAnneal++;
    } while (trainBackProp.Error > MAX_RMS_TOTAL_NETWORK_ERROR && epoch < MAX_ITERATIONS && sameErrorOnLastAnnealStartCount < 2);
}