public void ExampleTest1()
{
    Accord.Math.Tools.SetupGenerator(0);

    // We'll use a simple XOR function as input.
    double[][] inputs =
    {
        new double[] { 0, 0 }, // 0 xor 0
        new double[] { 0, 1 }, // 0 xor 1
        new double[] { 1, 0 }, // 1 xor 0
        new double[] { 1, 1 }, // 1 xor 1
    };

    // XOR output, corresponding to the input.
    double[][] outputs =
    {
        new double[] { 0 }, // 0 xor 0 = 0
        new double[] { 1 }, // 0 xor 1 = 1
        new double[] { 1 }, // 1 xor 0 = 1
        new double[] { 0 }, // 1 xor 1 = 0
    };

    // Setup the deep belief network (2 inputs, 3 hidden, 1 output).
    DeepBeliefNetwork network = new DeepBeliefNetwork(2, 3, 1);

    // Initialize the network with Gaussian weights.
    new GaussianWeights(network, 0.1).Randomize();

    // Update the visible layer with the new weights.
    network.UpdateVisibleWeights();

    // Setup the learning algorithm.
    DeepBeliefNetworkLearning teacher = new DeepBeliefNetworkLearning(network)
    {
        Algorithm = (h, v, i) => new ContrastiveDivergenceLearning(h, v)
        {
            LearningRate = 0.1,
            Momentum = 0.5,
            Decay = 0.001,
        }
    };

    // Unsupervised learning on each hidden layer, except for the output layer.
    for (int i = 0; i < network.Layers.Length - 1; i++)
    {
        teacher.LayerIndex = i;

        // Compute the learning data that should be used for this layer.
        var layerInput = teacher.GetLayerInput(inputs);

        // Train the layer iteratively.
        for (int j = 0; j < 5000; j++)
            teacher.RunEpoch(layerInput);
    }

    // Supervised learning on the entire network, to provide output classification.
    var backpropagation = new BackPropagationLearning(network)
    {
        LearningRate = 0.1,
        Momentum = 0.5
    };

    // Run supervised learning.
    for (int i = 0; i < 5000; i++)
        backpropagation.RunEpoch(inputs, outputs);

    // Test the resulting accuracy.
    int correct = 0;
    for (int i = 0; i < inputs.Length; i++)
    {
        double[] outputValues = network.Compute(inputs[i]);
        double outputResult = outputValues.First() >= 0.5 ? 1 : 0;
        if (outputResult == outputs[i].First())
        {
            correct++;
        }
    }

    Assert.AreEqual(4, correct);
}
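// The test above follows the usual deep-belief-network recipe: greedy,
// layer-wise contrastive-divergence pre-training, then backpropagation
// fine-tuning. Below is a minimal sketch (not part of the original test)
// of querying the fine-tuned network over the whole XOR truth table,
// assuming the same "network" instance is in scope.
double[][] truthTable =
{
    new double[] { 0, 0 }, new double[] { 0, 1 },
    new double[] { 1, 0 }, new double[] { 1, 1 }
};
foreach (double[] sample in truthTable)
{
    double activation = network.Compute(sample)[0]; // single output neuron
    int predicted = activation >= 0.5 ? 1 : 0;      // same 0.5 threshold as the test
    Console.WriteLine($"{sample[0]} xor {sample[1]} -> {predicted} ({activation:F3})");
}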
public void StartProcessing()
{
    _isNeedStop = false;
    SigmoidAlphaValue = viewModel.SigmoidsAlpha;
    LearningRate = viewModel.LearningRate;
    Momentum = viewModel.Momentum;

    var lines = File.ReadAllLines(viewModel.TrainingFilePath);

    // Initialize input and output values.
    var dataset = CsvConverter.ToDouble(lines);

    // Layers (excluding the input layer, including the output layer).
    int[] layers = { 20, dataset.Outputs[0].Length };

    // Create the perceptron.
    Network = new ActivationNetwork(new SigmoidFunction(SigmoidAlphaValue), dataset.Inputs[0].Length, layers);

    // Create the teacher.
    var teacher = new BackPropagationLearning(Network)
    {
        LearningRate = LearningRate,
        Momentum = Momentum
    };

    int iteration = 1;
    try
    {
        List<double> errorsList = new List<double>();
        while (!_isNeedStop)
        {
            // Run one epoch of the learning procedure.
            double error = teacher.RunEpoch(dataset.Inputs, dataset.Outputs);
            errorsList.Add(error);

            // Show the current iteration and error.
            viewModel.Epoches = iteration;
            viewModel.AverageError = error;
            iteration++;

            // Check if we need to stop.
            if (error <= viewModel.LearningErrorLimit)
            {
                break;
            }
            if (iteration > viewModel.EpochesLimit && viewModel.EpochesLimit > 0)
            {
                break;
            }
        }
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.ToString());
    }
}
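// CsvConverter.ToDouble is a project-specific helper whose source is not
// shown. The sketch below is an assumption about its shape: it parses
// comma-separated rows, treats the last column as a single target value,
// and exposes Inputs/Outputs the way the method above consumes them.
// Requires: using System.Linq;
public static class CsvConverter
{
    public static (double[][] Inputs, double[][] Outputs) ToDouble(string[] lines)
    {
        var inputs = new double[lines.Length][];
        var outputs = new double[lines.Length][];
        for (int i = 0; i < lines.Length; i++)
        {
            double[] values = lines[i].Split(',').Select(double.Parse).ToArray();
            inputs[i] = values.Take(values.Length - 1).ToArray();
            outputs[i] = new[] { values[values.Length - 1] };
        }
        return (inputs, outputs);
    }
}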
private void TrainNeuralNetwork()
{
    Debug.WriteLine("MyNN: Learning Started");
    m_BPLearning = new BackPropagationLearning(m_network, m_learningRate, m_momentum, m_goal);

    double[][] input;
    double[][] output;
    int maxEpochs = 500;

    CreateInputOutput(out input, out output);
    if (input == null || output == null)
    {
        return; // Network is too small (needs to be recreated).
    }

    double error = m_BPLearning.Train(input, output, ref maxEpochs);
    Debug.WriteLine(string.Format("MyNN trained: {0} epochs, {1} error", maxEpochs, error));

    if (m_saveIsRequired)
    {
        m_saveIsRequired = false;
        FileOptions.SaveTrainingSet(m_curveSets);
        m_curveSets = null;
        FileOptions.SaveNeuralNetwork(m_network);
    }

    OnNetworkLearnt(error, maxEpochs);
}
public Tuple<double, TimeSpan> Train(double[][] inputs, double[][] outputs)
{
    Network = new ActivationNetwork(new BipolarSigmoidFunction(SigmoidAlphaValue), NeuronsInFirstLayer, NeuronsInHiddenLayer, 1);

    BackPropagationLearning teacher = new BackPropagationLearning(Network);
    teacher.LearningRate = LearningRate;
    Network.Randomize();

    var sw = Stopwatch.StartNew();
    double error = double.PositiveInfinity;
    double previous = double.PositiveInfinity;

    for (int i = 1; i <= EpochCount; i++)
    {
        error = teacher.RunEpoch(inputs, outputs);
        if (error == 0 || previous < error)
        {
            break;
        }
        previous = error;
        // Console.WriteLine(String.Format("Epoch={0} Error={1}", i, error));
    }

    sw.Stop();
    return Tuple.Create(error, sw.Elapsed);
}
public int trainingNeuron()
{
    //inputData = inputList.ToArray();
    //outputData = outputList.ToArray();

    network = new ActivationNetwork(
        activateFunction,
        inputData[0].Length,
        inputData[0].Length * 2,
        outputData[0].Length);

    BackPropagationLearning teacher = new BackPropagationLearning(network);

    int iterationsCount = 0;
    bool flag = true;

    // Keep training until every sample is classified correctly
    // or the iteration limit is hit.
    while (iterations != 0 && iterationsCount < iterations && flag)
    {
        flag = false;
        teacher.RunEpoch(inputData, outputData);

        // Re-check all samples; one misclassification forces another epoch.
        for (int i = 0; i < inputData.Length && !flag; i++)
        {
            if (!CompareOutput(outputData[i], ValidateOutput(network.Compute(inputData[i]))))
            {
                flag = true;
            }
        }

        iterationsCount++;
    }

    return iterationsCount;
}
public void BackLearning()
{
    // XOR training data: four 2-value samples and their expected outputs.
    double[][] input = new double[4][]
    {
        new double[] { 0, 0 },
        new double[] { 0, 1 },
        new double[] { 1, 0 },
        new double[] { 1, 1 }
    };
    double[][] output = new double[4][]
    {
        new double[] { 0 },
        new double[] { 1 },
        new double[] { 1 },
        new double[] { 0 }
    };

    ActivationNetwork network = new ActivationNetwork(new SigmoidFunction(2),
        2,  // two inputs in the network
        2,  // two neurons in the first layer
        1); // one neuron in the second layer

    // Create the teacher.
    BackPropagationLearning teacher = new BackPropagationLearning(network);

    var needToStop = false;
    while (!needToStop)
    {
        // Run one epoch of the learning procedure.
        double error = teacher.RunEpoch(input, output);

        // Check the error value to see if we need to stop.
        if (error < 0.01)
        {
            needToStop = true;
        }
    }
}
Network BuildApproximator(Track[] tracks)
{
    const int Epochs = 10000;
    const int ProgressReportFrequency = Epochs / 20;
    const double TrainingSetRatio = 0.4;

    var ann = new ActivationNetwork(new SigmoidFunction(), AnnInput.InputCount, 7, 1);
    var teacher = new BackPropagationLearning(ann) { Momentum = 0.4 };

    var trainingSet = BuildTrainingSet(tracks.Choice(TrainingSetRatio));
    var inputList = trainingSet.Select(x => (double[])x.Input).ToArray();
    var outputList = trainingSet.Select(x => new[] { x.Output }).ToArray();

    Console.WriteLine("Training...");
    for (int i = 0; i < Epochs; i++)
    {
        // Decaying learning rate; use (i + 1) so the first epoch
        // does not evaluate Math.Pow(0, -1/3) == +Infinity.
        teacher.LearningRate = Math.Pow(i + 1, -1 / 3.0);
        var avErr = teacher.RunEpoch(inputList, outputList); // / inputList.Length;
        if (i % ProgressReportFrequency == 0)
        {
            Console.WriteLine(avErr);
        }
        if (avErr < 1)
        {
            break;
        }
    }

    return ann;
}
public void TestMethod1()
{
    double sigmoidAlphaValue = 3;

    List<double[]> input = new List<double[]>();
    input.Add(new double[] { 0.2 });
    input.Add(new double[] { 0.4 });
    input.Add(new double[] { 0.6 });
    input.Add(new double[] { 0.8 });
    input.Add(new double[] { 1 });

    List<double[]> output = new List<double[]>();
    output.Add(new double[] { 0.1 });
    output.Add(new double[] { 0.4 });
    output.Add(new double[] { 0.6 });
    output.Add(new double[] { 0.75 });
    output.Add(new double[] { 0.81 });

    ActivationNetwork network = new ActivationNetwork(
        new BipolarSigmoidFunction(sigmoidAlphaValue), 1, 1, 1);

    BackPropagationLearning teacher = new BackPropagationLearning(network);
    teacher.LearningRate = 0.1;
    teacher.Momentum = 0;

    // Run epochs of the learning procedure until the average
    // error per sample falls below a small threshold.
    double error = double.PositiveInfinity;
    while (error > 0.001)
    {
        error = teacher.RunEpoch(input.ToArray(), output.ToArray()) / input.Count;
    }
}
private void CreateNetwork()
{
    network = new ActivationNetwork(
        new BipolarSigmoidFunction(alfa), // activation function
        inputSayisi,                      // number of inputs
        gizliKatmanSayisi,                // number of hidden-layer neurons
        cikisSayisi                       // number of output neurons
    );

    BackPropagationLearning backprob = new BackPropagationLearning(network);
    backprob.LearningRate = 0.1;
    backprob.Momentum = 0.0;

    int iterasyon = 1;
    while (!dongu)
    {
        double hata = backprob.RunEpoch(_inputData, _outputData);
        iterasyon++;
        if (iterasyon > 1000) // || hata < 0.0008
        {
            break;
        }
    }

    MessageBox.Show("Recommendations computed");
}
public double CheckAccuracy(double lr, double m, int neurons)
{
    double accuracy = 0;

    // k-fold cross-validation: train and test once per fold, then average.
    for (int k = 0; k < Folds; k++)
    {
        var network = new ActivationNetwork(new SigmoidFunction(), Data.FeatureCount, new int[] { neurons, 1 });
        var teacher = new BackPropagationLearning(network);
        teacher.LearningRate = lr;
        teacher.Momentum = m;

        var needToStop = false;
        int epochCount = 0;
        while (!needToStop)
        {
            double error = teacher.RunEpoch(TrainInput[k], TrainOutput[k]);
            epochCount++;
            if (epochCount > 5000 || error < 0.01)
            {
                needToStop = true;
            }
        }

        int correct = 0;
        for (int i = 0; i < TestInput[k].Length; i++)
        {
            var label = Math.Round(network.Compute(TestInput[k][i])[0]);
            if (label == TestOutput[k][i][0])
            {
                correct++;
            }
        }

        accuracy += (double)correct / TestInput[k].Length;
    }

    return accuracy / Folds;
}
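// A hypothetical driver for CheckAccuracy (not in the original source):
// a brute-force grid search over learning rate, momentum, and hidden-layer
// size. The parameter ranges are illustrative only.
double bestAccuracy = 0;
foreach (double lr in new[] { 0.05, 0.1, 0.2 })
    foreach (double m in new[] { 0.0, 0.5, 0.9 })
        foreach (int neurons in new[] { 5, 10, 20 })
        {
            double acc = CheckAccuracy(lr, m, neurons);
            if (acc > bestAccuracy)
            {
                bestAccuracy = acc;
                Console.WriteLine($"lr={lr} m={m} neurons={neurons}: {acc:P1}");
            }
        }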
public void load_button_Click(object sender, EventArgs e)
{
    if (csv_file == "")
    {
        content_box.Text += "Data set not yet selected!\r\n";
    }
    else
    {
        // Example input: 15,15,15
        // Empty entries are skipped so a trailing comma does not
        // make Convert.ToInt32 throw on an empty string.
        String nodes_str = nodes.Text;
        String[] nodes_str_arr = nodes_str.Split(new[] { ',' }, StringSplitOptions.RemoveEmptyEntries);
        int[] n = new int[nodes_str_arr.Length + 1];
        for (int index = 0; index < nodes_str_arr.Length; index++)
        {
            n[index] = Convert.ToInt32(nodes_str_arr[index]);
        }

        // The output layer is fixed at 6 neurons.
        n[nodes_str_arr.Length] = 6;

        network = new ActivationNetwork(new BipolarSigmoidFunction(2), 30, n);
        bp = new BackPropagationLearning(network);

        dt = Csv2DataTable(this.csv_file, 0);
        train_number = Convert.ToInt32(TrainingRows.Text);
        test_number = Convert.ToInt32(TestRows.Text);
        epoch = Convert.ToInt32(Epoch.Text);

        Train();
    }
}
void Train()
{
    LoadTrainData();

    // Prepare learning data.
    double[][] input = new double[sampleCount][];
    double[][] output = new double[sampleCount][];

    for (int i = 0; i < sampleCount; i++)
    {
        input[i] = new double[inputCount];
        output[i] = new double[outputCount];

        // Set input.
        input[i][0] = data[i, 0];
        input[i][1] = data[i, 1];
        input[i][2] = data[i, 2];

        // Set output (one-hot encoding of the class label).
        output[i][classes[i]] = 1;
    }

    var teacher = new BackPropagationLearning(Network);

    // A floating-point error rarely reaches exactly zero,
    // so stop at a small threshold instead.
    while (true)
    {
        double error = teacher.RunEpoch(input, output);
        if (error < 1e-3)
        {
            break;
        }
    }

    Network.Save(fn);
}
public void Train(IList<ClassifiedTrademark> trainingSet)
{
    var inputs = new double[trainingSet.Count][];
    var outputs = new double[trainingSet.Count][];
    for (int i = 0; i < trainingSet.Count; i++)
    {
        inputs[i] = featureExtractor.ExtractFeatures(trainingSet[i].Image);
        outputs[i] = new double[classCount];
        outputs[i][trainingSet[i].TrademarkClass] = 1;
    }

    ann = new ActivationNetwork(new SigmoidFunction(), featureExtractor.Dimension, 12, classCount);
    ann.Randomize();
    var teacher = new BackPropagationLearning(ann) { LearningRate = 0.1 };

    const int iterations = 100000;
    const int reportFreq = iterations / 20;
    for (int i = 0; i < iterations; i++)
    {
        var e = teacher.RunEpoch(inputs, outputs) / trainingSet.Count;
        if (i % reportFreq == 0)
        {
            Console.WriteLine("Training. " + GetType().Name + " error rate: " + e);
        }
        if (Math.Abs(e) < 1e-4)
        {
            break;
        }
    }
}
private void Learn()
{
    errors.Clear();
    var network = new ActivationNetwork(
        new SigmoidFunction(), // sigmoid activation function
        2, hidden, 2);

    BackPropagationLearning teacher = new BackPropagationLearning(network);

    double error = 1;
    int k = inputs.Length;

    // Keep training until fewer than 1% of the samples are misclassified.
    while (k >= inputs.Length / 100)
    {
        error = teacher.RunEpoch(inputs, outputs);
        k = 0;

        var lst = new double[inputs.Length][];
        for (int i = 0; i < inputs.Length; i++)
        {
            var c = network.Compute(inputs[i]);

            // Count a misclassification when the predicted and
            // expected winning outputs disagree.
            if (c[0] > c[1] && outputs[i][0] < outputs[i][1] ||
                c[0] < c[1] && outputs[i][0] > outputs[i][1])
            {
                k++;
            }
            lst[i] = c;
        }

        comput = lst;
        errors.Add(k);
    }
}
public void CreateNetwork(double learningRate, double momentum)
{
    ActNetwork = new ActivationNetwork(new SigmoidFunction(2), 16, MiddleNeuronsCount, 4);
    Teacher = new BackPropagationLearning(ActNetwork);
    Teacher.LearningRate = learningRate;
    Teacher.Momentum = momentum;
}
public override double Train(Data info, float lr, float mom)
{
    // Set up the trainer using backpropagation, applying the
    // requested learning rate and momentum.
    BackPropagationLearning teacher = new BackPropagationLearning(AccordNetwork)
    {
        LearningRate = lr,
        Momentum = mom
    };

    // Train the network on the data set in blocks of 1000 epochs, stopping
    // once a whole block improves the error by less than 1e-7.
    double error = double.PositiveInfinity;
    double lastError;
    do
    {
        lastError = error;
        int i = 0;
        while (i < 1000)
        {
            error = teacher.RunEpoch(info.InputData, info.OutputData);
            i++;
        }
    } while (lastError - error > 0.0000001);

    return error;
}
public void TeachNeuralteacher(List<string> list)
{
    double[][] outputs = new double[outputsC45.Length][];
    for (int i = 0; i < outputsC45.Length; i++)
    {
        outputs[i] = new double[1] { outputsC45[i] - 1 };
    }

    var input = GetC45Data(list);

    IActivationFunction function = new SigmoidFunction(2);
    network = new ActivationNetwork(function, 12, 4, 4, 1);
    teacher = new BackPropagationLearning(network);

    // Early stopping: quit after three consecutive epochs without a
    // meaningful (> 1e-5) improvement over the best error seen so far.
    double tempMin = double.MaxValue;
    double error = 0;
    int count = 0;
    while (count != 3)
    {
        error = teacher.RunEpoch(input, outputs);
        if (error + 0.00001 < tempMin)
        {
            tempMin = error;
            count = 0;
        }
        else
        {
            count++;
        }
    }
}
public void AutoTeaching(BackPropagationLearning _teacher)
{
    for (int i = 0; i < 100000; i++)
    {
        // Generate a random training sample.
        double pacmanLength = R.Next(800);
        double AlphaSine = R.NextDouble();
        double ghostLength = R.Next(600);
        double BetaSine = R.NextDouble();

        double[] Inputs = { pacmanLength, AlphaSine, ghostLength, BetaSine };
        Console.Write("[T] Inputs >> ");
        for (int j = 0; j < Inputs.Length; j++)
        {
            Console.Write(Inputs[j] + " ");
        }
        Console.WriteLine();

        double[] Outputs = { BetaSine };
        Console.Write("[T] Outputs >> ");
        for (int j = 0; j < Outputs.Length; j++)
        {
            Console.Write(Outputs[j] + " ");
        }
        Console.WriteLine();

        // Teach the same sample twice and print the network's response.
        for (int j = 0; j < 2; j++)
        {
            _teacher.Teach(Inputs, Outputs);
            double[] Test = _teacher.Launch(Inputs);
            Console.Write("[T] Out >> ");
            for (int k = 0; k < Test.Length; k++)
            {
                Console.Write(Test[k] + " ");
            }
            Console.WriteLine();
        }
    }
}
public void TrainNetwork(NetworkGraph graph)
{
    teacher = new BackPropagationLearning(network);
    InputData = GetXvals();
    OutputData = GetSinWavePoints();

    double error = int.MaxValue;
    int iteration = 1;
    while (error > .001)
    {
        error = teacher.RunEpoch(InputData, OutputData);
        graph.AddTitle(string.Format("SIN(x) Iteration {0}-{1:0.00} Error", iteration, error));
        graph.ResetData();

        int i = 0;
        foreach (var x in InputData)
        {
            graph.AddPoint(x[0], OutputData[i][0], network.Compute(x)[0]);
            i++;
        }

        graph.Update();
        Thread.Sleep((int)Math.Max(error, 10));
        iteration++;
    }
}
public void UpdateNeuralNet(string csvString, string trainedNetName)
{
    if (!File.Exists(outputDir + "dataset/" + trainedNetName + ".tmp"))
    {
        var lines = csvString.Split('\n');
        var csv = from line in lines select line.Split(',');

        double[][] inputs = new double[150][];
        double[][] outputs = new double[150][];
        string[] types = new string[150];

        // Parse the CSV: the first four columns are features,
        // the last column is the class label.
        int rowIndex = 0;
        foreach (var row in csv)
        {
            int colIndex = 0;
            inputs[rowIndex] = new double[4];
            foreach (var col in row)
            {
                if (colIndex < row.Length - 1)
                {
                    inputs[rowIndex][colIndex] = double.Parse(col);
                }
                else
                {
                    types[rowIndex] = col;
                }
                colIndex++;
            }
            rowIndex++;
        }

        string[] distinct = types.Select(o => o.Replace("\r", "")).Distinct().ToArray();
        PutDistinctLevel(trainedNetName, distinct);

        // Encode each class label as an output vector.
        BackPropagation bp = new BackPropagation();
        rowIndex = 0;
        foreach (var row in types)
        {
            outputs[rowIndex] = bp.getIndex(row.Replace("\r", ""), distinct);
            rowIndex++;
        }

        // Create the neural network.
        ActivationNetwork network = new ActivationNetwork(new SigmoidFunction(0.43), 4, 8, 3);
        BackPropagationLearning teacher = new BackPropagationLearning(network);
        teacher.LearningRate = 0.2;
        teacher.Momentum = 0;

        // Training loop: run epochs until the error is small enough.
        double error = 1;
        while (error > 0.01)
        {
            error = teacher.RunEpoch(inputs, outputs);
            Console.WriteLine("error is: " + error);
        }

        network.Save(outputDir + @"dataset/" + trainedNetName + ".tmp");
    }
}
public void RunTraining2(Training2Parameters parameters)
{
    LogInfoUsingBothLoggers("Started supervised training.");

    var trainingData = _configuration.InputsOutputsData;
    var teacher = new BackPropagationLearning(NeuralNetwork)
    {
        LearningRate = parameters.LearningRate,
        Momentum = parameters.Momentum,
    };

    var guiLogIntensity = GetGuiLogIntensity(parameters.SupervisedEpochs);
    foreach (int i in Enumerable.Range(1, parameters.SupervisedEpochs))
    {
        var error = teacher.RunEpoch(trainingData.Inputs, trainingData.Outputs) / trainingData.Inputs.Length;
        var message = $"Supervised: {i}, Error = {error}";
        LogCurrentEpochResult(message, guiLogIntensity, i, parameters.SupervisedEpochs);

        if (_skipPhaseRequest.RequestedAndUnhandled)
        {
            LogPhaseSkippnigAndNotifyHandled(i, parameters.SupervisedEpochs);
            break;
        }
    }
}
static void Main(string[] args)
{
    // load training data
    // normalize input data
    // normalize output data
    double[][] inputs = new double[0][];
    double[][] outputs = new double[0][];

    var network = new ActivationNetwork(new SigmoidFunction(ALPHA_VALUE), 1, FIRST_LAYER_NEURON_COUNT, 1);
    var teacher = new BackPropagationLearning(network);
    network.Randomize();
    teacher.LearningRate = 0.75;

    int iteration = 1;
    double error = 1.0;
    while (error > 0.25)
    {
        // Run one epoch of the learning procedure.
        error = teacher.RunEpoch(inputs, outputs);
        iteration++;
        if ((iteration % 1000) == 0)
        {
            Console.WriteLine("Error {0}\t| It: {1}", error, iteration);
        }
    }

    Console.ReadLine();
}
public void Train(int cycleCount)
{
    double meanError = 0;
    if (trainingSet.Count < 20)
    {
        Initialize();
    }

    ActivationNetwork trainingNetwork = new ActivationNetwork(new SigmoidFunction(2), 6, 6, 4, 4);
    BackPropagationLearning teacher = new BackPropagationLearning(trainingNetwork);
    teacher.Momentum = 0.1;

    // Train sample-by-sample for the requested number of cycles.
    for (int i = 0; i < cycleCount; i++)
    {
        foreach (SoundSnippet snippet in trainingSet)
        {
            meanError += teacher.Run(snippet.Dataset, snippet.Genre);
        }
    }

    meanError = meanError / (cycleCount * trainingSet.Count);
    network = trainingNetwork;
    network.Save(networkPath);

    Console.WriteLine("Done!");
    Console.WriteLine("Error rate: " + Convert.ToString(meanError));
}
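// AForge's Run() trains on a single sample, while RunEpoch() sweeps a whole
// batch and returns the summed error. A sketch of the inner loop above
// rewritten in epoch style, assuming Dataset and Genre are double[]
// (requires using System.Linq):
double[][] snippetInputs = trainingSet.Select(s => s.Dataset).ToArray();
double[][] snippetTargets = trainingSet.Select(s => s.Genre).ToArray();
for (int i = 0; i < cycleCount; i++)
    meanError += teacher.RunEpoch(snippetInputs, snippetTargets);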
public List<double> TeachNetwork(List<double> Input, List<double> Output)
{
    var teacher = new BackPropagationLearning(network)
    {
        LearningRate = 0.1f,
        Momentum = 0.9f
    };

    // Run a single online learning step on this sample.
    teacher.Run(Input.ToArray(), Output.ToArray());

    // Return the network's response to the same input.
    return new List<double>(network.Compute(Input.ToArray()));
}
void BackPropagationMethod()
{
    an = new ActivationNetwork(new SigmoidFunction(), inputCount, hidden, outputCount);
    bpn = new BackPropagationLearning(an);
    new NguyenWidrow(an).Randomize();

    Console.WriteLine("Learning");
    for (var i = 0; i < maxEpoch; i++)
    {
        var error = bpn.RunEpoch(inputImgArray.ToArray(), outputImgArray.ToArray());
        if (error < errorGoal)
        {
            break;
        }
        if (i % 10 == 0)
        {
            Console.WriteLine($"Report error BPN {i} : {error}");
        }
    }

    an.Save(savedANNetwork);
}
/// <summary>
/// Initializes the member network and the member teacher.
/// </summary>
/// <param name="network">The activation network to train.</param>
/// <param name="info">Settings naming the teacher and its learning parameters.</param>
private void SetUpNetwork(ActivationNetwork network, NeuralNetworkInfo info)
{
    string teacher = info.Teacher;
    m_Network = network;

    if (teacher == "BackPropagation")
    {
        BackPropagationLearning learner = new BackPropagationLearning(m_Network);
        learner.LearningRate = info.LearningRate;
        learner.Momentum = info.Momentum;
        m_Teacher = learner;
    }
    else if (teacher == "Perceptron")
    {
        PerceptronLearning learner = new PerceptronLearning(m_Network);
        learner.LearningRate = info.LearningRate;
        m_Teacher = learner;
    }
    else if (teacher == "DeltaRule")
    {
        DeltaRuleLearning learner = new DeltaRuleLearning(m_Network);
        learner.LearningRate = info.LearningRate;
        m_Teacher = learner;
    }
    else
    {
        // Default to backpropagation when the teacher name is unrecognized.
        BackPropagationLearning learner = new BackPropagationLearning(m_Network);
        learner.LearningRate = info.LearningRate;
        learner.Momentum = info.Momentum;
        m_Teacher = learner;
    }
}
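// A minimal usage sketch for SetUpNetwork (assumed, not from the original
// source): only the Teacher, LearningRate, and Momentum properties that the
// method reads are populated, and the call is made from inside the same
// class since the method is private.
var info = new NeuralNetworkInfo
{
    Teacher = "DeltaRule",
    LearningRate = 0.05,
    Momentum = 0.0 // ignored by DeltaRuleLearning
};
var net = new ActivationNetwork(new SigmoidFunction(), 4, 8, 2);
SetUpNetwork(net, info);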
public ExamingNetworkMaster()
{
    Console.WriteLine("Welcome to Examing Network Master");
    Console.WriteLine("Now you have to load an existing network or create a new one");

    FeedForwardNetwork network = new FeedForwardNetwork();
    Console.WriteLine("Do you want to load an existing network?");
    if (Console.ReadLine() == "Yes")
    {
        network = FeedForwardNetwork.Load(Console.ReadLine());
    }
    else
    {
        network = new FeedForwardNetwork(new BipolarSigmoidFunction(), Environment.TickCount, 1, 7, 1);
        Console.WriteLine("New neural network created successfully");
    }

    BackPropagationLearning teacher = new BackPropagationLearning(network);
    teacher.LearningRate = 0.03;

    Console.WriteLine("Let's teach it");
    Teaching(teacher);

    Console.WriteLine("Now it's time to examine this network (:");
    Examing(network);

    Console.WriteLine("Do you want to save this network?");
    if (Console.ReadLine() == "Yes")
    {
        Console.WriteLine("Enter filename: ");
        network.Save(Console.ReadLine());
    }

    Console.WriteLine("Press enter to exit...");
    Console.ReadKey();
}
private void learnNetworkSupervised()
{
    if (!Main.CanClassify)
    {
        return;
    }

    Dispatcher dispatcher = Dispatcher.CurrentDispatcher;

    new Task(() =>
    {
        var teacher = new BackPropagationLearning(Main.Network)
        {
            LearningRate = LearningRate,
            Momentum = Momentum
        };

        double[][] inputs, outputs;
        Main.Database.Training.GetInstances(out inputs, out outputs);

        // Start running the learning procedure.
        for (int i = 0; i < Epochs && !shouldStop; i++)
        {
            double error = teacher.RunEpoch(inputs, outputs);
            dispatcher.BeginInvoke((Action<int, double>)updateError,
                DispatcherPriority.ContextIdle, i + 1, error);
        }

        Main.Network.UpdateVisibleWeights();
        IsLearning = false;
    }).Start();
}
public OddTest()
{
    Console.WriteLine("Welcome to Neural Odd Test Example");

    FeedForwardNetwork network = new FeedForwardNetwork(new SigmoidFunction(), 1, 10, 1);
    BackPropagationLearning teacher = new BackPropagationLearning(network);
    teacher.LearningRate = 0.1;

    Random R = new Random(Environment.TickCount);

    Console.WriteLine("Training...");
    for (int i = 0; i < 1000; i++)
    {
        Console.Write("[T] Input >> ");
        int Input = R.Next(1024);
        Console.WriteLine(Input);

        // Target is 1 for odd numbers and 0 for even ones.
        teacher.Teach(Binary(Input), (Input % 2 == 0) ? new double[] { 0 } : new double[] { 1 });

        Console.Write("[T] Output >> ");
        Console.WriteLine(network.Launch(Binary(Input))[0].ToString());
    }

    Console.WriteLine("Examining...");
    Console.Write("[E] Input >> ");
    string userString = Console.ReadLine();
    while (userString != "stop")
    {
        int Number = int.Parse(userString.Split(' ')[0]);
        Console.Write("[E] Output >> ");
        Print(network.Launch(Binary(Number)));
        Console.Write("[E] Input >> ");
        userString = Console.ReadLine();
    }

    Console.WriteLine();
}
public virtual void Prepare()
{
    PrepareData();
    PrepareCharts();

    network = new ActivationNetwork(new Tanh(0.2), Sizes[0], Sizes.Skip(1).ToArray());
    network.ForEachWeight(z => rnd.NextDouble() * 2 - 1);

    teacher = new BackPropagationLearning(network);
    teacher.LearningRate = 1;

    Form = new Form()
    {
        Text = GetType().Name,
        Size = new Size(800, 600),
        FormBorderStyle = FormBorderStyle.FixedDialog,
        Controls = { AreaChart, HistoryChart }
    };
}
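// ForEachWeight is not part of stock AForge.NET. The extension below is a
// sketch of what it might look like, inferred from the call site above:
// it maps a function over every weight and threshold in the network.
// Requires: using System; using AForge.Neuro;
public static class NetworkExtensions
{
    public static void ForEachWeight(this ActivationNetwork network, Func<double, double> map)
    {
        foreach (Layer layer in network.Layers)
            foreach (ActivationNeuron neuron in layer.Neurons)
            {
                for (int i = 0; i < neuron.Weights.Length; i++)
                    neuron.Weights[i] = map(neuron.Weights[i]);
                neuron.Threshold = map(neuron.Threshold);
            }
    }
}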
private void Awake()
{
    this.agent_play = false;

    // Four small networks, each with its own backpropagation teacher.
    this.network = new ActivationNetwork(new LinearFunction(), 2, 8);
    teacher = new BackPropagationLearning(network);

    this.network2 = new ActivationNetwork(new SigmoidFunction(), 8, 8);
    teacher2 = new BackPropagationLearning(network2);

    this.network3 = new ActivationNetwork(new LinearFunction(), 8, 1);
    teacher3 = new BackPropagationLearning(network3);

    this.network4 = new ActivationNetwork(new SigmoidFunction(), 1, 1);
    teacher4 = new BackPropagationLearning(network4);
}
public AnnAgent(bool learn, int boardSize, byte player = 1)
{
    learning = learn;
    playerNumber = player;
    int boardFields = boardSize * boardSize;

    // Reuse a previously trained network if one has been saved for this board size.
    if (File.Exists("ann" + boardSize + ".bin"))
    {
        network = (ActivationNetwork)Serialization.LoadNetwork("ann" + boardSize + ".bin");
    }
    else
    {
        network = new ActivationNetwork(new BipolarSigmoidFunction(), boardFields, 5, boardFields * 2);
    }

    backProp = new BackPropagationLearning(network);
    teacher = new MinimaxAgent(2, player);
}
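// The constructor above only loads a saved network; the matching save step
// is a sketch (assumed, not from the original source). It uses AForge's
// built-in Network.Save and presumes the custom Serialization.LoadNetwork
// helper can read that format back.
network.Save("ann" + boardSize + ".bin");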
public BackPropogation()
{
    InitializeComponent();

    activation_nework = new ActivationNetwork(new BipolarSigmoidFunction(), 50, 50, 10, 1);
    watch1 = new Stopwatch();
    watch2 = new Stopwatch();
    watch3 = new Stopwatch();

    backgroundWorkerTrainer.Disposed += backgroundWorkerTrainer_Disposed;
    backgroundWorkerTrainer.DoWork += backgroundWorkerTrainer_DoWork;
    backgroundWorkerTrainer.ProgressChanged += backgroundWorkerTrainer_ProgressChanged;
    backgroundWorkerTrainer.WorkerSupportsCancellation = true;
    backgroundWorkerTrainer.WorkerReportsProgress = true;

    saveFileDialog1.Filter = "feed forward network files (*.ffn)|*.ffn";
    saveFileDialog1.Title = "Save neural network file";
    saveFileDialog1.InitialDirectory = null;
    saveFileDialog1.FileName = null;

    openFileDialog1.Filter = "feed forward network files (*.ffn)|*.ffn";
    openFileDialog1.Title = "Load neural network file";
    openFileDialog1.InitialDirectory = null;
    openFileDialog1.FileName = null;

    backgroundWorkerSignal.Disposed += backgroundWorkerSignal_Disposed;
    backgroundWorkerSignal.WorkerSupportsCancellation = true;
    backgroundWorkerSignal.WorkerReportsProgress = true;
    backgroundWorkerSignal.DoWork += backgroundWorkerSignal_DoWork;
    backgroundWorkerSignal.RunWorkerCompleted += backgroundWorkerSignal_RunWorkerCompleted;

    // Networks 1-4 all alias the same ActivationNetwork instance.
    network1 = activation_nework;
    network2 = activation_nework; // new ActivationNetwork(new BipolarSigmoidFunction(), 50, 50, 40, 1);
    network3 = activation_nework; // new ActivationNetwork(new BipolarSigmoidFunction(), 50, 50, 40, 1);
    network4 = activation_nework; // new ActivationNetwork(new BipolarSigmoidFunction(), 50, 50, 40, 1);
    network5 = new ActivationNetwork(new BipolarSigmoidFunction(), 50, 1);
    network6 = new ActivationNetwork(new BipolarSigmoidFunction(), 50, 1);

    // One learning algorithm per network, for side-by-side comparison.
    teacher = new BackPropagationLearning(network1);
    evteacher = new EvolutionaryLearning(network2, 100);
    reprop = new ResilientBackpropagationLearning(network3);
    lbteacher = new LevenbergMarquardtLearning(network4);
    delta = new DeltaRuleLearning(network5);
    perceptron = new PerceptronLearning(network6);
    delta.LearningRate = 1;
    perceptron.LearningRate = 0.1;

    // Chart setup (ZedGraph).
    myPane = new GraphPane();
    listPointsOne = new PointPairList();
    myPane = zedGraphControl1.GraphPane;

    // Set a title.
    myPane.Title.Text = "Error VS Time";

    // Set X and Y axis titles.
    myPane.XAxis.Title.Text = "Time in Milliseconds";
    myPane.YAxis.Title.Text = "Error";

    myCurveOne = myPane.AddCurve("Learning curve", listPointsOne, Color.Red, SymbolType.None);
    // myCurveOne = myPane.AddCurve("Resilient Back Propagation", listPointstwo, Color.Green, SymbolType.None);
    // myCurveOne = myPane.AddCurve("Genetic Learning", listPointsthree, Color.Blue, SymbolType.None);
}
public BinarySplitForm()
{
    InitializeComponent();
    watch = new Stopwatch();

    backgroundWorkerTrainer.Disposed += backgroundWorkerTrainer_Disposed;
    backgroundWorkerTrainer.DoWork += backgroundWorkerTrainer_DoWork;
    backgroundWorkerTrainer.ProgressChanged += backgroundWorkerTrainer_ProgressChanged;
    backgroundWorkerTrainer.WorkerSupportsCancellation = true;
    backgroundWorkerTrainer.WorkerReportsProgress = true;

    saveFileDialog1.Filter = "feed forward network files (*.ffn)|*.ffn";
    saveFileDialog1.Title = "Save neural network file";
    saveFileDialog1.InitialDirectory = null;
    saveFileDialog1.FileName = null;

    openFileDialog1.Filter = "feed forward network files (*.ffn)|*.ffn";
    openFileDialog1.Title = "Load neural network file";
    openFileDialog1.InitialDirectory = null;
    openFileDialog1.FileName = null;

    backgroundWorkerSignal.Disposed += backgroundWorkerSignal_Disposed;
    backgroundWorkerSignal.WorkerSupportsCancellation = true;
    backgroundWorkerSignal.WorkerReportsProgress = true;
    backgroundWorkerSignal.DoWork += backgroundWorkerSignal_DoWork;
    backgroundWorkerSignal.RunWorkerCompleted += backgroundWorkerSignal_RunWorkerCompleted;

    // Initialize input and output values (XOR truth table).
    input = new double[4][]
    {
        new double[] { 0, 0 },
        new double[] { 0, 1 },
        new double[] { 1, 0 },
        new double[] { 1, 1 }
    };
    output = new double[4][]
    {
        new double[] { 0 },
        new double[] { 1 },
        new double[] { 1 },
        new double[] { 0 }
    };

    network = new ActivationNetwork(new SigmoidFunction(2), 2, 2, 1);
    teacher = new BackPropagationLearning(network);

    // Pane used to draw the chart.
    myPane = new GraphPane();

    // Point pair lists.
    listPointsOne = new PointPairList();
}
public double[] Run(int[] numOfHiddenNeurals)
{
    // Learning (training) and prediction errors.
    double[] errors = new double[2];

    // Number of learning samples.
    int samples = _data.Length - _windowSize;
    int trainingSamples = samples - _predictionSize;

    // Prepare learning data.
    double[][] input = new double[samples][];
    double[][] output = new double[samples][];

    // Sample indices.
    int[] indices = new int[samples];
    int[] trainingIndices = new int[trainingSamples];

    // Normalization function.
    var normalizeFunc = new MinMaxNormalization(_xMax, _xMin, _data.Max(), _data.Min());

    for (int i = 0; i < samples; i++)
    {
        input[i] = new double[_windowSize];
        output[i] = new double[_outputNum];

        // Set input: a sliding window over the series.
        for (int j = 0; j < _windowSize; j++)
        {
            input[i][j] = normalizeFunc.Compute(_data[i + j]);
        }

        // Set output: the values following the window.
        for (int j = 0; j < _outputNum; j++)
        {
            output[i][j] = normalizeFunc.Compute(_data[i + _windowSize + j]);
        }

        indices[i] = i;
    }

    // Randomize the sample indices.
    Utils.Shuffle<int>(indices);
    output.Swap(indices);
    input.Swap(indices);

    // Get training samples.
    double[][] trainingInput = new double[trainingSamples][];
    double[][] trainingOutput = new double[trainingSamples][];
    for (int i = 0; i < trainingSamples; i++)
    {
        trainingInput[i] = new double[_windowSize];
        trainingOutput[i] = new double[_outputNum];

        for (int j = 0; j < _windowSize; j++)
        {
            trainingInput[i][j] = input[i][j];
        }
        for (int j = 0; j < _outputNum; j++)
        {
            trainingOutput[i][j] = output[i][j];
        }

        trainingIndices[i] = i;
    }

    // Create the multi-layer neural network.
    int[] neuronsCount = numOfHiddenNeurals.Concat(new int[] { _outputNum }).ToArray();
    ActivationNetwork network = new ActivationNetwork(_function, _windowSize, neuronsCount);

    // Create the teacher; set the learning rate and momentum.
    BackPropagationLearning teacher = new BackPropagationLearning(network);
    teacher.LearningRate = _learningRate;
    teacher.Momentum = 0.0;

    // Iterations.
    int iteration = 1;

    // Solution array.
    int solutionSize = _data.Length - _windowSize;
    double[,] solution = new double[solutionSize, 1 + _outputNum];

    // Calculate X values to be used with the solution function.
    for (int j = 0; j < solutionSize; j++)
    {
        solution[j, 0] = j + _windowSize;
    }

    // Training loop.
    var needToStop = false;
    while (!needToStop)
    {
        // Run one epoch of the learning procedure.
        double error = teacher.RunEpoch(trainingInput, trainingOutput) / trainingSamples;

        // Calculate the solution and the learning and prediction errors
        // every 5 iterations.
        double learningError = 0.0;
        double predictionError = 0.0;
        if (iteration % 5 == 0)
        {
            // Go through all the data.
            for (int i = 0; i < samples; i++)
            {
                double err = 0.0;
                for (int j = 0; j < _outputNum; j++)
                {
                    double y = output[i][j];
                    double o = network.Compute(input[i])[j];
                    err += (o - y) * (o - y) / _outputNum;

                    // Evaluate the function.
                    solution[i, j + 1] = normalizeFunc.Inverse(o);
                }

                // Accumulate the prediction error (MSE) on held-out samples
                // and the learning error on training samples.
                if (i >= trainingSamples)
                {
                    predictionError += err;
                }
                else
                {
                    learningError += err;
                }
            }
        }

        // Adaptive learning: decrease the learning rate as
        //   n(t) = n0 * a^(t/T), with a = 1/10^x, x >= 1.
        // The cast keeps the exponent from collapsing to integer zero.
        teacher.LearningRate = _learningRate * Math.Pow(0.1, (double)iteration / _iterations);

        // Increase iteration.
        iteration++;

        // Check if we need to stop.
        if ((_iterations != 0) && (iteration > _iterations))
        {
            errors[0] = learningError / trainingSamples;
            errors[1] = predictionError / _predictionSize;
            Console.WriteLine("Final Learning MSE Error: " + errors[0]);
            Console.WriteLine("Final Prediction MSE Error: " + errors[1]);
            Console.WriteLine("Final Learning Rate: " + teacher.LearningRate);
            Console.WriteLine("Window Size: " + _windowSize + "\n" +
                "Number of Hidden Neurons: " + neuronsCount[0] + "\n" +
                "Output Size: " + _outputNum);
            break;
        }
    }

    return errors;
}
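// The adaptive schedule above follows n(t) = n0 * 0.1^(t / T), shrinking
// the learning rate by a factor of ten over the run. A stand-alone sketch
// with illustrative values for n0 and T:
double n0 = 0.1;  // initial learning rate
double T = 1000;  // total iterations
for (int t = 0; t <= 1000; t += 250)
    Console.WriteLine($"iteration {t}: rate = {n0 * Math.Pow(0.1, t / T):G4}");
// prints 0.1 at t = 0 down to 0.01 at t = 1000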
static void Main(string[] args)
{
    double[][] inputs;
    double[][] outputs;
    double[][] testInputs;
    double[][] testOutputs;

    // Load the ASCII digits dataset.
    inputs = DataManager.Load(@"../../../data/data.txt", out outputs);

    // The first 500 data rows will be for training. The rest will be for testing.
    testInputs = inputs.Skip(500).ToArray();
    testOutputs = outputs.Skip(500).ToArray();
    inputs = inputs.Take(500).ToArray();
    outputs = outputs.Take(500).ToArray();

    // Setup the deep belief network and initialize with random weights.
    DeepBeliefNetwork network = new DeepBeliefNetwork(inputs.First().Length, 10, 10);
    new GaussianWeights(network, 0.1).Randomize();
    network.UpdateVisibleWeights();

    // Setup the learning algorithm.
    DeepBeliefNetworkLearning teacher = new DeepBeliefNetworkLearning(network)
    {
        Algorithm = (h, v, i) => new ContrastiveDivergenceLearning(h, v)
        {
            LearningRate = 0.1,
            Momentum = 0.5,
            Decay = 0.001,
        }
    };

    // Setup batches of input for learning.
    int batchCount = Math.Max(1, inputs.Length / 100);

    // Create mini-batches to speed learning.
    int[] groups = Accord.Statistics.Tools.RandomGroups(inputs.Length, batchCount);
    double[][][] batches = inputs.Subgroups(groups);

    // Learning data for the specified layer.
    double[][][] layerData;

    // Unsupervised learning on each hidden layer, except for the output layer.
    for (int layerIndex = 0; layerIndex < network.Machines.Count - 1; layerIndex++)
    {
        teacher.LayerIndex = layerIndex;
        layerData = teacher.GetLayerInput(batches);
        for (int i = 0; i < 200; i++)
        {
            double error = teacher.RunEpoch(layerData) / inputs.Length;
            if (i % 10 == 0)
            {
                Console.WriteLine(i + ", Error = " + error);
            }
        }
    }

    // Supervised learning on the entire network, to provide output classification.
    var teacher2 = new BackPropagationLearning(network)
    {
        LearningRate = 0.1,
        Momentum = 0.5
    };

    // Run supervised learning.
    for (int i = 0; i < 500; i++)
    {
        double error = teacher2.RunEpoch(inputs, outputs) / inputs.Length;
        if (i % 10 == 0)
        {
            Console.WriteLine(i + ", Error = " + error);
        }
    }

    // Test the resulting accuracy on the held-out test rows.
    int correct = 0;
    for (int i = 0; i < testInputs.Length; i++)
    {
        double[] outputValues = network.Compute(testInputs[i]);
        if (DataManager.FormatOutputResult(outputValues) == DataManager.FormatOutputResult(testOutputs[i]))
        {
            correct++;
        }
    }

    Console.WriteLine("Correct " + correct + "/" + testInputs.Length + ", " +
        Math.Round((double)correct / testInputs.Length * 100, 2) + "%");
    Console.Write("Press any key to quit ..");
    Console.ReadKey();
}
private void button3_Click_1(object sender, EventArgs e)
{
    button3.Enabled = false;
    network1 = null;
    teacher = null;

    network1 = new ActivationNetwork(new BipolarSigmoidFunction(param2), 100,
        100, 90, 80, 70, 60, 50, 40, 30, 20, 10, 1);
    teacher = new BackPropagationLearning(network1);
    network1.Randomize();
}