/// <summary>
/// Preliminary training: generates six training sets, normalizes them, then runs
/// evolutionary learning epochs until the error drops below PROG_NAUKI or stops
/// changing for MAX_BEZ_ZMIANY consecutive epochs. Clears the training sets when done.
/// </summary>
void UczenieWstepne()
{
    // BUG FIX: the original compared successive epoch errors with ==, which is
    // unreliable for doubles; a tolerance makes the stall detector actually fire.
    const double Tolerance = 1e-12;

    // Generate the six initial training sets (the original kept a second
    // counter j with j % 6, which is always equal to i — removed).
    for (int i = 0; i < 6; i++)
    {
        WygenerujZestawUczacy(i);
    }

    List<double[]> wejscie = new List<double[]>();
    List<double[]> wyjscie = new List<double[]>();
    foreach (ZestawDanychSieci zestawDanych in zestawyUczace)
    {
        zestawDanych.NormalizujWejscie();
        wejscie.Add(zestawDanych.TablicaWejscia());
        wyjscie.Add(zestawDanych.TablicaWyjscia());
    }

    double[][] wejscieTab = wejscie.ToArray();
    double[][] wyjscieTab = wyjscie.ToArray();

    double poprzedniBlad = 1;
    int bezZmiany = 0; // epochs since the error last changed (was a double — counter is an int)

    while (true)
    {
        // Run one epoch of the evolutionary learning procedure.
        double blad = ewolutor.RunEpoch(wejscieTab, wyjscieTab);

        if (blad <= PROG_NAUKI)
        {
            break; // converged
        }

        if (Math.Abs(poprzedniBlad - blad) < Tolerance)
        {
            // Error is stalled; give up after MAX_BEZ_ZMIANY unchanged epochs.
            if (bezZmiany >= MAX_BEZ_ZMIANY)
            {
                break;
            }
            bezZmiany++;
        }
        else
        {
            poprzedniBlad = blad;
            bezZmiany = 0;
        }
    }

    zestawyUczace.Clear();
}
/// <summary>
/// Background training loop: runs evolutionary learning epochs while <c>work</c>
/// is true, pushing the current error to the UI (text, chart, graph) after each epoch.
/// </summary>
/// <param name="sender">The BackgroundWorker raising the event (unused).</param>
/// <param name="e">Event arguments (unused).</param>
void backgroundWorkerTrainer_DoWork(object sender, DoWorkEventArgs e)
{
    while (work)
    {
        // Run one epoch of the learning procedure.
        double error = teacherEV.RunEpoch(input, output);

        // Scale the error for the integer/float chart series.
        // (Removed dead local: the original also built an unused string from c.)
        var c = error * 10;
        gerrror.Add(Convert.ToInt32(c)); // NOTE(review): rounds; may throw on very large errors — confirm acceptable
        SetText(error.ToString());
        chartdata.Add(Convert.ToSingle(c));
        listPointsOne.Add((double)watch.ElapsedMilliseconds, error);
        axisChangeZedGraph(zedGraphControl1);

        // Brief pause so the UI thread can repaint.
        Thread.Sleep(2);

        if (logger)
        {
            SetTextLogger(error.ToString());
        }
        if (watch.IsRunning)
        {
            SetTextTime(watch.ElapsedMilliseconds.ToString());
        }
    }
}
/// <summary>
/// Trains the network on every situation/decision pair in <paramref name="trainSet"/>,
/// running one evolutionary epoch per sample, repeated for 10000 passes over the set.
/// Prints the last sample's error after each pass.
/// </summary>
/// <param name="trainSet">Paired situations and decisions to learn from.</param>
/// <param name="teacher">Evolutionary learning algorithm bound to the network.</param>
private void TrainNetworkE(TrainSet trainSet, EvolutionaryLearning teacher)
{
    // Reusable sample buffers (contents overwritten each iteration).
    double[] input = new double[5];
    double[] output = new double[4];
    double error = 10;
    int epoch = 10000;

    while (epoch-- > 0)
    {
        for (int s = 0; s < trainSet.Situation.Count; s++)
        {
            // Declared where first assigned — the original pre-allocated arrays
            // for these that were immediately discarded on each iteration.
            double[] sides = SimplifyEnvironment(trainSet.Situation[s].Environment);
            double[] direction = VectorFromDirection(trainSet.Decision[s].Direction);

            // INPUT: population plus the four simplified neighbor readings.
            input[0] = trainSet.Situation[s].ColonyPopulation;
            input[1] = sides[0]; // UP
            input[2] = sides[1]; // RIGHT
            input[3] = sides[2]; // DOWN
            input[4] = sides[3]; // LEFT

            // OUTPUT: leave flag (bipolar), population to move, direction vector.
            output[0] = trainSet.Decision[s].IsLeaving ? 1 : -1;
            output[1] = trainSet.Decision[s].PopulationToMove;
            output[2] = direction[0]; // X
            output[3] = direction[1]; // Y

            // One epoch per single sample; error reflects only the last sample.
            error = teacher.RunEpoch(new double[][] { input }, new double[][] { output });
        }
        Debug.Print(error.ToString());
    }
}
/// <summary>
/// Background training loop: each iteration runs one epoch of the algorithm
/// selected by <c>selected_algorythm</c> (0..5) and pushes the error to the UI.
/// An out-of-range selection runs no epoch and plots nothing for that iteration.
/// </summary>
/// <param name="sender">The BackgroundWorker raising the event (unused).</param>
/// <param name="e">Event arguments (unused).</param>
void backgroundWorkerTrainer_DoWork(object sender, DoWorkEventArgs e)
{
    while (work)
    {
        double error = 0;
        bool ran = true;

        // Dispatch to the selected teacher. The original repeated the same
        // if-block (RunEpoch + listPointsOne.Add) six times; consolidated.
        switch (selected_algorythm)
        {
            case 0: error = teacher.RunEpoch(input, output); break;
            case 1: error = reprop.RunEpoch(input, output); break;
            case 2: error = evteacher.RunEpoch(input, output); break;
            case 3: error = lbteacher.RunEpoch(input, output); break;
            case 4: error = delta.RunEpoch(input, output); break;
            case 5: error = perceptron.RunEpoch(input, output); break;
            default: ran = false; break; // unknown selection: skip plotting, as the original did
        }

        if (ran)
        {
            listPointsOne.Add((double)watch1.ElapsedMilliseconds, error);
        }

        // Scale the error for the chart series.
        // (Removed dead local: the original built an unused string from c.)
        var c = error * 10;
        //gerrror.Add(Convert.ToInt32(c));
        SetText(error.ToString());
        chartdata.Add(Convert.ToSingle(c));
        axisChangeZedGraph(zedGraphControl1);

        // Brief pause so the UI thread can repaint.
        Thread.Sleep(2);

        if (logger)
        {
            SetTextLogger(error.ToString());
        }
        if (watch1.IsRunning)
        {
            SetTextTime(watch1.ElapsedMilliseconds.ToString());
        }

        // User-configurable throttle between epochs.
        Thread.Sleep(sleeptime);
    }
}
/// <summary>
/// Builds and trains a deep belief network on the drug-consumption data set,
/// predicting the column named in <c>LookingFor</c>. Training is three-phase:
/// (1) layer-wise contrastive divergence pre-training, (2) evolutionary learning,
/// (3) backpropagation fine-tuning. Finally computes classification accuracy.
/// </summary>
/// <param name="show">When true, prints progress and accuracy to the console.</param>
/// <returns>The error value from the last backpropagation epoch.</returns>
static double Neural_Network(bool show)
{
    // BUG FIX: `new double()` was a roundabout zero; plain 0 is equivalent and clear.
    double error = 0;

    DataTable entireData = DataController.MakeDataTable("../../drug_consumption.txt");
    Codification codebook = new Codification(entireData);

    // Target column to predict.
    string LookingFor = "Heroine";
    int good = 0;

    // NOTE(review): training and "test" data are read from the SAME file, so the
    // accuracy below is measured on training data (data leakage) — confirm intent.
    string[][] outputs;
    string[][] inputs = DataController.MakeString("../../drug_consumption_500.txt", out outputs);
    string[][] testOutputs;
    string[][] testInputs = DataController.MakeString("../../drug_consumption_500.txt", out testOutputs);

    DataTable outputs1 = DataController.MakeDataFromString(outputs, "output");
    DataTable inputs1 = DataController.MakeDataFromString(inputs, "input");
    DataTable testOutputs1 = DataController.MakeDataFromString(testOutputs, "output");
    DataTable testInputs1 = DataController.MakeDataFromString(testInputs, "input");

    // Codify symbolic columns into numeric form.
    DataTable Isymbols = codebook.Apply(inputs1);
    DataTable Osymbols = codebook.Apply(outputs1);
    DataTable TIsymbols = codebook.Apply(testInputs1);
    DataTable TOsymbols = codebook.Apply(testOutputs1);

    double[][] inputsD = Isymbols.ToJagged<double>("Age", "Gender", "Education", "Country", "Eticnity", "Nscore", "Escore", "Oscore", "Ascore", "Cscore", "Impulsive", "SS");
    double[][] outputsD = Osymbols.ToJagged<double>(LookingFor);
    outputsD = DataController.convertDT(outputsD);
    double[][] inputsT = TIsymbols.ToJagged<double>("Age", "Gender", "Education", "Country", "Eticnity", "Nscore", "Escore", "Oscore", "Ascore", "Cscore", "Impulsive", "SS");
    double[][] outputsT = TOsymbols.ToJagged<double>(LookingFor);
    outputsT = DataController.convertDT(outputsT);

    // NOTE(review): network input width comes from the raw string columns
    // (inputs.First().Length), not from inputsD[0].Length — verify they match.
    DeepBeliefNetwork network = new DeepBeliefNetwork(inputs.First().Length, 10, 7);
    new GaussianWeights(network, 0.1).Randomize();
    network.UpdateVisibleWeights();

    // Phase 1: unsupervised layer-wise pre-training with contrastive divergence.
    DeepBeliefNetworkLearning FirstLearner = new DeepBeliefNetworkLearning(network)
    {
        Algorithm = (h, v, i) => new ContrastiveDivergenceLearning(h, v)
        {
            LearningRate = 0.1,
            Momentum = 0.5,
            Decay = 0.001,
        }
    };

    // NOTE(review): batch count is sized from `inputs` but the grouping uses
    // `inputsD` — same row count here, but inconsistent; confirm.
    int batchCount = Math.Max(1, inputs.Length / 100);
    int[] groupsNew = Accord.Statistics.Classes.Random(inputsD.Length, batchCount);
    double[][][] batchesNew = Accord.Statistics.Classes.Separate(inputsD, groupsNew);
    double[][][] layerData;

    for (int layerIndex = 0; layerIndex < network.Machines.Count - 1; layerIndex++)
    {
        FirstLearner.LayerIndex = layerIndex;
        layerData = FirstLearner.GetLayerInput(batchesNew);
        for (int i = 0; i < 500; i++)
        {
            error = FirstLearner.RunEpoch(layerData) / inputsD.Length;
            if (i % 10 == 0 && show)
            {
                Console.WriteLine("Error value(" + LookingFor + ", test: " + i + ") = " + error);
            }
        }
    }

    // Phase 3 learner, constructed up front (used after evolutionary training).
    var SecondLearner = new BackPropagationLearning(network)
    {
        LearningRate = 0.15,
        Momentum = 0.7
    };

    // Phase 2: supervised evolutionary learning (population size 100).
    EvolutionaryLearning teacher = new EvolutionaryLearning(network, 100);
    for (int i = 0; i < 800; i++)
    {
        error = teacher.RunEpoch(inputsD, outputsD) / inputsD.Length;
        if (i % 50 == 0 && show)
        {
            Console.WriteLine("Error value(" + LookingFor + ", test: " + i + ") = " + error);
        }
    }

    // Phase 3: backpropagation fine-tuning.
    for (int i = 0; i < 800; i++)
    {
        error = SecondLearner.RunEpoch(inputsD, outputsD) / inputsD.Length;
        if (i % 10 == 0 && show)
        {
            Console.WriteLine("Error value(" + LookingFor + ", test: " + i + ") = " + error);
        }
    }

    // Accuracy: count samples whose argmax output matches the argmax target.
    for (int i = 0; i < inputsD.Length; i++)
    {
        double[] outputValues = network.Compute(inputsT[i]);
        if (outputValues.ToList().IndexOf(outputValues.Max()) == outputsT[i].ToList().IndexOf(outputsT[i].Max()))
        {
            good++;
        }
    }

    if (show)
    {
        Console.WriteLine("Poprawność - " + Math.Round(((double)good / (double)inputsD.Length * 100), 4) + "%");
        Console.ReadKey();
    }

    return (error);
}
/// <summary>
/// Runs one epoch of evolutionary learning over the given samples and
/// reports the resulting network error.
/// </summary>
/// <param name="trainingData">Input vectors, one per training sample.</param>
/// <param name="targetResults">Expected output vectors, one per training sample.</param>
/// <returns>The learning error after the epoch completes.</returns>
public double Train(double[][] trainingData, double[][] targetResults)
    => teacher.RunEpoch(trainingData, targetResults);