// Training algorithm: error-driven backpropagation
public override bool Train(ClassificationData trainingData)
{
    // Initialize the network: set random weights for the hidden layer
    for (int i = 0; i < hiddenLayer.Count; i++)
    {
        hiddenLayer[i].LoadRandomWeights();
    }

    // Set random weights for the second (output) layer
    secondLayer.LoadRandomWeights();

    // Go through all the samples until the maximum number of repetitions is reached
    int repetitions = 0;
    while (repetitions < maxRepetitions)
    {
        // Go through all samples
        for (int i = 0; i < trainingData.Samples.Count; i++)
        {
            // Compute the predicted result and the signed error
            string result = Classify(trainingData.Samples[i]);
            string expected = trainingData.Samples[i].Category;

            double error;
            if ((result == "good") && (expected == "bad"))
            {
                error = -1.0;
            }
            else if ((result == "bad") && (expected == "good"))
            {
                error = 1.0;
            }
            else
            {
                // Correct prediction: no error, continue with the next sample
                continue;
            }

            // Backpropagate through the second layer
            secondLayer.BackPropagate(learningRate, error);

            // Backpropagate through the first (hidden) layer
            for (int j = 0; j < hiddenLayer.Count; j++)
            {
                FirstLayerNeuron neuron = (FirstLayerNeuron)hiddenLayer[j];
                neuron.BackPropagate(learningRate, error, secondLayer);
            }
        }

        // Next repetition
        repetitions++;
    }

    // Mark the network as trained; this implementation always reports success
    isTrained = true;
    return true;
}
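The +/-1 error encoding above suggests a perceptron-style delta rule inside BackPropagate, whose body is not shown here. The following is a minimal sketch of what such an update could look like, not the project's actual code; the fields weights and lastInputs are hypothetical (lastInputs is assumed to be cached by the most recent Classify call).

// A minimal sketch, not the actual library code: one plausible reading of
// the output neuron's BackPropagate as a delta-rule update, consistent with
// the signed +/-1 error computed by Train above.
class SecondLayerNeuronSketch
{
    private double[] weights;    // hypothetical weight vector
    private double[] lastInputs; // hypothetical inputs cached by the last Classify call

    public void BackPropagate(double learningRate, double error)
    {
        // Delta rule: move each weight in the direction indicated by the
        // signed error, scaled by the matching input and the learning rate
        for (int i = 0; i < weights.Length; i++)
        {
            weights[i] += learningRate * error * lastInputs[i];
        }
    }
}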
public FirstLayer(int n, Dictionary<double, List<double[]>> xSet)
{
    Neurons = new FirstLayerNeuron[3];

    // Compute the initial RBF parameters from the training set
    var initParams = NeuronsInitTools.GetInitRBFParams(xSet, n);

    for (int i = 0; i < Neurons.Length; i++)
    {
        Neurons[i] = new FirstLayerNeuron(n);
        Neurons[i].CList = initParams.Item1[i]; // parameters specific to neuron i
        Neurons[i].SList = initParams.Item2;    // parameters shared by all neurons
    }
}
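For context, a hedged usage sketch of this constructor: the dictionary appears to map a numeric class label to that class's sample vectors, and n is taken here to be the input dimensionality. The labels (0.0 / 1.0) and the sample values below are illustrative assumptions, not taken from the source.

// Illustrative only: build the training-set dictionary the constructor expects.
var xSet = new Dictionary<double, List<double[]>>
{
    { 0.0, new List<double[]> { new[] { 0.1, 0.2 }, new[] { 0.15, 0.25 } } },
    { 1.0, new List<double[]> { new[] { 0.9, 0.8 }, new[] { 0.85, 0.75 } } }
};

// n = 2: assumed dimensionality of each input vector
var firstLayer = new FirstLayer(2, xSet);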
// Product of the neuron's activations over all components of x, optionally
// replacing the component at withoutIndex with a neutral factor of 1
public static double CalculateL(List<double> x, FirstLayerNeuron neuron, int withoutIndex = -1) =>
    x.Select((xi, i) => i == withoutIndex ? 1.0 : neuron.Calculate(i, xi))
     .Aggregate((product, factor) => product * factor);
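The LINQ expression body is compact; an equivalent explicit loop makes the intent plainer. This version is a restatement of CalculateL above, not new functionality (the hypothetical name CalculateLLoop is used only to avoid a collision):

// Equivalent explicit-loop form: multiply the neuron's activation for every
// component of x, skipping the component at withoutIndex if one is given.
public static double CalculateLLoop(List<double> x, FirstLayerNeuron neuron, int withoutIndex = -1)
{
    double product = 1.0;
    for (int i = 0; i < x.Count; i++)
    {
        if (i == withoutIndex) continue; // excluded component contributes nothing
        product *= neuron.Calculate(i, x[i]);
    }
    return product;
}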