Example #1
0
 /// <summary>
 /// Validates the training set against the network topology and then runs
 /// gradient-descent adaptation over it.
 /// </summary>
 /// <param name="trainingSet">Input/output pairs to train on; must be non-empty.</param>
 /// <param name="maxK">Maximum number of adaptation iterations.</param>
 /// <param name="eps">Error threshold; adaptation stops once the gradient magnitude falls to it.</param>
 /// <param name="lambda">Learning rate.</param>
 /// <param name="micro">Momentum coefficient.</param>
 public void Learn(SetOfIOPairs trainingSet, int maxK, double eps, double lambda, double micro)
 {
     // NOTE(review): ArgumentException would be the idiomatic type for all three checks
     // (NullReferenceException / IndexOutOfRangeException should not be thrown directly),
     // but the original exception types are kept so existing catch blocks keep working.
     if (trainingSet.Pairs.Count == 0)
     {
         throw new NullReferenceException(
                   "[Error] NN-Learn: You are using an empty training set, neural network couldn't be trained.");
     }
     if (trainingSet.Pairs[0].Inputs.Count != GetLayer(0).NumberOfNeurons())
     {
         throw new IndexOutOfRangeException("[Error] NN-Test: You are trying to pass vector with " +
                                            trainingSet.Pairs[0].Inputs.Count +
                                            " values into neural layer with " +
                                            GetLayer(0).NumberOfNeurons() +
                                            " neurons. Consider using another network, or another descriptors.");
     }
     if (trainingSet.Pairs[0].Outputs.Count != GetLayer(NumberOfLayers() - 1).NumberOfNeurons())
     {
         // Bug fix: this branch previously reported the INPUT vector size and the INPUT
         // layer's neuron count, even though the OUTPUT side is what failed the check.
         throw new IndexOutOfRangeException("[Error] NN-Test:  You are trying to pass vector with " +
                                            trainingSet.Pairs[0].Outputs.Count +
                                            " values into neural layer with " +
                                            GetLayer(NumberOfLayers() - 1).NumberOfNeurons() +
                                            " neurons. Consider using another network, or another descriptors.");
     }
     Adaptation(trainingSet, maxK, eps, lambda, micro);
 }
Example #2
0
        /// <summary>
        /// Accumulates the gradient over the whole training set: the per-pair gradient is
        /// computed for every IO pair and folded into <paramref name="totalGradients"/>.
        /// </summary>
        private void ComputeTotalGradient(Gradients totalGradients, Gradients partialGradients,
                                          SetOfIOPairs trainingSet)
        {
            totalGradients.ResetGradients();

            foreach (SetOfIOPairs.IOPair ioPair in trainingSet.Pairs)
            {
                // Gradient contribution of this single input/output pair.
                ComputeGradient(partialGradients, ioPair.Inputs, ioPair.Outputs);

                // Fold it into the running total, layer by layer from the output layer
                // down (input layer 0 carries no adaptable parameters).
                for (int layer = NumberOfLayers() - 1; layer >= 1; layer--)
                {
                    NeuralLayer neuralLayer = GetLayer(layer);
                    int inputCount = neuralLayer.LowerLayer().NumberOfNeurons();

                    for (int neuron = 0; neuron < neuralLayer.NumberOfNeurons(); neuron++)
                    {
                        totalGradients.IncrementThreshold(layer, neuron,
                                                          partialGradients.GetThreshold(layer, neuron));
                        for (int weight = 0; weight < inputCount; weight++)
                        {
                            totalGradients.IncrementWeight(layer, neuron, weight,
                                                           partialGradients.GetWeight(layer, neuron, weight));
                        }
                    }
                }
            }
        }
Example #3
0
        /// <summary>
        /// Gradient-descent adaptation loop with momentum. Weights and thresholds are first
        /// randomised into (-1, 1), then repeatedly stepped against the total gradient until
        /// either <paramref name="maxK"/> iterations have run or the gradient magnitude is
        /// no longer above <paramref name="eps"/>.
        /// </summary>
        private void Adaptation(SetOfIOPairs trainingSet, int maxK, double eps, double lambda, double micro)
        {
            Gradients previousDeltas = new Gradients(this); // last step's deltas (momentum term)
            Gradients totalGradients = new Gradients(this); // gradient summed over the training set
            Gradients pairGradients  = new Gradients(this); // scratch gradient for a single pair

            Console.WriteLine("setting up random weights and thresholds ...");

            // Randomise every adaptable layer (all but input layer 0) into (-1, 1).
            for (int layer = NumberOfLayers() - 1; layer >= 1; layer--)
            {
                NeuralLayer neuralLayer = GetLayer(layer);
                for (int n = 0; n < neuralLayer.NumberOfNeurons(); n++)
                {
                    Neuron neuron = neuralLayer.GetNeuron(n);
                    neuron.Threshold = 2 * Random() - 1;
                    for (int input = 0; input < neuron.NumberOfInputs(); input++)
                    {
                        neuron.GetInput(input).Weight = 2 * Random() - 1;
                    }
                }
            }

            int    iteration = 0;
            double error     = double.PositiveInfinity;

            Console.WriteLine("entering adaptation loop ... (maxK = " + maxK + ")");

            while (iteration < maxK && error > eps)
            {
                ComputeTotalGradient(totalGradients, pairGradients, trainingSet);

                for (int layer = NumberOfLayers() - 1; layer >= 1; layer--)
                {
                    NeuralLayer neuralLayer = GetLayer(layer);

                    // Threshold update: steepest descent plus a momentum fraction of the previous step.
                    for (int n = 0; n < neuralLayer.NumberOfNeurons(); n++)
                    {
                        Neuron neuron = neuralLayer.GetNeuron(n);
                        double step = -lambda * totalGradients.GetThreshold(layer, n)
                                      + micro * previousDeltas.GetThreshold(layer, n);

                        neuron.Threshold += step;
                        previousDeltas.SetThreshold(layer, n, step);
                    }

                    // Weight update: the same rule applied to every incoming connection.
                    for (int n = 0; n < neuralLayer.NumberOfNeurons(); n++)
                    {
                        Neuron neuron = neuralLayer.GetNeuron(n);
                        for (int input = 0; input < neuron.NumberOfInputs(); input++)
                        {
                            double step = -lambda * totalGradients.GetWeight(layer, n, input)
                                          + micro * previousDeltas.GetWeight(layer, n, input);

                            neuron.GetInput(input).Weight += step;
                            previousDeltas.SetWeight(layer, n, input, step);
                        }
                    }
                }

                error = totalGradients.GetGradientAbs();
                iteration++;
                if (iteration % 25 == 0)
                {
                    Console.WriteLine("currK=" + iteration + "   currE=" + error);
                }
            }
        }