Code example #1
        /// <summary>
        /// Trains the network by gradient descent with a momentum term:
        /// all weights and thresholds are first randomized into (-1, 1),
        /// then repeatedly adjusted against the total gradient computed
        /// over the whole training set until the gradient magnitude drops
        /// below <paramref name="eps"/> or <paramref name="maxK"/> iterations pass.
        /// </summary>
        /// <param name="trainingSet">Input/output pairs the gradient is computed over.</param>
        /// <param name="maxK">Upper bound on the number of adaptation iterations.</param>
        /// <param name="eps">Stop threshold for the total gradient magnitude.</param>
        /// <param name="lambda">Learning rate applied to the current gradient.</param>
        /// <param name="micro">Momentum coefficient applied to the previous step's deltas.</param>
        private void Adaptation(SetOfIOPairs trainingSet, int maxK, double eps, double lambda, double micro)
        {
            // previousDeltas holds the last step taken per threshold/weight (momentum memory).
            Gradients previousDeltas   = new Gradients(this);
            Gradients totalGradients   = new Gradients(this);
            Gradients partialGradients = new Gradients(this);

            Console.WriteLine("setting up random weights and thresholds ...");

            // Randomize every trainable layer (layer 0 is the input layer and is skipped).
            for (int layerIdx = NumberOfLayers() - 1; layerIdx >= 1; layerIdx--)
            {
                NeuralLayer layer = GetLayer(layerIdx);
                for (int neuronIdx = 0; neuronIdx < layer.NumberOfNeurons(); neuronIdx++)
                {
                    Neuron neuron = layer.GetNeuron(neuronIdx);
                    // Map Random() from [0,1) into (-1,1).
                    neuron.Threshold = 2 * Random() - 1;
                    for (int inputIdx = 0; inputIdx < neuron.NumberOfInputs(); inputIdx++)
                    {
                        neuron.GetInput(inputIdx).Weight = 2 * Random() - 1;
                    }
                }
            }

            int    iteration = 0;
            double error     = double.PositiveInfinity;

            Console.WriteLine("entering adaptation loop ... (maxK = " + maxK + ")");

            while (iteration < maxK && error > eps)
            {
                // Accumulate the gradient of the error over the full training set.
                ComputeTotalGradient(totalGradients, partialGradients, trainingSet);

                for (int layerIdx = NumberOfLayers() - 1; layerIdx >= 1; layerIdx--)
                {
                    NeuralLayer layer = GetLayer(layerIdx);

                    // Threshold update: momentum minus learning-rate-scaled gradient.
                    for (int neuronIdx = 0; neuronIdx < layer.NumberOfNeurons(); neuronIdx++)
                    {
                        double step = micro * previousDeltas.GetThreshold(layerIdx, neuronIdx)
                                      - lambda * totalGradients.GetThreshold(layerIdx, neuronIdx);
                        layer.GetNeuron(neuronIdx).Threshold += step;
                        previousDeltas.SetThreshold(layerIdx, neuronIdx, step);
                    }

                    // Weight update: same rule, applied per input connection.
                    for (int neuronIdx = 0; neuronIdx < layer.NumberOfNeurons(); neuronIdx++)
                    {
                        Neuron neuron = layer.GetNeuron(neuronIdx);
                        for (int inputIdx = 0; inputIdx < neuron.NumberOfInputs(); inputIdx++)
                        {
                            double step = micro * previousDeltas.GetWeight(layerIdx, neuronIdx, inputIdx)
                                          - lambda * totalGradients.GetWeight(layerIdx, neuronIdx, inputIdx);
                            neuron.GetInput(inputIdx).Weight += step;
                            previousDeltas.SetWeight(layerIdx, neuronIdx, inputIdx, step);
                        }
                    }
                }

                error = totalGradients.GetGradientAbs();
                iteration++;
                // Progress report every 25 iterations.
                if (iteration % 25 == 0)
                {
                    Console.WriteLine("currK=" + iteration + "   currE=" + error);
                }
            }
        }