Example #1
        public void ThinkOnce(int inputindex, NeuralNetworkAlgorithm algo = NeuralNetworkAlgorithm.None)
        {
            if (Outputs == null)
                Outputs = new List<List<double>>();

            List<double> output = new List<double>();
            bool firstlayer = true;
            foreach (int layer in Synapses.Keys)
            {
                //transfer values through synapses
                if (!firstlayer)
                {
                    foreach (Synapse s in Synapses[layer])
                        s.Transfer();
                }
                else
                {
                    //first synapse layer: feed the raw input values into the network
                    foreach (Synapse s in Synapses[layer])
                        s.Transfer(Inputs[inputindex][s.InputIndex]);
                    firstlayer = false;
                }
                //make neurons think
                foreach (Neuron n in Neurons[layer + 1])
                {
                    n.StartThinking();
                    n.FinishThinking();

                    //any learning necessary? (only neuron-related)
                    switch (algo)
                    {
                        case NeuralNetworkAlgorithm.FixedIncrement:
                            {
                                // http://faculty.iiit.ac.in/~vikram/nn_intro.html
                                if (layer + 1 != Neurons.Keys.Count - 1) // not in last layer?
                                    break;
                                int c = 0;
                                double difference = (n.Output - Expected[inputindex][n.Index]);
                                if (difference > 0) // output is greater than expected, weights should be lowered
                                    c = -1;
                                else if (difference < 0) // expected is greater, weights should be raised
                                    c = 1;

                                //apply the fixed increment to every synapse feeding this neuron, and to its augment (bias)
                                List<Synapse> connected = (from s in Synapses[layer] where s.Target == n select s).ToList();
                                foreach (Synapse s in connected)
                                    s.Weight += c * s.LastInput;
                                n.Augment += c;
                                break;
                            }
                    }
                }
            }
            //gather results from the last (output) layer
            if (Neurons.Count != 0)
                foreach (Neuron n in Neurons[Neurons.Count - 1])
                    output.Add(n.Output);

            //store the results
            Outputs.Add(output);
        }
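
A minimal usage sketch for ThinkOnce with the fixed-increment rule. It assumes Brain exposes Inputs, Expected and Outputs as List<List<double>> with public setters (as the code above suggests) and that the caller drives repeated passes over the training set; the method name TrainFixedIncrement and the epochs parameter are illustrative, not part of the original API.

        public static void TrainFixedIncrement(Brain brain, int epochs)
        {
            for (int epoch = 0; epoch < epochs; epoch++)
            {
                // reset the collected outputs so their indices line up with the inputs of this pass
                brain.Outputs = new List<List<double>>();
                for (int i = 0; i < brain.Inputs.Count; i++)
                    brain.ThinkOnce(i, NeuralNetworkAlgorithm.FixedIncrement);
            }
        }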
Example #2
        public Brain Think(NeuralNetworkAlgorithm algo = NeuralNetworkAlgorithm.None)
        {
            if (Inputs == null) // nothing to think about; also guards the back-propagation branch below
                return this;

            if (algo != NeuralNetworkAlgorithm.BackPropagation)
            {
                Outputs = new List<List<double>>();
                for (int inputcounter = 0; inputcounter < Inputs.Count; inputcounter++) // for every input set
                {
                    ThinkOnce(inputcounter, algo);
                }
                return this;
            }
            else // back propagation
            {
                // http://mattmazur.com/2015/03/17/a-step-by-step-backpropagation-example/
                double eta = 0.2; // learning rate
                Brain newbrain = new Brain(this); // weight and augment updates are applied to this copy and returned

                for (int inputcounter = 0; inputcounter < Inputs.Count; inputcounter++) // for every input set
                {
                    ThinkOnce(inputcounter);
                    // compute neuron errors, walking backwards from the output layer
                    for (int i = newbrain.Neurons.Count - 1; i >= 0; i--)
                    {
                        for (int j = 0; j < newbrain.Neurons[i].Count; j++)
                        {
                            Neuron n = newbrain.Neurons[i][j];
                            if (i == newbrain.Neurons.Count - 1) // output layer: error is actual output minus expected
                                n.Error = Outputs[inputcounter][j] - Expected[inputcounter][j];
                            else
                            {
                                // hidden layer: weighted sum of downstream errors, scaled by the activation derivative
                                n.Error = 0;
                                foreach (Synapse s in newbrain.Synapses[i])
                                {
                                    if (s.Source != n)
                                        continue;
                                    n.Error += s.Weight * s.Target.Error * n.f.ComputeDerivation(n.Input, n.Slope);
                                }
                            }
                            // and update the augment (bias term) by the error scaled with the learning rate
                            double augdiff = eta * n.Error;
                            n.Augment -= augdiff;
                        }
                    }
                    // now update the weights
                    foreach (int i in newbrain.Synapses.Keys)
                    {
                        for (int j = 0; j < newbrain.Synapses[i].Count; j++)
                        {
                            Synapse s = newbrain.Synapses[i][j];
                            double weidiff = eta * s.Target.Error * s.LastInput;
                            weidiff += s.LastDiff; // momentum: carry the previous weight change into this one
                            s.LastDiff = weidiff;
                            s.Weight -= weidiff;
                        }
                    }
                }
                return newbrain;
            }
        }
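
A hedged sketch of how Think might be called for one back-propagation pass over the whole training set, followed by a plain forward pass on the returned copy to inspect the new outputs. It assumes Inputs and Expected are already populated, that Outputs is publicly readable, that the copy returned by Think can itself Think() again, and that the usual System using is in place for Console; these assumptions come from the snippet above, not from any documented API.

        public static Brain TrainOnePass(Brain brain)
        {
            // one back-propagation sweep: errors and weight updates land in the returned copy
            Brain trained = brain.Think(NeuralNetworkAlgorithm.BackPropagation);

            // plain forward pass on the trained copy to see what it now produces
            trained.Think();
            for (int i = 0; i < trained.Outputs.Count; i++)
                Console.WriteLine("output " + i + ": " + string.Join(", ", trained.Outputs[i]));

            return trained;
        }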