Example #1
 public void updateComingFrom(Neuron n)
 {
     ComingFrom = n;
 }
Example #2
 public void updateGoingTo(Neuron n)
 {
     GoingTo = n;
 }
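
Examples #1 and #2 are the two setters of an Edge, which records the neuron a connection comes from and the neuron it feeds. Below is a minimal sketch of such an Edge class: ComingFrom, GoingTo and the two setters are taken from the examples above, while the Weight field is an assumption added for illustration (Neuron is the project's own neuron class).

 // Minimal sketch of the Edge class implied by Examples #1 and #2; Weight is assumed, not shown above.
 public class Edge
 {
     public Neuron ComingFrom { get; private set; }  // source neuron of this connection
     public Neuron GoingTo    { get; private set; }  // destination neuron of this connection
     public double Weight;                           // connection weight (assumption for illustration)

     public void updateComingFrom(Neuron n)
     {
         ComingFrom = n;
     }

     public void updateGoingTo(Neuron n)
     {
         GoingTo = n;
     }
 }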
Example #3
        /// <summary>
        /// Creates a neural net with hiddenLayers hidden layers of width neurons each; the root (output) and the input layer are not counted as hidden layers.
        /// </summary>
        /// <param name="LearningRate">Base learning rate used during training.</param>
        /// <param name="RandomSeed">Seed for the random number generator.</param>
        /// <param name="numInputs">Number of input (predictor) neurons.</param>
        /// <param name="width">Number of neurons per hidden layer, not counting the bias neuron.</param>
        /// <param name="hiddenLayers">Number of hidden layers.</param>
        public NeuralNet(double LearningRate, int RandomSeed, int numInputs, int width, int hiddenLayers)
        {
            this.BaseLearningRate = LearningRate;
            CurrentLearningRate   = LearningRate;
            RSeed = RandomSeed;

            Width  = width + 1;             //accounting for bias values. The bias values are at Network[X, 0].
            Layers = hiddenLayers + 1;      //accounting for input layer

            Network = new Neuron[Layers][]; //one extra layer for inputs and one extra on width for bias.
            Root    = new Neuron(0, Width);
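            //Layout built by the code below:
            //  Root                        - the single output neuron, fed by the top hidden layer
            //  Network[i][0]   (i >= 1)    - the bias neuron of layer i (its value is fixed to 1 at the end)
            //  Network[i][1..width]        - the regular neurons of hidden layer i
            //  Network[0][0..numInputs-1]  - the input neurons; the input layer has no bias slot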
            //=======================================================================
            //set up stuff directly beneath the root
            Network[hiddenLayers] = new Neuron[Width];
            for (int j = 1; j < Width; j++)
            {
                Network[hiddenLayers][j] = new Neuron(1, Width); //these neurons only have one output, but a normal number of inputs
            }

            for (int i = 2; i < hiddenLayers; i++) // typical hidden layer items
            {
                Network[i] = new Neuron[Width];
                for (int j = 1; j < Width; j++)
                {
                    Network[i][j] = new Neuron(width, Width); //receive input from all, including bias. Output to all on the next layer, excluding bias. (x-1,x)
                }
            }

            //set up layer above input layer. It might have an abnormal number of inputs, but has the full number of neurons and outputs
            Network[1] = new Neuron[Width];
            for (int j = 1; j < Width; j++)
            {
                Network[1][j] = new Neuron(width, numInputs);
            }

            //Finally, set up all of the bias neurons. These have full outputs and no inputs
            for (int i = 1; i < hiddenLayers; i++)
            {
                Network[i][0] = new Neuron(width, 0);
            }
            Network[hiddenLayers][0] = new Neuron(1, 0); //the last one only has one output (points at the root)

            //set up input layer. It has fewer neurons than the rest of the layers.
            Network[0] = new Neuron[numInputs];
            for (int j = 0; j < numInputs; j++)
            {
                Network[0][j] = new Neuron(width, 0); //these neurons ARE the inputs, so they have no inputs of their own, but a normal number of outputs
            }

            //=======================================================================
            //All neurons built. Now need to set up edges. Building from the top down.

            Edge[] edges = new Edge[Width];
            for (int j = 0; j < Width; j++) //set up the edges array
            {
                edges[j] = new Edge();
                edges[j].updateComingFrom(Network[hiddenLayers][j]); //adjust each edge to point at a different neuron on the top layer
            }

            Root.addIncomingEdges(edges, 0); //the root will be the only output for the top neurons. This also sets the outgoing edges at the other ends.

            //start from the top hidden layer and count down, but stop before layer 1: its incoming edges come from the differently sized input layer and are handled separately below
            for (int i = hiddenLayers; i > 1; i--)
            {
                int minus = i - 1;
                for (int j = 0; j < Width; j++)                   //set up the edges array
                {
                    edges[j].updateComingFrom(Network[minus][j]); //adjust each edge to point at a different neuron on the next layer down
                }
                for (int j = 1; j < Width; j++)                   //now that the edges are set, add the incoming edges to each of the neurons on the current layer
                {                                                 //of course, the bias has no input, so skip over it in this loop. (we still count it in the other one, though)
                    Network[i][j].addIncomingEdges(edges, j - 1); //add the edges to both ends of each
                }
            }

            //since the input layer is weird, account for that.
            edges = new Edge[numInputs];
            for (int j = 0; j < numInputs; j++) // set up the edges
            {
                edges[j] = new Edge();
                edges[j].updateComingFrom(Network[0][j]);     //adjust so they point at the inputs
            }
            for (int j = 1; j < Width; j++)                   //now that the edges are set, add the incoming edges to each of the neurons on the current layer
            {                                                 //of course, the bias has no input, so skip over it in this loop. (we still count it in the other one, though)
                Network[1][j].addIncomingEdges(edges, j - 1); //add the edges to both ends of each
            }

            //=======================================================================
            //Lastly, we set all the bias terms to hold 1 as their value

            for (int i = 1; i < Layers; i++) //there is no bias term on the input layer
            {
                Network[i][0].setValue(1);   //set all bias terms to be 1. We will never change these values (but we will change the outgoing weights)
            }

            //And that's that
        }
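
For orientation, here is a hypothetical call to the constructor above; the argument values are illustrative only and do not come from the original project.

 // Hypothetical usage of the NeuralNet constructor; all argument values are made up for illustration.
 // learning rate 0.05, random seed 42, 4 input predictors, 8 neurons per hidden layer, 2 hidden layers
 NeuralNet net = new NeuralNet(0.05, 42, 4, 8, 2);
 // With these values the constructor allocates Width = 9 slots per hidden layer (index 0 is the bias)
 // and Layers = 3 rows in Network: the input layer plus the two hidden layers, with Root on top.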
Example #4
 public virtual void RemoveNextNeuron(Neuron neuron)
 {
     NextNeurons.Remove(neuron);
 }
Example #5
 public override void AddNextNeuron(Neuron neuron, double? weight)
 {
     throw new Exception($"Neurons {neuron.NeuronName} and {NeuronName} cannot be connected with a forward link.");
 }
Example #6
 public override void AddLastNeuron(Neuron neuron, double weight)
 {
     throw new Exception($"Neurons {neuron.NeuronName} and {NeuronName} cannot be connected with a feedback link.");
 }
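
The three methods above read like pieces of a neuron class hierarchy: a base Neuron keeps a list of forward connections, and a specialised neuron overrides the connection methods to reject new links. Below is a minimal sketch under that assumption; only NeuronName, NextNeurons and the method signatures come from the snippets above, while the class names, constructors and list type are illustrative.

 // Sketch of the hierarchy suggested by the snippets above; everything not shown in them is an assumption.
 using System;
 using System.Collections.Generic;

 public class Neuron
 {
     public string NeuronName { get; protected set; }
     protected List<Neuron> NextNeurons = new List<Neuron>();

     public Neuron(string name)
     {
         NeuronName = name;
     }

     public virtual void AddNextNeuron(Neuron neuron, double? weight)
     {
         NextNeurons.Add(neuron);   // forward connection allowed by default (weight handling omitted)
     }

     public virtual void AddLastNeuron(Neuron neuron, double weight)
     {
         // feedback connection allowed by default; storage omitted in this sketch
     }

     public virtual void RemoveNextNeuron(Neuron neuron)
     {
         NextNeurons.Remove(neuron);
     }
 }

 // A neuron that rejects any further connections, as in the two overriding methods above.
 public class OutputNeuron : Neuron
 {
     public OutputNeuron(string name) : base(name) { }

     public override void AddNextNeuron(Neuron neuron, double? weight)
     {
         throw new Exception($"Neurons {neuron.NeuronName} and {NeuronName} cannot be connected with a forward link.");
     }

     public override void AddLastNeuron(Neuron neuron, double weight)
     {
         throw new Exception($"Neurons {neuron.NeuronName} and {NeuronName} cannot be connected with a feedback link.");
     }
 }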