public DenseLayer(int numNeurons, int numNeuronsNext, IActivationFunc activationFunc, IInitialization initialization)
 {
     weights             = new Weights(numNeurons, numNeuronsNext, initialization);
     bias                = new Bias(numNeuronsNext);
     gradWeights         = new Weights(numNeurons, numNeuronsNext, initialization);
     gradBias            = new Bias(numNeuronsNext);
     this.activationFunc = activationFunc;
 }
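The gradient buffers are pre-allocated here and later overwritten in place by Backward (Example #3). Below is a minimal construction sketch; the Linear activation type appears in Example #4 (assumed here to have a parameterless constructor), while XavierInitialization is a purely hypothetical IInitialization implementation, so substitute whatever initializer the project actually provides.

 // Illustrative only: a 128-neuron layer feeding a 10-neuron layer.
 // "XavierInitialization" is an assumed IInitialization implementation.
 var hiddenLayer = new DenseLayer(128, 10, new Linear(), new XavierInitialization());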
Example #2
        public NeuralNetwork WithLayerWithCommonActivationFunction(int neuronCount, IActivationFunc activationFunc)
        {
            var layer = createNewLayer_();

            for (int i = 0; i < neuronCount; i++)
            {
                var neuron = new Neuron(activationFunc);
                layer.AddNeuron(neuron);
            }

            initializeLinks_(layer);
            addLayer_(layer);

            return this;
        }
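A hedged sketch of chaining this fluent builder; the parameterless NeuralNetwork constructor and the Sigmoid activation type are assumptions, not confirmed by the snippets above.

        // Illustrative only: stack a 64-neuron and a 10-neuron layer,
        // both using an assumed Sigmoid implementation of IActivationFunc.
        var network = new NeuralNetwork()
                      .WithLayerWithCommonActivationFunction(64, new Sigmoid())
                      .WithLayerWithCommonActivationFunction(10, new Sigmoid());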
Example #3
        private void Backward(Matrix<float> X, Vector<float> y)
        {
            DenseLayer layer = layers.Last();

            Matrix<float> delta = deltas.Last();

            // First pass: propagate the error backwards, computing
            // delta[i] = (delta[i+1] * W[i+1]^T) .* f'(z[i]) for each hidden layer.
            for (int i = layers.Length - 1; i >= 0; i--)
            {
                if (i == layers.Length - 1)
                {
                    // The output layer's delta comes directly from the loss.
                    SetOutputDelta(X, y);
                    continue;
                }

                DenseLayer      prevLayer      = layers[i];
                Matrix<float>   W              = layer.Weights.Vals;
                Matrix<float>   preActivation  = preActivations[i];
                DropoutLayer    dropout        = dropouts[i];
                IActivationFunc activationFunc = prevLayer.ActivationFunc;
                Matrix<float>   deltaNew       = deltas[i];
                delta.TransposeAndMultiply(W, deltaNew);                    // deltaNew = delta * W^T
                preActivation.MapInplace(activationFunc.dF, Zeros.Include); // f'(z), in place
                deltaNew.PointwiseMultiply(preActivation, deltaNew);        // deltaNew .*= f'(z)
                delta = deltaNew;
                layer = prevLayer;
            }

            // Second pass: accumulate the weight and bias gradients for every layer,
            // averaged over the mini-batch.
            for (int i = layers.Length - 1; i >= 0; i--)
            {
                layer = layers[i];
                delta = deltas[i];
                Matrix<float> gradWeights = layer.GradWeights.Vals;
                Matrix<float> gradBias    = layer.GradBias.Vals;
                Matrix<float> a           = activations[i];
                a.TransposeThisAndMultiply(delta, gradWeights);  // dW = a^T * delta
                gradWeights.Divide(miniBatchSize, gradWeights);
                Vector<float> gradBiasVect = delta.ColumnSums(); // db = column sums over the batch
                gradBiasVect.Divide(miniBatchSize, gradBiasVect);
                gradBias.SetRow(0, gradBiasVect);
            }
        }
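Design note: Backward allocates nothing; it writes into the pre-allocated GradWeights and GradBias buffers from Example #1 through the matrix library's result-parameter overloads (MathNet.Numerics, judging by MapInplace and Zeros.Include), and both gradients are divided by miniBatchSize so they represent mini-batch averages.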
Example #4
        public float[][] Forward(Matrix<float> X, bool pred)
        {
            // Training uses the mini-batch buffers; prediction uses the
            // single-sample buffers.
            Matrix<float>[] activations;

            if (pred == false)
            {
                activations = this.activations;
            }
            else
            {
                activations = this.singleActivations;
            }

            activations[0] = X;

            Matrix<float> a = activations[0];

            for (int i = 0; i < layers.Length; i++)
            {
                DenseLayer      layer          = layers[i];
                Matrix<float>   W              = layer.Weights.Vals;
                Matrix<float>   b              = layer.Bias.Vals;
                IActivationFunc activationFunc = layer.ActivationFunc;

                DropoutLayer dropout = null;

                if (dropouts != null)
                {
                    dropout = dropouts[i];
                }

                if (pred == false)
                {
                    // Broadcast the bias row across the whole mini-batch.
                    b = layer.Bias.Broadcast(miniBatchSize);
                }

                Matrix<float> aNext = activations[i + 1];
                a.Multiply(W, aNext); // aNext = a * W
                aNext.Add(b, aNext);  // aNext += b

                if (pred == false)
                {
                    // Keep the pre-activation values; Backward needs them for f'(z).
                    aNext.CopyTo(preActivations[i]);
                }

                if (activationFunc.GetType() != typeof(Linear))
                {
                    aNext.MapInplace(activationFunc.F, Zeros.Include);
                }

                if (pred == false && dropout != null)
                {
                    // Apply the dropout mask only during training.
                    dropout.Sample();
                    aNext.PointwiseMultiply(dropout.Vals, aNext);
                }

                a = aNext;
            }

            return a.ToColumnArrays();
        }
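A hedged call sketch, assuming MathNet.Numerics matrices and an already-built network instance named net (both names illustrative); in prediction mode the bias is not broadcast, so a single-row sample is passed.

            // Illustrative only: run one sample through the network in prediction mode.
            // Assumes: using MathNet.Numerics.LinearAlgebra;
            int inputSize = 784; // example input width
            Matrix<float> sample = Matrix<float>.Build.Dense(1, inputSize);
            float[][] output = net.Forward(sample, pred: true);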
Example #5
 public LayerSettings(int nodeSize, IActivationFunc ActFunc) : this()
 {
     this.NodeCount          = nodeSize;
     this.ActivationFunction = ActFunc;
 }
Example #6
 public Neuron(IActivationFunc activationFunc)
 {
     ActivationFunc = activationFunc;
     initializeRandom_();
 }
Example #7
File: Neuron.cs Project: rustamz/Stones
 /// <summary>
 /// Constructor that sets the weight
 /// </summary>
 /// <param name="Weight">The weight to set.</param>
 /// <param name="ActFunc">The activation function.</param>
 public Neuron(double Weight, IActivationFunc ActFunc)
 {
     this.Weight = Weight;
     this.actFunc = ActFunc.Clone();
 }
Example #8
File: Neuron.cs Project: rustamz/Stones
 /// <summary>
 /// Constructor that sets the activation function
 /// </summary>
 /// <param name="ActFunc">The activation function.</param>
 public Neuron(IActivationFunc ActFunc)
 {
     this.actFunc = ActFunc.Clone();
 }