public DenseLayer(int numNeurons, int numNeuronsNext, IActivationFunc activationFunc, IInitialization initialization)
{
    // Trainable parameters: a numNeurons x numNeuronsNext weight matrix and a bias row.
    weights = new Weights(numNeurons, numNeuronsNext, initialization);
    bias = new Bias(numNeuronsNext);

    // Gradient buffers with matching shapes; their contents are overwritten on every backward pass.
    gradWeights = new Weights(numNeurons, numNeuronsNext, initialization);
    gradBias = new Bias(numNeuronsNext);

    this.activationFunc = activationFunc;
}
public NeuralNetwork WithLayerWithCommonActivationFunction(int neuronCount, IActivationFunc activationFunc)
{
    var layer = createNewLayer_();

    for (int i = 0; i < neuronCount; i++)
    {
        var neuron = new Neuron(activationFunc);
        layer.AddNeuron(neuron);
    }

    initializeLinks_(layer);
    addLayer_(layer);

    return this;
}
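// Usage sketch: because the builder above returns `this`, layers can be chained fluently.
// This is a minimal illustration only; the parameterless NeuralNetwork constructor and the
// Sigmoid class are assumptions, not part of the code above. Substitute whatever
// IActivationFunc implementations the project actually provides.
public static NeuralNetwork BuildExampleNetwork()
{
    return new NeuralNetwork()
        .WithLayerWithCommonActivationFunction(16, new Sigmoid())  // hidden layer (hypothetical Sigmoid)
        .WithLayerWithCommonActivationFunction(1, new Sigmoid());  // output layer
}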
private void Backward(Matrix<float> X, Vector<float> y)
{
    DenseLayer layer = layers.Last();
    Matrix<float> delta = deltas.Last();

    // Pass 1: propagate the error deltas from the output layer back toward the input,
    // i.e. delta[i] = (delta[i+1] * W[i+1]^T) .* f'(preActivation[i]).
    for (int i = layers.Length - 1; i >= 0; i--)
    {
        if (i == layers.Length - 1)
        {
            // The output-layer delta is derived from the loss and handled separately.
            SetOutputDelta(X, y);
            continue;
        }

        DenseLayer prevLayer = layers[i];
        Matrix<float> W = layer.Weights.Vals;
        Matrix<float> preActivation = preActivations[i];
        DropoutLayer dropout = dropouts[i];
        IActivationFunc activationFunc = prevLayer.ActivationFunc;
        Matrix<float> deltaNew = deltas[i];

        delta.TransposeAndMultiply(W, deltaNew);                     // deltaNew = delta * W^T
        preActivation.MapInplace(activationFunc.dF, Zeros.Include);  // f'(z), in place
        deltaNew.PointwiseMultiply(preActivation, deltaNew);         // deltaNew .*= f'(z)

        delta = deltaNew;
        layer = prevLayer;
    }

    // Pass 2: turn the deltas into mini-batch-averaged gradients,
    // gradW[i] = a[i]^T * delta[i] / m and gradB[i] = columnSums(delta[i]) / m.
    for (int i = layers.Length - 1; i >= 0; i--)
    {
        layer = layers[i];
        delta = deltas[i];
        Matrix<float> gradWeights = layer.GradWeights.Vals;
        Matrix<float> gradBias = layer.GradBias.Vals;
        Matrix<float> a = activations[i];

        a.TransposeThisAndMultiply(delta, gradWeights);
        gradWeights.Divide(miniBatchSize, gradWeights);

        Vector<float> gradBiasVect = delta.ColumnSums();
        gradBiasVect.Divide(miniBatchSize, gradBiasVect);
        gradBias.SetRow(0, gradBiasVect);
    }
}
public float[][] Forward(Matrix<float> X, bool pred)
{
    // Use the mini-batch activation buffers during training and the single-sample buffers for prediction.
    Matrix<float>[] activations = pred ? this.singleActivations : this.activations;

    activations[0] = X;
    Matrix<float> a = activations[0];

    for (int i = 0; i < layers.Length; i++)
    {
        DenseLayer layer = layers[i];
        Matrix<float> W = layer.Weights.Vals;
        Matrix<float> b = layer.Bias.Vals;
        IActivationFunc activationFunc = layer.ActivationFunc;

        DropoutLayer dropout = null;
        if (dropouts != null)
        {
            dropout = dropouts[i];
        }

        if (!pred)
        {
            // Broadcast the bias row across the mini-batch.
            b = layer.Bias.Broadcast(miniBatchSize);
        }

        Matrix<float> aNext = activations[i + 1];

        // Affine step: aNext = a * W + b.
        a.Multiply(W, aNext);
        aNext.Add(b, aNext);

        if (!pred)
        {
            // Keep the pre-activation values for the backward pass.
            aNext.CopyTo(preActivations[i]);
        }

        // Apply the activation function in place; Linear is the identity, so it is skipped.
        if (activationFunc.GetType() != typeof(Linear))
        {
            aNext.MapInplace(activationFunc.F, Zeros.Include);
        }

        // Apply the dropout mask during training only.
        if (!pred && dropout != null)
        {
            dropout.Sample();
            aNext.PointwiseMultiply(dropout.Vals, aNext);
        }

        a = aNext;
    }

    return a.ToColumnArrays();
}
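// Usage sketch: calling Forward in prediction mode. This assumes Forward is exposed on the
// network type (called NeuralNetwork here, as an assumption) and that Matrix<float> comes from
// MathNet.Numerics.LinearAlgebra, which matches the API used above (MapInplace, PointwiseMultiply,
// ToColumnArrays). Treating each row of X as one sample is inferred from a.Multiply(W, aNext).
using MathNet.Numerics.LinearAlgebra;

public static float[][] Predict(NeuralNetwork net, float[] sample)
{
    // Build a 1 x numFeatures input matrix for a single sample.
    Matrix<float> X = Matrix<float>.Build.DenseOfRowArrays(sample);

    // pred: true selects the single-sample buffers and skips dropout and pre-activation caching.
    return net.Forward(X, pred: true);
}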
public LayerSettings(int nodeSize, IActivationFunc ActFunc) : this()
{
    this.NodeCount = nodeSize;
    this.ActivationFunction = ActFunc;
}
public Neuron(IActivationFunc activationFunc)
{
    ActivationFunc = activationFunc;
    initializeRandom_();
}
/// <summary>
/// Constructor that sets the weight
/// </summary>
/// <param name="Weight">The weight to assign.</param>
/// <param name="ActFunc">The activation function.</param>
public Neuron(double Weight, IActivationFunc ActFunc)
{
    this.Weight = Weight;
    this.actFunc = ActFunc.Clone();
}
/// <summary>
/// Constructor that sets the activation function
/// </summary>
/// <param name="ActFunc">The activation function.</param>
public Neuron(IActivationFunc ActFunc)
{
    this.actFunc = ActFunc.Clone();
}