public TrainableComponent(WeightsMatrix weights, BiasesVector biases)
    : base(weights.NumberOfInputs, weights.NumberOfOutputs)
{
    if (weights.NumberOfOutputs != biases.Dimension)
    {
        throw new ArgumentException("Mismatched WeightsMatrix and BiasesVector.");
    }

    _weights = weights;
    _biases = biases;
    // Default update strategy; gradient descent unless the caller installs another one.
    Strategy = new NeuralNet.GradientDescent();
}
public Layer(
    WeightsMatrix weights,
    BiasesVector biases,
    ActivationFunction activationFunction,
    DerivativeFunction derivativeFunction)
    // The null checks must run before the base constructor dereferences
    // weights and biases, so they are done in the base-call arguments.
    : base(
        weights ?? throw new ArgumentNullException(nameof(weights), "Attempt to make a layer with null weights."),
        biases ?? throw new ArgumentNullException(nameof(biases), "Attempt to make a layer with null biases."))
{
    // A null activation function makes this a purely linear layer.
    _neuralFunction = activationFunction == null
        ? null
        : new NeuralFunction(NumberOfOutputs, activationFunction, derivativeFunction);
}
public BiasesVector Subtract(BiasesVector other)
{
    // The difference comes back as a one-row matrix; row 0 holds the vector.
    return new BiasesVector(SubtractMatrices(this, other).Row(0));
}
public override BiasesVector BiasesUpdate(NeuralNet2.BiasesVector gradient)
{
    // Average the batch-summed gradient and negate it so the step moves the
    // biases against the gradient: delta = -(stepSize / batchSize) * gradient.
    Matrix delta = gradient.Scale(-_stepSize / _batchSize);
    return new BiasesVector(delta);
}
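// As a sanity check on the arithmetic above, here is a minimal self-contained
// sketch of the same minibatch update rule using plain double arrays; everything
// in it (GradientDescentSketch, BiasesDelta, stepSize, batchSize) is illustrative
// and mirrors, rather than belongs to, the library code.
using System;

class GradientDescentSketch
{
    // Same rule as BiasesUpdate above: delta = -(stepSize / batchSize) * gradient,
    // where gradient holds per-neuron bias gradients summed over the minibatch.
    static double[] BiasesDelta(double[] summedGradient, double stepSize, int batchSize)
    {
        var delta = new double[summedGradient.Length];
        for (int i = 0; i < summedGradient.Length; i++)
        {
            delta[i] = -(stepSize / batchSize) * summedGradient[i];
        }
        return delta;
    }

    static void Main()
    {
        // Batch of 4 examples, step size 0.1: expected delta = -0.025 * g.
        double[] g = { 2.0, -4.0, 8.0 };
        Console.WriteLine(string.Join(", ", BiasesDelta(g, 0.1, 4))); // -0.05, 0.1, -0.2
    }
}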
public abstract NeuralNet2.BiasesVector BiasesUpdate(NeuralNet2.BiasesVector gradient);
public static Layer CreateLinearLayer(WeightsMatrix weights, BiasesVector biases)
{
    return new Layer(weights, biases, null, null);
}
public Layer(WeightsMatrix weights, BiasesVector biases) : this(weights, biases, null, null) { }
public static Layer CreateLogisticLayer(WeightsMatrix weights, BiasesVector biases)
{
    return new Layer(weights, biases, NeuralFunction.__Logistic, NeuralFunction.__LogisticDerivative);
}
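// A hypothetical usage sketch for the two factories above. The double[,] and
// double[] constructors assumed here for WeightsMatrix and BiasesVector do not
// appear in this listing and may differ in the actual library.
var weights = new WeightsMatrix(new double[,] { { 0.5, -0.2 }, { 0.1, 0.3 } });
var biases = new BiasesVector(new double[] { 0.0, 0.1 });

// A linear layer applies only the affine map Wx + b;
// the logistic layer applies a sigmoid on top of it.
Layer linear = Layer.CreateLinearLayer(weights, biases);
Layer logistic = Layer.CreateLogisticLayer(weights, biases);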
protected override void _updateBiases(VectorBatch outputGradient)
{
    // Collapse the batch dimension of the cached _activationGradient into a
    // single gradient per output neuron, then let the strategy turn it into a step.
    BiasesVector biasesGradient = new BiasesVector(_activationGradient.SumColumnsAsMatrix());
    _biases = _biases.Add(Strategy.BiasesUpdate(biasesGradient));
}
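// The column sum above is the standard batch reduction for bias gradients. A
// self-contained sketch of that reduction with plain arrays, under the assumption
// (not confirmed by this listing) that each row of the batch is one example and
// each column one output neuron:
using System;

class BiasGradientSketch
{
    // Sums per-example gradients (rows) into one bias gradient per neuron (columns).
    static double[] SumColumns(double[,] batchGradient)
    {
        int rows = batchGradient.GetLength(0);
        int cols = batchGradient.GetLength(1);
        var sums = new double[cols];
        for (int r = 0; r < rows; r++)
            for (int c = 0; c < cols; c++)
                sums[c] += batchGradient[r, c];
        return sums;
    }

    static void Main()
    {
        // Two examples, three output neurons.
        double[,] grads = { { 1.0, 0.5, -1.0 }, { 3.0, -0.5, 2.0 } };
        Console.WriteLine(string.Join(", ", SumColumns(grads))); // 4, 0, 1
    }
}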
public WeightedCombiner(WeightsMatrix weights, BiasesVector biases) : base(weights, biases) { }