/// <summary>
/// Computes the weight-update step for gradient descent: a copy of the gradient
/// scaled by -stepSize / batchSize (batch-averaged, negated to move downhill).
/// </summary>
/// <param name="gradient">Accumulated weights gradient; not modified.</param>
/// <returns>A new matrix holding the update to add to the weights.</returns>
public override WeightsMatrix WeightsUpdate(WeightsMatrix gradient)
{
    double scaleFactor = -_stepSize / _batchSize;
    WeightsMatrix update = gradient.Copy();
    update.Scale(scaleFactor);
    return update;
}
/// <summary>
/// Value equality for weight matrices: equal when the dimensions match and
/// every corresponding entry differs by less than a small tolerance.
/// </summary>
/// <param name="other">Matrix to compare against; null is never equal.</param>
/// <returns>True when shapes agree and all entries are within tolerance.</returns>
public bool Equals(WeightsMatrix other)
{
    const double tolerance = 0.000000001;

    if (other == null)
    {
        return false;
    }

    bool sameShape = NumberOfInputs == other.NumberOfInputs
        && NumberOfOutputs == other.NumberOfOutputs;
    if (!sameShape)
    {
        return false;
    }

    for (int row = 0; row < NumberOfOutputs; row++)
    {
        for (int column = 0; column < NumberOfInputs; column++)
        {
            if (Math.Abs(_matrix[row, column] - other._matrix[row, column]) >= tolerance)
            {
                return false;
            }
        }
    }

    return true;
}
/// <summary>
/// Creates a layer from weights, biases and an optional activation function.
/// Pass null for both activationfunction and derivativefunction to build a
/// linear (identity-activation) layer.
/// </summary>
/// <param name="weights">Weight matrix; must not be null.</param>
/// <param name="biases">Bias vector; must not be null.</param>
/// <param name="activationfunction">Activation, or null for a linear layer.</param>
/// <param name="derivativefunction">Derivative of the activation; required whenever activationfunction is non-null.</param>
/// <exception cref="ArgumentException">On null weights/biases, or a non-null activation with a null derivative.</exception>
public Layer(
    WeightsMatrix weights,
    NetworkVector biases,
    ActivationFunction activationfunction,
    DerivativeFunction derivativefunction
    )
    : base(weights.NumberOfOutputs, weights.NumberOfInputs)
{
    // NOTE(review): a null weights argument still fails at the base(...) call
    // above with a NullReferenceException before these guards run; fixing that
    // fully would require changing the base-call expression. The guards are
    // kept (and run first) for biases and for consistency.
    if (weights == null || biases == null)
    {
        throw new ArgumentException("Attempt to make a layer with null weights or biases.");
    }

    if (activationfunction != null && derivativefunction == null)
    {
        // Message typo fixed ("activatioin" -> "activation").
        throw new ArgumentException("derivativefunction cannot be null, if activation is not null");
    }

    _combiner = new WeightedCombiner(weights, biases);

    if (activationfunction == null)
    {
        _neuralFunction = null;
    }
    else
    {
        _neuralFunction = new NeuralFunction(_combiner.NumberOfOutputs, activationfunction, derivativefunction);
    }
}
/// <summary>
/// Builds the gradient of the loss with respect to the weights by summing the
/// outer products of each output-gradient segment with its paired input segment.
/// </summary>
/// <param name="outputgradient">Gradient flowing back from the layer output.</param>
/// <returns>A freshly allocated matrix with the accumulated weights gradient.</returns>
public override WeightsMatrix WeightsGradient(NetworkVector outputgradient)
{
    WeightsMatrix accumulated = new WeightsMatrix(Weights.NumberOfOutputs, Weights.NumberOfInputs);

    // _segmentAndPair yields (input segment, output-gradient segment) pairs;
    // each contributes outputSegment (x) inputSegment to the gradient.
    foreach (var inputOutputPair in _segmentAndPair(VectorInput, outputgradient))
    {
        accumulated.Add(inputOutputPair.Second.OuterProduct(inputOutputPair.First));
    }

    return accumulated;
}
/// <summary>
/// Creates a weighted combiner (affine map) from deep copies of the given
/// weights and biases, with a zeroed input buffer.
/// </summary>
/// <param name="weights">Weight matrix; must not be null.</param>
/// <param name="biases">Bias vector; its dimension must equal the number of outputs.</param>
/// <exception cref="ArgumentException">On null arguments or a bias/output dimension mismatch.</exception>
public WeightedCombiner(WeightsMatrix weights, NetworkVector biases)
    : base(weights.NumberOfOutputs, weights.NumberOfInputs)
{
    // NOTE(review): a null weights argument still fails at the base(...) call
    // above with a NullReferenceException before this guard runs; fixing that
    // fully would require changing the base-call expression.
    if (weights == null)
    {
        // Message typo fixed ("WeightedCombineer" -> "WeightedCombiner").
        throw new ArgumentException("Attempt to make a WeightedCombiner with weights == null.");
    }

    if (biases == null)
    {
        throw new ArgumentException("Attempt to make a WeightedCombiner with biases == null.");
    }

    if (biases.Dimension != weights.NumberOfOutputs)
    {
        // Message grammar fixed ("must the the same of the outputs").
        throw new ArgumentException("Dimension of biases must be the same as the number of outputs.");
    }

    Weights = weights.Copy();
    Biases = biases.Copy();
    VectorInput = new NetworkVector(weights.NumberOfInputs);
}
/// <summary>
/// Factory: builds a layer with no activation function (pure affine map)
/// from the given weights and biases.
/// </summary>
public static Layer CreateLinearLayer(WeightsMatrix weights, NetworkVector biases)
{
    Layer linearLayer = new Layer(weights, biases, null, null);
    return linearLayer;
}
/// <summary>
/// Factory: builds a logistic-activation layer from the given weights,
/// with biases defaulted to a zero vector of matching output dimension.
/// </summary>
public static Layer CreateLogisticLayer(WeightsMatrix weights)
{
    NetworkVector zeroBiases = new NetworkVector(weights.NumberOfOutputs);
    Layer logisticLayer = new Layer(weights, zeroBiases, NeuralFunction.__Logistic, NeuralFunction.__LogisticDerivative);
    return logisticLayer;
}
/// <summary>
/// Creates a linear layer (no activation) from the given weights, with biases
/// defaulted to a zero vector sized to the number of outputs.
/// Delegates to the full constructor.
/// </summary>
/// <param name="weights">Weight matrix; must not be null.</param>
public Layer(WeightsMatrix weights)
    : this(weights, new NetworkVector(weights.NumberOfOutputs), null, null)
{ }
/// <summary>
/// Creates a linear layer (no activation) from the given weights and biases.
/// Delegates to the full constructor.
/// </summary>
/// <param name="weights">Weight matrix; must not be null.</param>
/// <param name="biases">Bias vector; must not be null.</param>
public Layer(WeightsMatrix weights, NetworkVector biases)
    : this(weights, biases, null, null)
{ }
/// <summary>
/// Factory: builds a logistic-activation layer from the given weights and biases.
/// </summary>
public static Layer CreateLogisticLayer(WeightsMatrix weights, NetworkVector biases)
{
    Layer logisticLayer = new Layer(weights, biases, NeuralFunction.__Logistic, NeuralFunction.__LogisticDerivative);
    return logisticLayer;
}
/// <summary>
/// Computes the update to apply to this component's weights from an
/// accumulated weights gradient. Implemented by derived trainers
/// (e.g. scaling by a negative learning rate).
/// </summary>
/// <param name="gradient">Accumulated weights gradient.</param>
/// <returns>The matrix to add to the current weights.</returns>
public abstract WeightsMatrix WeightsUpdate(WeightsMatrix gradient);
/// <summary>
/// Factory: builds a layer with no activation function from the given weights,
/// with biases defaulted to a zero vector of matching output dimension.
/// </summary>
public static Layer CreateLinearLayer(WeightsMatrix weights)
{
    NetworkVector zeroBiases = new NetworkVector(weights.NumberOfOutputs);
    Layer linearLayer = new Layer(weights, zeroBiases, null, null);
    return linearLayer;
}
/// <summary>
/// Copy constructor: deep-copies the weights and biases of another combiner
/// and allocates a fresh zeroed input buffer of matching dimension.
/// </summary>
/// <param name="combiner">Combiner to copy; must not be null.</param>
public WeightedCombiner(WeightedCombiner combiner)
    : base(combiner.NumberOfOutputs, combiner.NumberOfInputs)
{
    this.Biases = combiner.Biases.Copy();
    this.Weights = combiner.Weights.Copy();
    // Bug fix: the primary constructor initializes VectorInput, but this copy
    // constructor previously left it null, so WeightsGradient (which reads
    // VectorInput) would throw NullReferenceException on a copied combiner.
    // Mirror the primary constructor's initialization.
    this.VectorInput = new NetworkVector(combiner.NumberOfInputs);
}
/// <summary>
/// Creates a weighted combiner from the given weights, with biases defaulted
/// to a zero vector sized to the number of outputs. Delegates to the
/// (weights, biases) constructor.
/// </summary>
/// <param name="weights">Weight matrix; must not be null.</param>
public WeightedCombiner(WeightsMatrix weights)
    : this(weights, new NetworkVector(weights.NumberOfOutputs))
{ }
/// <summary>
/// In-place elementwise addition: adds the entries of other to this matrix.
/// </summary>
/// <param name="other">Matrix to add; must not be null. Presumably must have
/// the same dimensions — the underlying matrix Add enforces or assumes this;
/// TODO confirm.</param>
/// <exception cref="ArgumentNullException">If other is null.</exception>
public void Add(WeightsMatrix other)
{
    // Explicit guard instead of an opaque NullReferenceException on other._matrix.
    if (other == null)
    {
        throw new ArgumentNullException("other");
    }

    _matrix = _matrix.Add(other._matrix);
}
/// <summary>
/// In-place elementwise subtraction: subtracts the entries of other from this matrix.
/// </summary>
/// <param name="other">Matrix to subtract; must not be null. Presumably must have
/// the same dimensions — the underlying matrix Subtract enforces or assumes this;
/// TODO confirm.</param>
/// <exception cref="ArgumentNullException">If other is null.</exception>
public void Subtract(WeightsMatrix other)
{
    // Explicit guard instead of an opaque NullReferenceException on other._matrix.
    if (other == null)
    {
        throw new ArgumentNullException("other");
    }

    _matrix = _matrix.Subtract(other._matrix);
}
/// <summary>
/// Initializes zeroed gradient accumulators sized for a component with the
/// given numbers of outputs and inputs: a weights-gradient matrix of shape
/// (numberofoutputs x numberofinputs) and a biases-gradient vector of length
/// numberofoutputs.
/// </summary>
/// <param name="numberofoutputs">Number of component outputs.</param>
/// <param name="numberofinputs">Number of component inputs.</param>
public TrainableComponent(int numberofoutputs, int numberofinputs)
{
    _weightsGradientAccumulator = new WeightsMatrix(numberofoutputs, numberofinputs);
    _biasesGradientAccumulator = new NetworkVector(numberofoutputs);
}