Example #1
        public TrainableComponent(WeightsMatrix weights, BiasesVector biases)
            : base(weights.NumberOfInputs, weights.NumberOfOutputs)
        {
            if (weights.NumberOfOutputs != biases.Dimension)
            {
                throw new ArgumentException("Mismatched WeightsMatrix and BiasesVector.");
            }

            _weights = weights;
            _biases  = biases;

            Strategy = new NeuralNet.GradientDescent();  // default  - keep this here?
        }
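
The only invariant enforced here is that the weights' NumberOfOutputs matches the bias dimension. As a minimal sketch of what that means for a caller, assume `weights` is an already-constructed WeightsMatrix with NumberOfOutputs == 2; the Layer constructors in Examples #2 and #8 appear to chain down to this base constructor:

 var ok = new Layer(weights, new BiasesVector(2));   // dimensions agree
 // new Layer(weights, new BiasesVector(3));         // would throw "Mismatched WeightsMatrix and BiasesVector."
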
Example #2
        public Layer(
            WeightsMatrix weights,
            BiasesVector biases,
            ActivationFunction activationfunction,
            DerivativeFunction derivativefunction
            )
            : base(
                weights ?? throw new ArgumentException("Attempt to make a layer with null weights or biases."),
                biases ?? throw new ArgumentException("Attempt to make a layer with null weights or biases."))
        {
            // Null weights/biases are rejected by the throw expressions above,
            // before the base constructor ever dereferences them.
            if (activationfunction == null)
            {
                // No activation supplied: the layer stays linear.
                _neuralFunction = null;
            }
            else
            {
                _neuralFunction = new NeuralFunction(NumberOfOutputs, activationfunction, derivativefunction);
            }
        }
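
Passing a null activation (and derivative) simply leaves _neuralFunction unset, which is how the CreateLinearLayer factories further down build linear layers. A rough equivalence, assuming `weights` and `biases` are already constructed:

 var viaCtor    = new Layer(weights, biases, null, null);
 var viaFactory = Layer.CreateLinearLayer(weights, biases);   // Example #6 forwards the same nulls
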
Example #3
 public WeightsMatrix Add(WeightsMatrix other)
 {
     return new WeightsMatrix(AddMatrices(this, other));
 }
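
Add appears to return a fresh WeightsMatrix holding the sum rather than mutating the receiver, which is why Example #12 reassigns _weights with the result. A tiny sketch, assuming `current` and `update` are pre-built WeightsMatrix values of matching shape:

 WeightsMatrix next = current.Add(update);   // 'current' itself is left untouched
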
Example #4
        public override NeuralNet2.WeightsMatrix WeightsUpdate(NeuralNet2.WeightsMatrix gradient)
        {
            // Plain gradient descent: scale the gradient by -stepSize / batchSize
            // (a negative, batch-averaged step) and wrap the result as a WeightsMatrix.
            Matrix delta = gradient.Scale(-_stepSize / _batchSize);

            return new NeuralNet2.WeightsMatrix(delta);
        }
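
For a concrete feel of the scaling (numbers hypothetical): with _stepSize = 0.5 and _batchSize = 10, every entry of the gradient is multiplied by -0.05, i.e. the update is the negative, batch-averaged gradient times the step size:

 double stepSize  = 0.5;                     // hypothetical value
 double batchSize = 10;                      // hypothetical value
 double factor    = -stepSize / batchSize;   // -0.05, applied entry-wise by gradient.Scale(...)
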
Example #5
 public abstract NeuralNet2.WeightsMatrix WeightsUpdate(NeuralNet2.WeightsMatrix gradient);
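
Example #4 shows one concrete implementation of this abstract hook; Example #1 installs a GradientDescent strategy as the default. A minimal sketch of an alternative strategy, assuming the declaring base class is named TrainingStrategy and that WeightsUpdate is its only abstract member:

 // Hypothetical: a strategy with a fixed step size and no batch scaling.
 public class FixedStepDescent : TrainingStrategy
 {
     private readonly double _stepSize;

     public FixedStepDescent(double stepSize) { _stepSize = stepSize; }

     public override NeuralNet2.WeightsMatrix WeightsUpdate(NeuralNet2.WeightsMatrix gradient)
     {
         // Same pattern as Example #4: scale the gradient, wrap it back up as a WeightsMatrix.
         return new NeuralNet2.WeightsMatrix(gradient.Scale(-_stepSize));
     }
 }
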
Example #6
 public static Layer CreateLinearLayer(WeightsMatrix weights, BiasesVector biases)
 {
     return new Layer(weights, biases, null, null);
 }
Example #7
 public Layer(WeightsMatrix weights)
     : this(weights, new BiasesVector(weights.NumberOfOutputs), null, null)
 {
 }
Example #8
 public Layer(WeightsMatrix weights, BiasesVector biases)
     : this(weights, biases, null, null)
 {
 }
Example #9
 public static Layer CreateLogisticLayer(WeightsMatrix weights)
 {
     return new Layer(weights, new BiasesVector(weights.NumberOfOutputs), NeuralFunction.__Logistic, NeuralFunction.__LogisticDerivative);
 }
Example #10
 public static Layer CreateLogisticLayer(WeightsMatrix weights, BiasesVector biases)
 {
     return new Layer(weights, biases, NeuralFunction.__Logistic, NeuralFunction.__LogisticDerivative);
 }
Example #11
 public static Layer CreateLinearLayer(WeightsMatrix weights)
 {
     return new Layer(weights, new BiasesVector(weights.NumberOfOutputs), null, null);
 }
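
Taken together, Examples #6-#11 cover the four factory combinations of {linear, logistic} × {explicit biases, default biases}, where the default is a BiasesVector built from just the output count (presumably zeros). A short usage sketch, assuming `weights` is an already-constructed WeightsMatrix:

 Layer hidden = Layer.CreateLogisticLayer(weights);   // logistic activation, default biases
 Layer output = Layer.CreateLinearLayer(weights, new BiasesVector(weights.NumberOfOutputs));
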
Example #12
        protected override void _updateWeights(VectorBatch outputGradient)
        {
            // Build the weights gradient from the stored input batch and the activation gradient,
            // let the configured Strategy turn it into an update, and fold that update into the
            // current weights via WeightsMatrix.Add.
            WeightsMatrix weightsGradient = WeightsMatrix.FromVectorBatchPair(_input, _activationGradient);

            _weights = _weights.Add(Strategy.WeightsUpdate(weightsGradient));
        }
Example #13
 public WeightedCombiner(WeightsMatrix weights)
     : this(weights, new BiasesVector(weights.NumberOfOutputs))
 {
 }
Example #14
 public WeightedCombiner(WeightsMatrix weights, BiasesVector biases)
     : base(weights, biases)
 {
 }