Example #1
        public TrainableComponent(WeightsMatrix weights, BiasesVector biases)
            : base(weights.NumberOfInputs, weights.NumberOfOutputs)
        {
            // Each output of the weight matrix needs exactly one bias entry.
            if (weights.NumberOfOutputs != biases.Dimension)
            {
                throw new ArgumentException("Mismatched WeightsMatrix and BiasesVector.");
            }

            _weights = weights;
            _biases  = biases;

            Strategy = new NeuralNet.GradientDescent();  // default  - keep this here?
        }
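The dimension check above is the invariant the later snippets satisfy implicitly: whenever only a WeightsMatrix is supplied (Examples #7, #11, #13), the biases are built from weights.NumberOfOutputs. A minimal sketch of that pattern, using only members that appear on this page:

// Sketch only: a BiasesVector sized to the weight matrix's output count
// always passes the constructor's dimension check.
public static BiasesVector MatchingBiases(WeightsMatrix weights)
{
    return new BiasesVector(weights.NumberOfOutputs);
}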
Example #2
        public Layer(
            WeightsMatrix weights,
            BiasesVector biases,
            ActivationFunction activationfunction,
            DerivativeFunction derivativefunction
            )
            : base(weights, biases)
        {
            // Note: the base constructor (Example #1) already dereferences weights and biases,
            // so null arguments fail there with a NullReferenceException before this check runs.
            if (weights == null || biases == null)
            {
                throw new ArgumentException("Attempt to make a layer with null weights or biases.");
            }

            // A null activation yields a linear layer (see CreateLinearLayer in Examples #6 and #11);
            // otherwise wrap the activation/derivative pair for this layer's outputs.
            if (activationfunction == null)
            {
                _neuralFunction = null;
            }
            else
            {
                _neuralFunction = new NeuralFunction(NumberOfOutputs, activationfunction, derivativefunction);
            }
        }
Example #3
 // Returns a new WeightsMatrix holding the sum of this matrix and other;
 // used by the weight-update step in Example #12.
 public WeightsMatrix Add(WeightsMatrix other)
 {
     return(new WeightsMatrix(AddMatrices(this, other)));
 }
Example #4
        public override NeuralNet2.WeightsMatrix WeightsUpdate(NeuralNet2.WeightsMatrix gradient)
        {
            // Gradient-descent step: scale the gradient by -stepSize / batchSize to get the weight delta.
            Matrix delta = gradient.Scale(-_stepSize / _batchSize);

            return(new NeuralNet2.WeightsMatrix(delta));
        }
Example #5
 public abstract NeuralNet2.WeightsMatrix WeightsUpdate(NeuralNet2.WeightsMatrix gradient);
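Example #4 above is one concrete implementation of this abstract method: a gradient-descent delta of -stepSize / batchSize times the gradient. Below is a hedged sketch of plugging in a different rule the same way; the base-class name TrainingStrategy is a placeholder (this page only shows the abstract method), while Scale() returning a Matrix and the WeightsMatrix(Matrix) constructor are taken from Examples #4 and #3:

// Sketch only: a fixed-step variant of Example #4. "TrainingStrategy" stands in
// for whatever abstract class actually declares WeightsUpdate.
public class FixedStepDescent : TrainingStrategy
{
    private readonly double _stepSize;

    public FixedStepDescent(double stepSize)
    {
        _stepSize = stepSize;
    }

    public override NeuralNet2.WeightsMatrix WeightsUpdate(NeuralNet2.WeightsMatrix gradient)
    {
        // Same shape as Example #4, but without dividing by a batch size.
        return new NeuralNet2.WeightsMatrix(gradient.Scale(-_stepSize));
    }
}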
Example #6
 public static Layer CreateLinearLayer(WeightsMatrix weights, BiasesVector biases)
 {
     return(new Layer(weights, biases, null, null));
 }
Example #7
 public Layer(WeightsMatrix weights)
     : this(weights, new BiasesVector(weights.NumberOfOutputs), null, null)
 {
 }
Example #8
 public Layer(WeightsMatrix weights, BiasesVector biases)
     : this(weights, biases, null, null)
 {
 }
Example #9
 public static Layer CreateLogisticLayer(WeightsMatrix weights)
 {
     return(new Layer(weights, new BiasesVector(weights.NumberOfOutputs), NeuralFunction.__Logistic, NeuralFunction.__LogisticDerivative));
 }
Example #10
 public static Layer CreateLogisticLayer(WeightsMatrix weights, BiasesVector biases)
 {
     return(new Layer(weights, biases, NeuralFunction.__Logistic, NeuralFunction.__LogisticDerivative));
 }
Example #11
 public static Layer CreateLinearLayer(WeightsMatrix weights)
 {
     return(new Layer(weights, new BiasesVector(weights.NumberOfOutputs), null, null));
 }
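A brief usage sketch for the factory methods in Examples #6 and #9-#11, assuming a WeightsMatrix instance is already available (its construction is not shown on this page):

// Sketch only: choosing a layer type via the factory methods above.
public static class LayerFactoryUsage
{
    public static void BuildLayers(WeightsMatrix weights)
    {
        // Linear layer: no activation or derivative (Examples #6 and #11).
        Layer linear = Layer.CreateLinearLayer(weights);

        // Logistic layer with default biases sized to the outputs (Example #9).
        Layer logistic = Layer.CreateLogisticLayer(weights);

        // Logistic layer with explicitly supplied biases (Example #10).
        Layer withExplicitBiases = Layer.CreateLogisticLayer(
            weights, new BiasesVector(weights.NumberOfOutputs));
    }
}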
Example #12
        protected override void _updateWeights(VectorBatch outputGradient)
        {
            // Build the gradient with respect to the weights from the stored input batch
            // and the activation gradient, then apply the training strategy's update
            // (e.g. the gradient-descent delta of Example #4) via Add (Example #3).
            WeightsMatrix weightsGradient = WeightsMatrix.FromVectorBatchPair(_input, _activationGradient);

            _weights = _weights.Add(Strategy.WeightsUpdate(weightsGradient));
        }
Example #13
 public WeightedCombiner(WeightsMatrix weights)
     : this(weights, new BiasesVector(weights.NumberOfOutputs))
 {
 }
Example #14
 public WeightedCombiner(WeightsMatrix weights, BiasesVector biases)
     : base(weights, biases)
 {
 }