Example #1
0
        /// <summary>
        /// Computes the plain gradient-descent update: the accumulated gradient
        /// scaled by -stepSize / batchSize (negated, batch-averaged step).
        /// </summary>
        /// <param name="gradient">Accumulated weights gradient for the batch.</param>
        /// <returns>A newly allocated matrix holding the update step.</returns>
        public override WeightsMatrix WeightsUpdate(WeightsMatrix gradient)
        {
            WeightsMatrix update = gradient.Copy();
            update.Scale(-_stepSize / _batchSize);
            return update;
        }
Example #2
0
        /// <summary>
        /// Value equality for weight matrices: same dimensions and all entries
        /// equal within a small absolute tolerance (== on doubles is unreliable).
        /// </summary>
        /// <param name="other">Matrix to compare against; may be null.</param>
        /// <returns>True if dimensions match and every entry differs by less than 1e-9.</returns>
        public bool Equals(WeightsMatrix other)
        {
            if (other == null)
            {
                return(false);
            }

            // Fast path: a matrix is always equal to itself; skips the O(n*m) scan.
            if (ReferenceEquals(this, other))
            {
                return(true);
            }

            if (this.NumberOfInputs != other.NumberOfInputs || this.NumberOfOutputs != other.NumberOfOutputs)
            {
                return(false);
            }

            // Absolute tolerance for entry-wise comparison.
            const double epsilon = 0.000000001;

            for (int i = 0; i < this.NumberOfOutputs; i++)
            {
                for (int j = 0; j < NumberOfInputs; j++)
                {
                    if (Math.Abs(this._matrix[i, j] - other._matrix[i, j]) >= epsilon)
                    {
                        return(false);
                    }
                }
            }

            return(true);
        }
Example #3
0
        // Null-checks weights before the base-constructor call dereferences it.
        // The original in-body null check ran too late: base(weights.NumberOfOutputs, ...)
        // executes first and would throw NullReferenceException instead of the
        // intended ArgumentException.
        private static WeightsMatrix _checkWeights(WeightsMatrix weights)
        {
            if (weights == null)
            {
                throw new ArgumentException("Attempt to make a layer with null weights or biases.");
            }
            return weights;
        }

        /// <summary>
        /// Builds a layer from explicit weights and biases, with an optional activation.
        /// Pass null for both function arguments to create a purely linear layer.
        /// </summary>
        /// <param name="weights">Layer weights; must not be null.</param>
        /// <param name="biases">Layer biases; must not be null.</param>
        /// <param name="activationfunction">Activation function, or null for a linear layer.</param>
        /// <param name="derivativefunction">Derivative of the activation; required whenever an activation is supplied.</param>
        /// <exception cref="ArgumentException">
        /// Thrown when weights or biases is null, or when an activation is supplied without its derivative.
        /// </exception>
        public Layer(
            WeightsMatrix weights,
            NetworkVector biases,
            ActivationFunction activationfunction,
            DerivativeFunction derivativefunction
            )
            : base(_checkWeights(weights).NumberOfOutputs, weights.NumberOfInputs)
        {
            if (biases == null)
            {
                throw new ArgumentException("Attempt to make a layer with null weights or biases.");
            }

            if (activationfunction != null && derivativefunction == null)
            {
                throw new ArgumentException("derivativefunction cannot be null, if activation is not null");
            }

            _combiner = new WeightedCombiner(weights, biases);

            // A null activation means this layer applies no nonlinearity.
            if (activationfunction == null)
            {
                _neuralFunction = null;
            }
            else
            {
                _neuralFunction = new NeuralFunction(_combiner.NumberOfOutputs, activationfunction, derivativefunction);
            }
        }
Example #4
0
        /// <summary>
        /// Accumulates the weights gradient as a sum of outer products over the
        /// paired segments of the stored input and the incoming output gradient.
        /// </summary>
        /// <param name="outputgradient">Gradient flowing back from this component's output.</param>
        /// <returns>A freshly allocated gradient matrix, same shape as Weights.</returns>
        public override WeightsMatrix WeightsGradient(NetworkVector outputgradient)
        {
            WeightsMatrix gradient = new WeightsMatrix(Weights.NumberOfOutputs, Weights.NumberOfInputs);

            // pair.First is the input segment, pair.Second the matching output-gradient segment.
            foreach (var segmentPair in _segmentAndPair(VectorInput, outputgradient))
            {
                gradient.Add(segmentPair.Second.OuterProduct(segmentPair.First));
            }

            return gradient;
        }
Example #5
0
        // Null-checks weights before the base-constructor call dereferences it.
        // The original in-body null check ran too late: base(weights.NumberOfOutputs, ...)
        // executes first and would throw NullReferenceException instead of the
        // intended ArgumentException.
        private static WeightsMatrix _checkWeights(WeightsMatrix weights)
        {
            if (weights == null)
            {
                throw new ArgumentException("Attempt to make a WeightedCombiner with weights == null.");
            }
            return weights;
        }

        /// <summary>
        /// Builds a combiner from explicit weights and biases. Both inputs are
        /// deep-copied, so the caller's matrices are never aliased.
        /// </summary>
        /// <param name="weights">Combiner weights; must not be null.</param>
        /// <param name="biases">Combiner biases; dimension must equal the number of outputs.</param>
        /// <exception cref="ArgumentException">
        /// Thrown when weights or biases is null, or when the biases dimension does not match the output count.
        /// </exception>
        public WeightedCombiner(WeightsMatrix weights, NetworkVector biases)
            : base(_checkWeights(weights).NumberOfOutputs, weights.NumberOfInputs)
        {
            if (biases == null)
            {
                throw new ArgumentException("Attempt to make a WeightedCombiner with biases == null.");
            }

            if (biases.Dimension != weights.NumberOfOutputs)
            {
                throw new ArgumentException("Dimension of biases must be the same as the number of outputs.");
            }

            Weights     = weights.Copy();
            Biases      = biases.Copy();
            VectorInput = new NetworkVector(weights.NumberOfInputs);
        }
Example #6
0
 /// <summary>
 /// Factory for a layer with no activation function (purely linear output).
 /// </summary>
 /// <param name="weights">Layer weights; must not be null.</param>
 /// <param name="biases">Layer biases; must not be null.</param>
 /// <returns>A new linear <see cref="Layer"/>.</returns>
 public static Layer CreateLinearLayer(WeightsMatrix weights, NetworkVector biases)
 {
     Layer linearLayer = new Layer(weights, biases, null, null);
     return linearLayer;
 }
Example #7
0
 /// <summary>
 /// Factory for a logistic-activation layer with zero biases.
 /// </summary>
 /// <param name="weights">Layer weights; must not be null.</param>
 /// <returns>A new logistic <see cref="Layer"/> whose biases are all zero.</returns>
 public static Layer CreateLogisticLayer(WeightsMatrix weights)
 {
     NetworkVector zeroBiases = new NetworkVector(weights.NumberOfOutputs);
     return new Layer(weights, zeroBiases, NeuralFunction.__Logistic, NeuralFunction.__LogisticDerivative);
 }
Example #8
0
 /// <summary>
 /// Convenience constructor: linear layer (no activation) with zero biases.
 /// </summary>
 /// <param name="weights">Layer weights; must not be null.</param>
 public Layer(WeightsMatrix weights)
     : this(weights, new NetworkVector(weights.NumberOfOutputs), null, null)
 {
 }
Example #9
0
 /// <summary>
 /// Convenience constructor: linear layer (no activation) with explicit biases.
 /// </summary>
 /// <param name="weights">Layer weights; must not be null.</param>
 /// <param name="biases">Layer biases; must not be null.</param>
 public Layer(WeightsMatrix weights, NetworkVector biases)
     : this(weights, biases, null, null)
 {
 }
Example #10
0
 /// <summary>
 /// Factory for a logistic-activation layer with explicit biases.
 /// </summary>
 /// <param name="weights">Layer weights; must not be null.</param>
 /// <param name="biases">Layer biases; must not be null.</param>
 /// <returns>A new logistic <see cref="Layer"/>.</returns>
 public static Layer CreateLogisticLayer(WeightsMatrix weights, NetworkVector biases)
 {
     Layer logisticLayer = new Layer(weights, biases, NeuralFunction.__Logistic, NeuralFunction.__LogisticDerivative);
     return logisticLayer;
 }
Example #11
0
 /// <summary>
 /// Produces the update to apply to the weights, given an accumulated gradient.
 /// Implementations define the training rule (e.g. a scaled negative gradient).
 /// </summary>
 /// <param name="gradient">Accumulated weights gradient.</param>
 /// <returns>The weights update matrix.</returns>
 public abstract WeightsMatrix WeightsUpdate(WeightsMatrix gradient);
Example #12
0
 /// <summary>
 /// Factory for a linear layer (no activation) with zero biases.
 /// </summary>
 /// <param name="weights">Layer weights; must not be null.</param>
 /// <returns>A new linear <see cref="Layer"/> whose biases are all zero.</returns>
 public static Layer CreateLinearLayer(WeightsMatrix weights)
 {
     NetworkVector zeroBiases = new NetworkVector(weights.NumberOfOutputs);
     return new Layer(weights, zeroBiases, null, null);
 }
Example #13
0
 // Guards against a null source before the base-constructor call dereferences
 // it; without this, a null argument surfaces as NullReferenceException.
 private static WeightedCombiner _checkCombiner(WeightedCombiner combiner)
 {
     if (combiner == null)
     {
         throw new ArgumentException("Attempt to make a WeightedCombiner from a null combiner.");
     }
     return combiner;
 }

 /// <summary>
 /// Copy constructor: deep-copies weights and biases from an existing combiner.
 /// </summary>
 /// <param name="combiner">Source combiner; must not be null.</param>
 /// <exception cref="ArgumentException">Thrown when combiner is null.</exception>
 public WeightedCombiner(WeightedCombiner combiner)
     : base(_checkCombiner(combiner).NumberOfOutputs, combiner.NumberOfInputs)
 {
     this.Biases  = combiner.Biases.Copy();
     this.Weights = combiner.Weights.Copy();
     // Consistency with the (weights, biases) constructor, which also
     // allocates the input buffer; the original copy constructor left it unset.
     VectorInput = new NetworkVector(combiner.NumberOfInputs);
 }
Example #14
0
 /// <summary>
 /// Convenience constructor: combiner with the given weights and zero biases.
 /// </summary>
 /// <param name="weights">Combiner weights; must not be null.</param>
 public WeightedCombiner(WeightsMatrix weights)
     : this(weights, new NetworkVector(weights.NumberOfOutputs))
 {
 }
Example #15
0
 /// <summary>
 /// Entry-wise addition in place: replaces this matrix's backing storage with
 /// the sum of it and the other matrix's entries.
 /// </summary>
 /// <param name="other">Matrix to add; NOTE(review): assumed to have matching dimensions — not checked here.</param>
 public void Add(WeightsMatrix other)
 {
     _matrix = _matrix.Add(other._matrix);
 }
Example #16
0
 /// <summary>
 /// Entry-wise subtraction in place: replaces this matrix's backing storage
 /// with the difference of it and the other matrix's entries.
 /// </summary>
 /// <param name="other">Matrix to subtract; NOTE(review): assumed to have matching dimensions — not checked here.</param>
 public void Subtract(WeightsMatrix other)
 {
     _matrix = _matrix.Subtract(other._matrix);
 }
Example #17
0
 /// <summary>
 /// Base setup for a trainable component: allocates zeroed accumulators for
 /// the biases gradient and the weights gradient.
 /// </summary>
 /// <param name="numberofoutputs">Number of outputs (rows of the weights gradient, length of the biases gradient).</param>
 /// <param name="numberofinputs">Number of inputs (columns of the weights gradient).</param>
 public TrainableComponent(int numberofoutputs, int numberofinputs)
 {
     _biasesGradientAccumulator = new NetworkVector(numberofoutputs);
     _weightsGradientAccumulator = new WeightsMatrix(numberofoutputs, numberofinputs);
 }