Example #1
        public override WeightsMatrix WeightsUpdate(WeightsMatrix gradient)
        {
            WeightsMatrix result = gradient.Copy();

            result.Scale(-_stepSize / _batchSize);
            return(result);
        }
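Note: the override above returns the accumulated gradient scaled by -_stepSize / _batchSize, i.e. one averaged gradient-descent step for a mini-batch. Below is a minimal sketch of the same update done with the WeightsMatrix operations shown in these examples (Copy, Scale, Add); the dimensions and the step-size/batch-size values are illustrative assumptions, and a freshly constructed WeightsMatrix is assumed to start zero-filled.

        // Hedged sketch: apply a scaled-gradient update to a weights matrix by hand.
        WeightsMatrix weights  = new WeightsMatrix(3, 4);   // 3 outputs, 4 inputs (illustrative)
        WeightsMatrix gradient = new WeightsMatrix(3, 4);   // gradient accumulated over the batch
        WeightsMatrix update   = gradient.Copy();
        update.Scale(-0.1 / 10);                            // plays the role of -_stepSize / _batchSize
        weights.Add(update);                                 // in-place update of the weights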
Example #2
        public bool Equals(WeightsMatrix other)
        {
            if (other == null)
            {
                return(false);
            }

            if (this.NumberOfInputs != other.NumberOfInputs || this.NumberOfOutputs != other.NumberOfOutputs)
            {
                return(false);
            }

            double epsilon = 0.000000001;

            for (int i = 0; i < this.NumberOfOutputs; i++)
            {
                for (int j = 0; j < NumberOfInputs; j++)
                {
                    double difference = Math.Abs(this._matrix[i, j] - other._matrix[i, j]);
                    if (difference >= epsilon)
                    {
                        return(false);
                    }
                }
            }

            return(true);
        }
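Equals returns false on any dimension mismatch and otherwise compares every entry with an absolute tolerance of 1e-9. A hedged sketch of the two outcomes follows; it assumes a freshly constructed WeightsMatrix is zero-filled, which these examples do not state explicitly.

        WeightsMatrix a = new WeightsMatrix(3, 2);
        WeightsMatrix b = new WeightsMatrix(3, 2);
        WeightsMatrix c = new WeightsMatrix(2, 3);
        bool sameShapeAndEntries = a.Equals(b);   // true: every entry differs by less than 1e-9
        bool shapeMismatch       = a.Equals(c);   // false: the dimension check fails first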
Example #3
File: Layer.cs Project: pscrv/NeuralNet
        public Layer(
            WeightsMatrix weights,
            NetworkVector biases,
            ActivationFunction activationfunction,
            DerivativeFunction derivativefunction
            )
            : base(weights.NumberOfOutputs, weights.NumberOfInputs)
        {
            if (activationfunction != null && derivativefunction == null)
            {
                throw new ArgumentException("derivativefunction cannot be null if activationfunction is not null.");
            }

            if (weights == null || biases == null)
            {
                throw new ArgumentException("Attempt to make a layer with null weights or biases.");
            }

            _combiner = new WeightedCombiner(weights, biases);

            if (activationfunction == null)
            {
                _neuralFunction = null;
            }
            else
            {
                _neuralFunction = new NeuralFunction(_combiner.NumberOfOutputs, activationfunction, derivativefunction);
            }
        }
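The constructor delegates the affine part to a WeightedCombiner and, when an activation is supplied, wraps it in a NeuralFunction; an activation without its derivative throws. A hedged construction sketch using the logistic pair referenced by the factory methods below (the dimensions are illustrative):

        WeightsMatrix weights = new WeightsMatrix(3, 4);    // 3 outputs, 4 inputs (illustrative)
        NetworkVector biases  = new NetworkVector(3);
        Layer logisticLayer = new Layer(
            weights,
            biases,
            NeuralFunction.__Logistic,
            NeuralFunction.__LogisticDerivative
            );
        // Supplying the activation but omitting the derivative would throw an ArgumentException:
        // new Layer(weights, biases, NeuralFunction.__Logistic, null);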
Example #4
        public override WeightsMatrix WeightsGradient(NetworkVector outputgradient)
        {
            WeightsMatrix weightsGradient = new WeightsMatrix(Weights.NumberOfOutputs, Weights.NumberOfInputs);

            foreach (var pair in _segmentAndPair(VectorInput, outputgradient))
            {
                weightsGradient.Add(pair.Second.OuterProduct(pair.First));
            }

            return(weightsGradient);
        }
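Each pair produced by _segmentAndPair contributes the outer product of an output-gradient segment with the matching input segment, i.e. entry (i, j) of one contribution is g[i] * x[j]. A hedged sketch of a single contribution; only OuterProduct and Add are taken from these examples, and the index comment is illustrative notation.

        NetworkVector input          = new NetworkVector(4);   // x
        NetworkVector outputGradient = new NetworkVector(3);   // g
        WeightsMatrix contribution   = outputGradient.OuterProduct(input);   // entries g[i] * x[j]
        WeightsMatrix accumulator    = new WeightsMatrix(3, 4);
        accumulator.Add(contribution);   // WeightsGradient sums one such term per segment pair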
Example #5
        public WeightedCombiner(WeightsMatrix weights, NetworkVector biases)
            : base(weights.NumberOfOutputs, weights.NumberOfInputs)
        {
            if (weights == null)
            {
                throw new ArgumentException("Attempt to make a WeightedCombiner with weights == null.");
            }

            if (biases == null)
            {
                throw new ArgumentException("Attempt to make a WeightedCombiner with biases == null.");
            }

            if (biases.Dimension != weights.NumberOfOutputs)
            {
                throw new ArgumentException("Dimension of biases must be the same as the number of outputs.");
            }

            Weights     = weights.Copy();
            Biases      = biases.Copy();
            VectorInput = new NetworkVector(weights.NumberOfInputs);
            //Output = new NetworkVector(weights.NumberOfOutputs);
        }
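Construction copies the weights and biases and validates that the bias dimension matches the number of outputs. A hedged sketch of a valid and an invalid call (dimensions are illustrative):

        WeightsMatrix weights = new WeightsMatrix(3, 4);                     // 3 outputs, 4 inputs
        WeightedCombiner ok   = new WeightedCombiner(weights, new NetworkVector(3));
        // A bias vector whose Dimension differs from weights.NumberOfOutputs throws:
        // new WeightedCombiner(weights, new NetworkVector(2));              // ArgumentException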
Example #6
File: Layer.cs Project: pscrv/NeuralNet
 public static Layer CreateLinearLayer(WeightsMatrix weights, NetworkVector biases)
 {
     return(new Layer(weights, biases, null, null));
 }
Example #7
File: Layer.cs Project: pscrv/NeuralNet
 public static Layer CreateLogisticLayer(WeightsMatrix weights)
 {
     return(new Layer(weights, new NetworkVector(weights.NumberOfOutputs), NeuralFunction.__Logistic, NeuralFunction.__LogisticDerivative));
 }
Example #8
File: Layer.cs Project: pscrv/NeuralNet
 public Layer(WeightsMatrix weights)
     : this(weights, new NetworkVector(weights.NumberOfOutputs), null, null)
 {
 }
Example #9
File: Layer.cs Project: pscrv/NeuralNet
 public Layer(WeightsMatrix weights, NetworkVector biases)
     : this(weights, biases, null, null)
 {
 }
Example #10
File: Layer.cs Project: pscrv/NeuralNet
 public static Layer CreateLogisticLayer(WeightsMatrix weights, NetworkVector biases)
 {
     return(new Layer(weights, biases, NeuralFunction.__Logistic, NeuralFunction.__LogisticDerivative));
 }
Example #11
 public abstract WeightsMatrix WeightsUpdate(WeightsMatrix gradient);
Example #12
File: Layer.cs Project: pscrv/NeuralNet
 public static Layer CreateLinearLayer(WeightsMatrix weights)
 {
     return(new Layer(weights, new NetworkVector(weights.NumberOfOutputs), null, null));
 }
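The static factories in Examples #6, #7, #10, and #12 are thin wrappers around the constructor in Example #3, differing only in the activation pair and biases they pass along. A hedged sketch of the call styles (dimensions are illustrative):

 WeightsMatrix weights = new WeightsMatrix(3, 4);    // 3 outputs, 4 inputs (illustrative)
 NetworkVector biases  = new NetworkVector(3);

 Layer linear         = Layer.CreateLinearLayer(weights, biases);    // no activation
 Layer linearNoBias   = Layer.CreateLinearLayer(weights);            // zero biases, no activation
 Layer logistic       = Layer.CreateLogisticLayer(weights, biases);  // logistic activation
 Layer logisticNoBias = Layer.CreateLogisticLayer(weights);          // zero biases, logistic activation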
Example #13
 public WeightedCombiner(WeightedCombiner combiner)
     : base(combiner.NumberOfOutputs, combiner.NumberOfInputs)
 {
     this.Biases  = combiner.Biases.Copy();
     this.Weights = combiner.Weights.Copy();
 }
Example #14
 public WeightedCombiner(WeightsMatrix weights)
     : this(weights, new NetworkVector(weights.NumberOfOutputs))
 {
 }
Example #15
 public void Add(WeightsMatrix other)
 {
     _matrix = _matrix.Add(other._matrix);
 }
Example #16
 public void Subtract(WeightsMatrix other)
 {
     _matrix = _matrix.Subtract(other._matrix);
 }
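Add and Subtract mutate the receiving matrix in place, which is how a gradient accumulator (see Example #17 below) combines per-example gradients before an update like the one in Example #1. A hedged round-trip sketch (dimensions are illustrative):

 WeightsMatrix accumulator = new WeightsMatrix(3, 4);
 WeightsMatrix gradient    = new WeightsMatrix(3, 4);   // one example's gradient (illustrative)
 accumulator.Add(gradient);        // accumulate in place
 accumulator.Subtract(gradient);   // undo: the accumulator's entries return to their previous values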
Example #17
 public TrainableComponent(int numberofoutputs, int numberofinputs)
 {
     _weightsGradientAccumulator = new WeightsMatrix(numberofoutputs, numberofinputs);
     _biasesGradientAccumulator  = new NetworkVector(numberofoutputs);
 }