예제 #1
0
        /// <summary>
        /// Running a single linear layer on the zero vector should return the zero vector unchanged.
        /// </summary>
        public void CanRunWithZeroInput()
        {
            // Single-layer chain built from a 1x1 weight matrix.
            NeuralNet.WeightsMatrix weights = new NeuralNet.WeightsMatrix(new double[, ] { { 1 } });
            NetComponentChain layerlist = new NetComponentChain(Layer.CreateLinearLayer(weights));

            NeuralNet.NetworkVector vector = new NeuralNet.NetworkVector(new double[] { 0 });
            NetworkVector result = layerlist.Run(vector);

            // 1 * 0 = 0, so the output equals the input.
            Assert.AreEqual(vector, result);
        }
예제 #2
0
        /// <summary>
        /// Run must reject an input vector whose dimension does not match the network's input count.
        /// </summary>
        public void CannotRunWithInputOfWrongSize()
        {
            NeuralNet.WeightsMatrix weights = new NeuralNet.WeightsMatrix(new double[, ] { { 1 } });
            NetComponentChain layerlist = new NetComponentChain(Layer.CreateLinearLayer(weights));

            // The network accepts one input; supply two.
            NeuralNet.NetworkVector input = new NeuralNet.NetworkVector(new double[] { 0, 0 });

            try
            {
                layerlist.Run(input);
                Assert.Fail("Run should throw an ArgumentException for input of the wrong size, but did not.");
            }
            catch (ArgumentException)
            {
                // Expected: wrong-size input is rejected.
            }
        }
예제 #3
0
        /// <summary>
        /// A two-layer chain (2x2 weights, then 1x2 linear) run on the zero vector should output zero.
        /// </summary>
        public void CanRunTwoLayersWithZeroInput()
        {
            Layer layer1 = new Layer(new NeuralNet.WeightsMatrix(new double[, ] { { 1, 1 }, { 1, 1 } }));
            Layer layer2 = Layer.CreateLinearLayer(new NeuralNet.WeightsMatrix(new double[, ] { { 1, 1 } }));

            NetComponentChain layerlist = new NetComponentChain(layer1);
            layerlist.AddFixed(layer2);

            NeuralNet.NetworkVector vector = new NeuralNet.NetworkVector(new double[] { 0, 0 });
            NetworkVector result = layerlist.Run(vector);

            // An all-zero input propagated through the weights stays zero.
            NeuralNet.NetworkVector outputCheck = new NeuralNet.NetworkVector(new double[] { 0 });
            Assert.AreEqual(outputCheck, result);
        }
예제 #4
0
        /// <summary>
        /// Builds a 3-input -> 2-unit -> 1-output chain of all-ones weights and
        /// checks that input (1, 0, 0) produces the expected output 2.
        /// </summary>
        public void CanRunTwoLayerNetWithOneInput()
        {
            Layer inputlayer = new Layer(new NeuralNet.WeightsMatrix(new double[, ] { { 1, 1, 1 }, { 1, 1, 1 } }));
            Layer outputlayer = Layer.CreateLinearLayer(new NeuralNet.WeightsMatrix(new double[, ] { { 1, 1 } }));

            NetComponentChain network = new NetComponentChain();
            network.AddFixed(inputlayer);
            network.AddTrainable(outputlayer);

            NeuralNet.NetworkVector inputvector = new NeuralNet.NetworkVector(new double[] { 1, 0, 0 });
            NetworkVector result = network.Run(inputvector);

            // Each hidden unit sees the single 1, so the output layer sums 1 + 1 = 2.
            NeuralNet.NetworkVector outputCheck = new NeuralNet.NetworkVector(new double[] { 2 });
            Assert.AreEqual(outputCheck, result);
        }
예제 #5
0
        /// <summary>
        /// Walks the chain from tail to head, letting each trainable component
        /// accumulate its own parameter gradients, and transforms the gradient
        /// through every component via InputGradient.
        /// </summary>
        /// <param name="outputgradient">Gradient of the loss with respect to the network output.</param>
        public void BackPropagate(NetworkVector outputgradient)
        {
            NetworkVector currentGradient = outputgradient.Copy();

            for (_networkComponentNode node = _tail; node != null; node = node.Previous)
            {
                NetComponent currentComponent = node.Component;

                // Only trainable components record gradients for their own parameters.
                if (node.IsTrainable)
                {
                    (currentComponent as TrainableComponent).BackPropagate(currentGradient);
                }

                // Chain rule: convert the gradient into this component's input space
                // before moving one node further back.
                currentGradient = currentComponent.InputGradient(currentGradient);
            }
        }
예제 #6
0
        /// <summary>
        /// After running a 3->2->1 all-ones network on (1, 0, 0), an output
        /// gradient of 1 should back-propagate to the input gradient (2, 2, 2).
        /// </summary>
        public void CanBackPropagateTwoLayerNetGradient1()
        {
            Layer inputlayer = new Layer(new NeuralNet.WeightsMatrix(new double[, ] { { 1, 1, 1 }, { 1, 1, 1 } }));
            Layer outputlayer = Layer.CreateLinearLayer(new NeuralNet.WeightsMatrix(new double[, ] { { 1, 1 } }));

            NetComponentChain network = new NetComponentChain();
            network.AddFixed(inputlayer);
            network.AddTrainable(outputlayer);

            NeuralNet.NetworkVector inputvector = new NeuralNet.NetworkVector(new double[] { 1, 0, 0 });
            NeuralNet.NetworkVector outputgradient = new NeuralNet.NetworkVector(new double[] { 1 });

            // Run first so the components hold the internal state InputGradient reads.
            network.Run(inputvector);

            NeuralNet.NetworkVector inputGradientCheck = new NeuralNet.NetworkVector(new double[] { 2, 2, 2 });
            Assert.AreEqual(inputGradientCheck, network.InputGradient(outputgradient));
        }
예제 #7
0
        /// <summary>
        /// Runs <paramref name="input"/> forward through every component in order.
        /// </summary>
        /// <param name="input">Vector whose dimension must equal the network's input count.</param>
        /// <returns>The output of the last component in the chain.</returns>
        /// <exception cref="InvalidOperationException">The chain contains no components.</exception>
        /// <exception cref="ArgumentException">The input has the wrong dimension.</exception>
        public override NetworkVector Run(NetworkVector input)
        {
            if (NumberOfComponents == 0)
            {
                throw new InvalidOperationException("Attempt to run a network with no layers.");
            }

            if (input.Dimension != NumberOfInputs)
            {
                throw new ArgumentException(string.Format("The network accepts {0} inputs, but input has dimension {1}", NumberOfInputs, input.Dimension));
            }

            // Feed each component's output into the next one.
            NetworkVector current = input;
            foreach (NetComponent component in ForwardEnumeration)
            {
                current = component.Run(current);
            }

            return current;
        }
예제 #8
0
        /// <summary>
        /// Back-propagates <paramref name="outputgradient"/> through every component,
        /// tail to head, and returns the gradient with respect to the network input.
        /// </summary>
        /// <param name="outputgradient">Gradient of the loss with respect to the network output.</param>
        /// <returns>Gradient of the loss with respect to the network input.</returns>
        /// <exception cref="InvalidOperationException">The chain contains no components.</exception>
        /// <exception cref="ArgumentException">The gradient has the wrong dimension.</exception>
        public override NetworkVector InputGradient(NetworkVector outputgradient)
        {
            if (NumberOfComponents == 0)
            {
                // Fixed typo in the message: "propogate" -> "propagate".
                throw new InvalidOperationException("Attempt to back propagate in a network with no layers.");
            }

            if (outputgradient.Dimension != _tail.Component.NumberOfOutputs)
            {
                throw new ArgumentException(string.Format("The network has {0} outputs, but outputgradient has dimension {1}", _tail.Component.NumberOfOutputs, outputgradient.Dimension));
            }

            // Copy so the caller's gradient vector is never mutated.
            NetworkVector gradient = outputgradient.Copy();

            foreach (NetComponent component in BackwardsEnumeration)
            {
                gradient = component.InputGradient(gradient);
            }

            return gradient;
        }
예제 #9
0
        /// <summary>
        /// Creates a combiner from a weight matrix and a bias vector; defensive
        /// copies of both are stored.
        /// </summary>
        /// <param name="weights">Weight matrix; determines the input and output dimensions.</param>
        /// <param name="biases">Bias vector; must have one entry per output.</param>
        /// <exception cref="ArgumentException">An argument is null or the dimensions disagree.</exception>
        public WeightedCombiner(WeightsMatrix weights, NetworkVector biases)
            : base(weights.NumberOfOutputs, weights.NumberOfInputs)
        {
            // NOTE(review): a null weights argument already throws NullReferenceException
            // in the base-constructor call above before this check can run; the check is
            // kept as documentation of the intended contract.
            if (weights == null)
            {
                throw new ArgumentException("Attempt to make a WeightedCombiner with weights == null.");
            }

            if (biases == null)
            {
                throw new ArgumentException("Attempt to make a WeightedCombiner with biases == null.");
            }

            if (biases.Dimension != weights.NumberOfOutputs)
            {
                throw new ArgumentException("Dimension of biases must be the same as the number of outputs.");
            }

            Weights     = weights.Copy();
            Biases      = biases.Copy();
            VectorInput = new NetworkVector(weights.NumberOfInputs);
        }
예제 #10
0
파일: Layer.cs 프로젝트: pscrv/NeuralNet
 /// <summary>
 /// Factory for a layer using the logistic activation and its derivative.
 /// </summary>
 public static Layer CreateLogisticLayer(WeightsMatrix weights, NetworkVector biases)
 {
     Layer logisticLayer = new Layer(weights, biases, NeuralFunction.__Logistic, NeuralFunction.__LogisticDerivative);
     return logisticLayer;
 }
예제 #11
0
파일: Layer.cs 프로젝트: pscrv/NeuralNet
 /// <summary>
 /// Accumulates the bias and weight gradients implied by
 /// <paramref name="outputgradient"/> for the given <paramref name="input"/>.
 /// </summary>
 public override void BackPropagate(NetworkVector outputgradient, NetworkVector input)
 {
     var biasesGradient  = BiasesGradient(outputgradient);
     var weightsGradient = WeightsGradient(outputgradient, input);

     _biasesGradientAccumulator.Add(biasesGradient);
     _weightsGradientAccumulator.Add(weightsGradient);
 }
예제 #12
0
 /// <summary>
 /// Bias gradient: the sum of the per-repetition segments of the output gradient.
 /// </summary>
 public override NetworkVector BiasesGradient(NetworkVector outputgradient)
 {
     var segments = _segment(outputgradient);
     return NetworkVector.Sum(segments);
 }
예제 #13
0
 /// <summary>
 /// Segments both vectors and pairs corresponding segments together.
 /// </summary>
 protected IEnumerable <VectorPair> _segmentAndPair(NetworkVector first, NetworkVector second)
 {
     var firstSegments  = _segment(first);
     var secondSegments = _segment(second);

     return firstSegments.Zip(secondSegments, (left, right) => new VectorPair(left, right));
 }
예제 #14
0
 /// <summary>
 /// Splits the vector into <c>_repetitions</c> segments via NetworkVector.Segment.
 /// </summary>
 protected List <NetworkVector> _segment(NetworkVector vectortoSegment)
 {
     return vectortoSegment.Segment(_repetitions);
 }
예제 #15
0
 // Convenience constructor: defaults the training mode to ONLINE.
 public WeightedCombiner(NetworkMatrix weights, NetworkVector biases)
     : this(weights, biases, TrainingMode.ONLINE)
 {
 }
예제 #16
0
 /// <summary>
 /// Weight gradient: the outer product of the output gradient with the input.
 /// </summary>
 public WeightsMatrix WeightsGradient(NetworkVector outputgradient, NetworkVector input)
 {
     return outputgradient.OuterProduct(input);
 }
예제 #17
0
 /// <summary>
 /// Applies the underlying matrix to <paramref name="vector"/> and wraps
 /// the product as a NetworkVector.
 /// </summary>
 public NetworkVector LeftMultiply(NetworkVector vector)
 {
     var product = _matrix.Multiply(vector.Vector);
     return new NetworkVector(product);
 }
예제 #18
0
파일: Layer.cs 프로젝트: pscrv/NeuralNet
 /// <summary>
 /// Weight gradient for this layer: push the output gradient through the
 /// activation first, then delegate to the combiner.
 /// </summary>
 public WeightsMatrix WeightsGradient(NetworkVector outputgradient, NetworkVector input)
 {
     var activationGradient = ActivationGradient(outputgradient);
     return _combiner.WeightsGradient(activationGradient, input);
 }
예제 #19
0
파일: Layer.cs 프로젝트: pscrv/NeuralNet
 /// <summary>
 /// Weight gradient for this layer: push the output gradient through the
 /// activation first, then delegate to the combiner.
 /// </summary>
 public override WeightsMatrix WeightsGradient(NetworkVector outputgradient)
 {
     var activationGradient = ActivationGradient(outputgradient);
     return _combiner.WeightsGradient(activationGradient);
 }
예제 #20
0
파일: Layer.cs 프로젝트: pscrv/NeuralNet
 // Convenience constructor: no activation function or derivative (linear layer,
 // matching CreateLinearLayer's null/null arguments).
 public Layer(WeightsMatrix weights, NetworkVector biases)
     : this(weights, biases, null, null)
 {
 }
예제 #21
0
        /// <summary>
        /// Gradient of the loss with respect to this component's input: the output
        /// gradient scaled componentwise by the activation derivative evaluated at
        /// the stored input/output vectors.
        /// </summary>
        /// <param name="outputgradient">Gradient of the loss w.r.t. the output; must match NumberOfOutputs.</param>
        /// <param name="input">Unused; kept for interface compatibility.</param>
        /// <param name="output">Unused; kept for interface compatibility.</param>
        /// <returns>The gradient with respect to the input.</returns>
        /// <exception cref="ArgumentException">outputgradient is null or has the wrong dimension.</exception>
        public NetworkVector InputGradient(NetworkVector outputgradient, NetworkVector input, NetworkVector output)
        {
            if (outputgradient == null || outputgradient.Dimension != NumberOfOutputs)
            {
                throw new ArgumentException("outputgradient may not be null and must have dimension equal to NumberOfNeurons.");
            }

            // No activation function means the component is the identity on gradients.
            if (_neuralFunctionDerivative == null)
            {
                return outputgradient.Copy();
            }

            // Chain rule: the derivative must be multiplied by the incoming output
            // gradient. The previous code returned the bare derivative and silently
            // ignored outputgradient (the correct computation was left commented out).
            NetworkVector derivative = NetworkVector.ApplyFunctionComponentWise(_inputVector, _outputVector, (x, y) => _neuralFunctionDerivative(x, y));
            return NetworkVector.ApplyFunctionComponentWise(derivative, outputgradient, (x, y) => x * y);
        }
예제 #22
0
        /// <summary>
        /// Half the sum of squared componentwise differences between
        /// <paramref name="result"/> and <paramref name="expected"/>.
        /// </summary>
        protected double _errorFunction(NetworkVector result, NetworkVector expected)
        {
            NetworkVector squaredErrors = NetworkVector.ApplyFunctionComponentWise(result, expected, (x, y) => (x - y) * (x - y));
            double totalSquaredError = squaredErrors.SumValues();

            return totalSquaredError / 2;
        }
예제 #23
0
파일: Layer.cs 프로젝트: pscrv/NeuralNet
 /// <summary>
 /// Bias gradient for this layer: push the output gradient through the
 /// activation first, then delegate to the combiner.
 /// </summary>
 public override NetworkVector BiasesGradient(NetworkVector outputgradient)
 {
     var activationGradient = ActivationGradient(outputgradient);
     return _combiner.BiasesGradient(activationGradient);
 }
예제 #24
0
 /// <summary>
 /// Gradient of the loss with respect to this component's input, given the
 /// gradient with respect to its output.
 /// </summary>
 public abstract NetworkVector InputGradient(NetworkVector outputgradient);
예제 #25
0
파일: Layer.cs 프로젝트: pscrv/NeuralNet
 /// <summary>
 /// Factory for a layer with no activation function or derivative (both null).
 /// </summary>
 public static Layer CreateLinearLayer(WeightsMatrix weights, NetworkVector biases)
 {
     Layer linearLayer = new Layer(weights, biases, null, null);
     return linearLayer;
 }
예제 #26
0
 /// <summary>
 /// Produces this component's output for the given input vector.
 /// </summary>
 public abstract NetworkVector Run(NetworkVector input);
예제 #27
0
파일: Layer.cs 프로젝트: pscrv/NeuralNet
 /// <summary>
 /// Back-propagates using the stored input vector (VectorInput) as the input
 /// argument of the two-parameter overload.
 /// </summary>
 public override void BackPropagate(NetworkVector outputgradient)
 {
     var storedInput = VectorInput;
     BackPropagate(outputgradient, storedInput);
 }
예제 #28
0
 // Pass-through constructor: all setup happens in the WeightedCombiner base.
 public OnlineWeightedCombiner(NetworkMatrix weights, NetworkVector biases)
     : base(weights, biases)
 {
 }
예제 #29
0
 /// <summary>
 /// Snapshot of a layer's parameters as plain arrays.
 /// </summary>
 public LayerState(NetworkMatrix weights, NetworkVector biases)
 {
     Weights = weights.ToArray();
     Biases  = biases.ToArray();
 }
예제 #30
0
 /// <summary>
 /// Batch-mode combiner: allocates fresh delta accumulators for the weights
 /// and biases on top of the base setup.
 /// </summary>
 public BatchWeightedCombiner(NetworkMatrix weights, NetworkVector biases)
     : base(weights, biases)
 {
     _weightsDelta = new NetworkMatrix(NumberOfOutputs, NumberOfInputs);
     _biasesDelta  = new NetworkVector(NumberOfOutputs);
 }