Example #1
        public void CanAddTrainable()
        {
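            // A component added with AddTrainable should be counted and should appear
            // both in the full forward enumeration and in the trainable-only enumeration.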
            WeightedCombiner  wc        = new WeightedCombiner(new NeuralNet.WeightsMatrix(1, 1));
            NetComponentChain layerlist = new NetComponentChain();

            layerlist.AddTrainable(wc);
            List <NetComponent> allComponents       = new List <NetComponent>(layerlist.ForwardEnumeration);
            List <NetComponent> trainableComponents = new List <NetComponent>(layerlist.ForwardTrainableComponentsEnumeration);

            Assert.AreEqual(1, layerlist.NumberOfComponents);
            Assert.IsTrue(allComponents.Contains(wc));
            Assert.IsTrue(trainableComponents.Contains(wc));
        }
Example #2
        public void CannotAddLayerOfWrongSize()
        {
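            // layer1 produces a single output, so a layer expecting two inputs cannot
            // be appended to the chain; AddTrainable is expected to throw.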
            Layer layer1 = Layer.CreateLinearLayer(new NeuralNet.WeightsMatrix(new double[, ] {
                { 1 }
            }));
            Layer layer2 = Layer.CreateLogisticLayer(new NeuralNet.WeightsMatrix(new double[, ] {
                { 1, 2 }
            }));
            NetComponentChain layerlist = new NetComponentChain(layer1);

            try
            {
                layerlist.AddTrainable(layer2);
                Assert.Fail("Add should throw and ArgumentException if when trying to add a layer of the wrong size, but did not.");
            }
            catch (ArgumentException)
            { }
        }
Example #3
        public void CanRunTwoLayerNetWithOneInput()
        {
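            // Build a fixed 3-input, 2-neuron layer feeding a trainable 2-to-1 linear
            // output layer, then run a single input vector through the chain.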
            Layer inputlayer = new Layer(new NeuralNet.WeightsMatrix(new double[, ] {
                { 1, 1, 1 }, { 1, 1, 1 }
            }));
            Layer outputlayer = Layer.CreateLinearLayer(new NeuralNet.WeightsMatrix(new double[, ] {
                { 1, 1 }
            }));
            NetComponentChain network = new NetComponentChain();

            network.AddFixed(inputlayer);
            network.AddTrainable(outputlayer);

            NeuralNet.NetworkVector inputvector = new NeuralNet.NetworkVector(new double[] { 1, 0, 0 });
            NetworkVector           result      = network.Run(inputvector);

            NeuralNet.NetworkVector outputCheck = new NeuralNet.NetworkVector(new double[] { 2 });
            Assert.AreEqual(outputCheck, result);
        }
Example #4
        public void CanBackPropagateTwoLayerNetGradient1()
        {
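            // Same 3 -> 2 -> 1 chain as above: after a forward pass, back-propagate a
            // unit output gradient and check the gradient with respect to the inputs.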
            Layer inputlayer = new Layer(new NeuralNet.WeightsMatrix(new double[, ] {
                { 1, 1, 1 }, { 1, 1, 1 }
            }));
            Layer outputlayer = Layer.CreateLinearLayer(new NeuralNet.WeightsMatrix(new double[, ] {
                { 1, 1 }
            }));
            NetComponentChain network = new NetComponentChain();

            network.AddFixed(inputlayer);
            network.AddTrainable(outputlayer);

            NeuralNet.NetworkVector inputvector    = new NeuralNet.NetworkVector(new double[] { 1, 0, 0 });
            NeuralNet.NetworkVector outputgradient = new NeuralNet.NetworkVector(new double[] { 1 });

            network.Run(inputvector);

            NeuralNet.NetworkVector inputGradientCheck = new NeuralNet.NetworkVector(new double[] { 2, 2, 2 });
            Assert.AreEqual(inputGradientCheck, network.InputGradient(outputgradient));
        }
Example #5
        public void TrainBatch_SmallChain_CorrectOnePass()
        {
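            // Build a 3 -> 2 -> 1 chain of linear layers with all-ones weights, train a
            // single batch pass with squared error and gradient descent on four vector
            // pairs, then check the updated weights and biases of both layers.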
            int inputs        = 3;
            int inputneurons  = 2;
            int outputneurons = 1;

            double[,] inputWeights  = new double[inputneurons, inputs];
            double[,] outputWeights = new double[outputneurons, inputneurons];

            for (int i = 0; i < inputneurons; i++)
            {
                for (int j = 0; j < inputs; j++)
                {
                    inputWeights[i, j] = 1;
                }
            }

            for (int i = 0; i < outputneurons; i++)
            {
                for (int j = 0; j < inputneurons; j++)
                {
                    outputWeights[i, j] = 1;
                }
            }

            Layer InputLayer  = Layer.CreateLinearLayer(new WeightsMatrix(inputWeights), new NetworkVector(inputneurons));
            Layer OutputLayer = Layer.CreateLinearLayer(new WeightsMatrix(outputWeights), new NetworkVector(outputneurons));

            NetComponentChain network = new NetComponentChain();

            network.AddTrainable(InputLayer);
            network.AddTrainable(OutputLayer);


            TrainingCollection trainingVectors = new TrainingCollection
            {
                new VectorPair(
                    new NetworkVector(new double[] { 0, 0, 0 }),
                    new NetworkVector(new double[] { 1 })
                    ),
                new VectorPair(
                    new NetworkVector(new double[] { 1, 0, 0 }),
                    new NetworkVector(new double[] { 0 })
                    ),
                new VectorPair(
                    new NetworkVector(new double[] { 0, 1, 0 }),
                    new NetworkVector(new double[] { 0 })
                    ),
                new VectorPair(
                    new NetworkVector(new double[] { 1, 1, 0 }),
                    new NetworkVector(new double[] { 1 })
                    )
            };

            Trainer trainer = new Trainer(network, new SquaredError(), new GradientDescent());

            trainer.Train(trainingVectors);

            WeightsMatrix inputWeightsCheck = new WeightsMatrix(new double[, ] {
                { -4, -4, 1 }, { -4, -4, 1 }
            });
            NetworkVector inputBiasesCheck   = new NetworkVector(new double[] { -6, -6 });
            WeightsMatrix outputWeightsCheck = new WeightsMatrix(new double[, ] {
                { -9, -9 }
            });
            NetworkVector outputBiasesCheck = new NetworkVector(new double[] { -6 });

            Assert.AreEqual(inputWeightsCheck, InputLayer.Weights);
            Assert.AreEqual(inputBiasesCheck, InputLayer.Biases);
            Assert.AreEqual(outputWeightsCheck, OutputLayer.Weights);
            Assert.AreEqual(outputBiasesCheck, OutputLayer.Biases);
        }