Example #1
        public void CanRunBigNetWithThreeInputs()
        {
            int inputs        = 100;
            int inputneurons  = 2000;
            int outputneurons = 1;

            double[] inputArray = new double[inputs];
            inputArray[0]  = 1;
            inputArray[7]  = 1;
            inputArray[77] = 1;
            NetworkVector inputvector = new NetworkVector(inputArray);

            LinearTwoLayerTestNetwork network = new LinearTwoLayerTestNetwork(inputs, inputneurons, outputneurons);

            network.Run(inputvector);

            double output = network.Output.ToArray()[0];
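            // Each of the 2000 hidden neurons sums the three active inputs (giving 3), and the
            // single output neuron sums all 2000 hidden values: 3 * 2000 = 6000 (this assumes the
            // test network initializes every weight to 1 and every bias to 0).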

            Assert.AreEqual(6000, output);
        }
Example #2
        public void CanBack2()
        {
            wc_2.Run(vector_3);
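
            // The expected values below are consistent with a fixture in which wc_2 has weights
            // {{1, 2, 3}, {2, 3, 4}}, vector_3 (the input) is (111, 112, 113) and vector_2 (the
            // output-side gradient) is (11, 12): the input gradient is W^T * g, the biases
            // gradient is g itself, and the weights gradient is the outer product g * x^T.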

            NetworkVector inputGradientCheck = new NetworkVector(
                new double[] { 11 * 1 + 12 * 2, 11 * 2 + 12 * 3, 11 * 3 + 12 * 4 }
                );
            NetworkVector biasesGradientCheck = new NetworkVector(
                new double[] { 11, 12 }
                );
            WeightsMatrix weightsGradientCheck = new WeightsMatrix(new double[, ] {
                { 11 * 111, 11 * 112, 11 * 113 }, { 12 * 111, 12 * 112, 12 * 113 }
            });

            Assert.AreEqual(inputGradientCheck, wc_2.InputGradient(vector_2));
            Assert.AreEqual(biasesGradientCheck, wc_2.BiasesGradient(vector_2));
            Assert.AreEqual(weightsGradientCheck, wc_2.WeightsGradient(vector_2, vector_3));
        }
Example #3
        public void BackPropagateIsCorrect()
        {
            double[,] weights = new double[, ] {
                { 1, 2 }, { 3, 5 }
            };
            Layer layer = new LinearLayer(weights);

            NetworkVector layerinput = new NetworkVector(new double[] { 1, -1 });

            layer.Run(layerinput);

            NetworkVector outputgradient = new NetworkVector(new double[] { 7, 11 });

            layer.BackPropagate(outputgradient);
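
            // BackPropagate updates the state in place: W <- W - g * x^T and b <- b - g, and the
            // input gradient is W^T * g. With W = {{1, 2}, {3, 5}}, x = (1, -1) and g = (7, 11)
            // this yields the weights, biases and input-gradient checks below. Because the layer
            // is square (2 inputs, 2 outputs), NumberOfInputs also serves as the row and bias
            // bound in the loops.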

            double[,] weightsCheck = new double[, ] {
                { -6, 9 }, { -8, 16 }
            };
            LayerState state = layer.State;

            for (int i = 0; i < layer.NumberOfInputs; i++)
            {
                for (int j = 0; j < layer.NumberOfInputs; j++)
                {
                    Assert.AreEqual(weightsCheck[i, j], state.Weights[i, j], string.Format("Failed for (i, j) = ({0}, {1})", i, j));
                }
            }

            double[] biasesCheck = new double[] { -7, -11 };
            for (int i = 0; i < layer.NumberOfInputs; i++)
            {
                Assert.AreEqual(biasesCheck[i], layer.State.Biases[i]);
            }

            double[] inputGradientCheck  = new double[] { 40, 69 };
            double[] inputGradientValues = layer.InputGradient.ToArray();
            for (int i = 0; i < layer.NumberOfInputs; i++)
            {
                Assert.AreEqual(inputGradientCheck[i], inputGradientValues[i], string.Format("Failure for input {0}", i));
            }
        }
Example #4
        public void TrainOnline_LinearLayer_CorrectOnePass()
        {
            WeightsMatrix matrix = new WeightsMatrix(new double[, ] {
                { 1, 1 }
            });
            Layer layer = Layer.CreateLinearLayer(matrix);
            TrainingCollection trainingVectors = new TrainingCollection
            {
                new VectorPair(
                    new NetworkVector(new double[] { 0, 0 }),
                    new NetworkVector(new double[] { 1 })
                    ),
                new VectorPair(
                    new NetworkVector(new double[] { 1, 0 }),
                    new NetworkVector(new double[] { 0 })
                    ),
                new VectorPair(
                    new NetworkVector(new double[] { 0, 1 }),
                    new NetworkVector(new double[] { 0 })
                    ),
                new VectorPair(
                    new NetworkVector(new double[] { 1, 1 }),
                    new NetworkVector(new double[] { 1 })
                    )
            };

            Trainer trainer = new Trainer(layer, new SquaredError(), new GradientDescent());

            foreach (TrainingCollection tc in trainingVectors.AsSingletons())
            {
                trainer.Train(tc);
            }
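
            // Tracing the four online steps (assuming a unit learning rate and an error gradient
            // of output - target): after (0,0)->1 the state is W = (1, 1), b = 1; after (1,0)->0
            // it is W = (-1, 1), b = -1; (0,1)->0 leaves it unchanged; and (1,1)->1 ends at
            // W = (1, 3), b = 1, which is what the checks below expect.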

            WeightsMatrix weightsCheck = new WeightsMatrix(new double[, ] {
                { 1, 3 }
            });
            NetworkVector biasesCheck = new NetworkVector(new double[] { 1 });

            Assert.AreEqual(biasesCheck, layer.Biases);
            Assert.AreEqual(weightsCheck, layer.Weights);
        }
Example #5
        public void CanRunTwoLayerNetWithOneInput()
        {
            Layer inputlayer = new Layer(new double[, ] {
                { 1, 1, 1 }, { 1, 1, 1 }
            });
            Layer outputlayer = new Layer(new double[, ] {
                { 1, 1 }
            });
            LayerChain network = new LayerChain();

            network.Add(inputlayer);
            network.Add(outputlayer);

            NetworkVector inputvector = new NetworkVector(new double[] { 1, 0, 0 });

            network.Run(inputvector);

            double output = network.Output.ToArray()[0];
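            // Only the first input is active, so each of the two first-layer neurons outputs 1
            // and the output layer sums them: 1 + 1 = 2.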

            Assert.AreEqual(2, output);
        }
Example #6
        public void BatchTrainCorrectThreePasses_WC()
        {
            WeightsMatrix matrix = new WeightsMatrix(new double[, ] {
                { 1, 1 }
            });
            WeightedCombiner   wc = new WeightedCombiner(matrix);
            TrainingCollection trainingVectors = new TrainingCollection
            {
                new VectorPair(
                    new NetworkVector(new double[] { 0, 0 }),
                    new NetworkVector(new double[] { 1 })
                    ),
                new VectorPair(
                    new NetworkVector(new double[] { 1, 0 }),
                    new NetworkVector(new double[] { 0 })
                    ),
                new VectorPair(
                    new NetworkVector(new double[] { 0, 1 }),
                    new NetworkVector(new double[] { 0 })
                    ),
                new VectorPair(
                    new NetworkVector(new double[] { 1, 1 }),
                    new NetworkVector(new double[] { 1 })
                    )
            };

            Trainer trainer = new Trainer(wc, new SquaredError(), new GradientDescent());

            trainer.Train(trainingVectors);
            trainer.Train(trainingVectors);
            trainer.Train(trainingVectors);
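
            // Three full-batch passes (assuming a unit learning rate and an error gradient of
            // output - target): pass 1 ends at W = (-1, -1), b = -2; pass 2 at W = (7, 7), b = 12;
            // and pass 3 at W = (-37, -37), b = -62, matching the checks below.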

            WeightsMatrix weightsCheck = new WeightsMatrix(new double[, ] {
                { -37, -37 }
            });
            NetworkVector biasesCheck = new NetworkVector(new double[] { -62 });

            Assert.AreEqual(biasesCheck, wc.Biases);
            Assert.AreEqual(weightsCheck, wc.Weights);
        }
Example #7
        public void BackpropagateRunsTwoByThree()
        {
            double[,] weights = new double[, ] {
                { 1, 2, 3 }, { 2, 3, 4 }
            };
            Layer layer = new SigmoidLayer(weights);

            NetworkVector layerinput = new NetworkVector(new double[] { 1, 0, -1 });

            layer.Run(layerinput);

            NetworkVector outputgradient = new NetworkVector(new double[] { 1, 1 });

            layer.BackPropagate(outputgradient);
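
            // The pre-activation is W * x = (-2, -2), and sigma'(-2) = sigma(-2) * (1 - sigma(-2))
            // is roughly 0.10499 for both outputs, so the input gradient W^T * (g .* sigma') is
            // approximately (3, 5, 7) * 0.10499, i.e. the values checked below.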

            double[] inputGradientCheck  = new double[] { 0.31498075621051952, 0.52496792701753248, 0.7349550978245456 };
            double[] inputGradientValues = layer.InputGradient.ToArray();
            for (int i = 0; i < layer.NumberOfInputs; i++)
            {
                Assert.AreEqual(inputGradientCheck[i], inputGradientValues[i], string.Format("Failure for input {0}", i));
            }
        }
Example #8
        public void BackpropagateRunsTwoByThree()
        {
            double[,] weights = new double[, ] {
                { 1, 2, 3 }, { 2, 3, 4 }
            };
            Layer layer = new LinearLayer(weights);

            NetworkVector layerinput = new NetworkVector(new double[] { 1, 0, -1 });

            layer.Run(layerinput);

            NetworkVector outputgradient = new NetworkVector(new double[] { 1, 1 });

            layer.BackPropagate(outputgradient);
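
            // For a linear layer the input gradient is simply W^T * g = (1+2, 2+3, 3+4) = (3, 5, 7).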

            double[] inputGradientCheck  = new double[] { 3, 5, 7 };
            double[] inputGradientValues = layer.InputGradient.ToArray();
            for (int i = 0; i < layer.NumberOfInputs; i++)
            {
                Assert.AreEqual(inputGradientCheck[i], inputGradientValues[i], string.Format("Failure for input {0}", i));
            }
        }
Example #9
        public void BackpropagateRunsWithNonzeroLayerInput()
        {
            double[,] weights = new double[, ] {
                { 1 }
            };
            Layer layer = new LinearLayer(weights);

            NetworkVector layerinput = new NetworkVector(new double[] { 2 });

            layer.Run(layerinput);

            NetworkVector outputgradient = new NetworkVector(new double[] { 1 });

            layer.BackPropagate(outputgradient);

            double[] inputGradientCheck  = new double[] { 1 };
            double[] inputGradientValues = layer.InputGradient.ToArray();
            for (int i = 0; i < layer.NumberOfInputs; i++)
            {
                Assert.AreEqual(inputGradientCheck[i], inputGradientValues[i]);
            }
        }
Example #10
        public void CanBPWC2x3_nonTrivialBatch()
        {
            NetworkMatrix weights = new NetworkMatrix(new double[, ] {
                { 1, 2, 3 }, { 5, 7, 11 }
            });
            NetworkVector         biases         = new NetworkVector(new double[] { 100, 200 });
            BatchWeightedCombiner wc             = new BatchWeightedCombiner(weights, biases);
            NetworkVector         input          = new NetworkVector(new double[] { 1, 2, 3 });
            NetworkVector         outputgradient = new NetworkVector(new double[] { 1, 1 });

            wc.StartBatch();
            for (int i = 0; i < 2; i++)
            {
                wc.Run(input);
                wc.BackPropagate(outputgradient);
            }
            wc.EndBatchAndUpdate();
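
            // Each pass produces output W * x + b = (114, 252) and input gradient W^T * g = (6, 9, 14).
            // The two identical passes accumulate 2 * g * x^T = {{2, 4, 6}, {2, 4, 6}} and 2 * g = (2, 2),
            // so (assuming a unit step) the update drops the weights to {{-1, -2, -3}, {3, 3, 5}}
            // and the biases to (98, 198).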

            NetworkVector outputCheck        = new NetworkVector(new double[] { 114, 252 });
            NetworkVector inputGradientCheck = new NetworkVector(new double[] { 6, 9, 14 });

            double[,] weightsCheck = new double[, ] {
                { -1, -2, -3 }, { 3, 3, 5 }
            };
            double[] biasesCheck = new double[] { 98, 198 };
            Assert.AreEqual(outputCheck, wc.Output);
            Assert.AreEqual(inputGradientCheck, wc.InputGradient);

            for (int i = 0; i < wc.NumberOfOutputs; i++)
            {
                Assert.AreEqual(biasesCheck[i], wc.State.Biases[i]);

                for (int j = 0; j < wc.NumberOfInputs; j++)
                {
                    Assert.AreEqual(weightsCheck[i, j], wc.State.Weights[i, j]);
                }
            }
        }
Example #11
        public void BatchTrainCorrectOnePass_LinearLayer()
        {
            WeightsMatrix matrix = new WeightsMatrix(new double[, ] {
                { 1, 1 }
            });
            Layer layer = Layer.CreateLinearLayer(matrix);
            TrainingCollection trainingVectors = new TrainingCollection
            {
                new VectorPair(
                    new NetworkVector(new double[] { 0, 0 }),
                    new NetworkVector(new double[] { 1 })
                    ),
                new VectorPair(
                    new NetworkVector(new double[] { 1, 0 }),
                    new NetworkVector(new double[] { 0 })
                    ),
                new VectorPair(
                    new NetworkVector(new double[] { 0, 1 }),
                    new NetworkVector(new double[] { 0 })
                    ),
                new VectorPair(
                    new NetworkVector(new double[] { 1, 1 }),
                    new NetworkVector(new double[] { 1 })
                    )
            };

            Trainer trainer = new Trainer(layer, new SquaredError(), new GradientDescent());

            trainer.Train(trainingVectors);
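
            // One full-batch pass: the accumulated gradients over the four pairs are
            // delta-W = (2, 2) and delta-b = 2, so with a unit learning rate the layer ends at
            // W = (-1, -1), b = -2 (assuming an error gradient of output - target).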

            WeightsMatrix weightsCheck = new WeightsMatrix(new double[, ] {
                { -1, -1 }
            });
            NetworkVector biasesCheck = new NetworkVector(new double[] { -2 });

            Assert.AreEqual(biasesCheck, layer.Biases);
            Assert.AreEqual(weightsCheck, layer.Weights);
        }
Example #12
        public void RunProducesCorrectOutput()
        {
            SoftMaxUnit   smu   = new SoftMaxUnit(7);
            NetworkVector input = new NetworkVector(new double[] { 1, 0, 0, 0, 0, 0, 1 });

            NetworkVector output = smu.Run(input);
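
            // Two of the seven inputs are 1 and five are 0, so the softmax denominator is
            // 5 * e^0 + 2 * e^1, giving e/sum for the two 1-entries and 1/sum for the rest.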

            double sum        = 5 + (2 * Math.E);
            double one_value  = Math.E / sum;
            double zero_value = 1 / sum;
            double delta      = 0.000000001;

            double[] outputvalues    = output.ToArray();
            double   outputvaluessum = output.SumValues();

            Assert.AreEqual(1.0, outputvaluessum, delta);
            Assert.AreEqual(one_value, outputvalues[0], delta);
            Assert.AreEqual(one_value, outputvalues[6], delta);
            for (int i = 1; i < smu.NumberOfOutputs - 1; i++)
            {
                Assert.AreEqual(zero_value, outputvalues[i], delta);
            }
        }
Example #13
        public void CanUseBigLinearLayer()
        {
            double[,] weights = new double[2000, 1000];
            double[] input = new double[1000];

            for (int i = 0; i < 1000; i++)
            {
                weights[i, i] = 1.0;
                input[i]      = (double)i;
            }
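
            // The top 1000x1000 block of the weight matrix is the identity and the remaining
            // 1000 rows are all zero, so the first 1000 outputs should echo the input and the
            // last 1000 should be 0.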

            NetworkVector inputvector = new NetworkVector(input);
            Layer         layer       = new LinearLayer(weights);

            layer.Run(inputvector);
            double[] result = layer.Output.ToArray();

            for (int i = 0, j = 1000; i < 1000; i++, j++)
            {
                Assert.AreEqual((double)i, result[i], "Failed for i = " + i);
                Assert.AreEqual(0.0, result[j], "Failed for j = " + j);
            }
        }
Example #14
        public void CanUpdateBatch()
        {
            AdaptationStrategy strategy = new GradientDescent(1.0, 1);

            VectorBatch result = wc_2.Run(input_batch);

            wc_2.BackPropagate(gradient_batch);
            VectorBatch inputGradient = wc_2.InputGradient(gradient_batch);

            wc_2.Update(strategy);


            NetworkVector biasesCheck  = new NetworkVector(new double[] { 8, 7 });
            WeightsMatrix weightsCheck = new WeightsMatrix(new double[, ] {
                { -4, -6, -8 }, { -6, -10, -14 }
            });

            Assert.AreEqual(biasesCheck, wc_2.Biases);
            Assert.AreEqual(weightsCheck, wc_2.Weights);
            for (int i = 0; i < inputGradient.Count; i++)
            {
                Assert.AreEqual(inputgradient_check[i], inputGradient[i]);
            }
        }
Example #15
        public void TrainBatch_SmallChain_CorrectOnePass()
        {
            int inputs        = 3;
            int inputneurons  = 2;
            int outputneurons = 1;

            double[,] inputWeights  = new double[inputneurons, inputs];
            double[,] outputWeights = new double[outputneurons, inputneurons];

            for (int i = 0; i < inputneurons; i++)
            {
                for (int j = 0; j < inputs; j++)
                {
                    inputWeights[i, j] = 1;
                }
            }

            for (int i = 0; i < outputneurons; i++)
            {
                for (int j = 0; j < inputneurons; j++)
                {
                    outputWeights[i, j] = 1;
                }
            }

            Layer InputLayer  = Layer.CreateLinearLayer(new WeightsMatrix(inputWeights), new NetworkVector(inputneurons));
            Layer OutputLayer = Layer.CreateLinearLayer(new WeightsMatrix(outputWeights), new NetworkVector(outputneurons));

            NetComponentChain network = new NetComponentChain();

            network.AddTrainable(InputLayer);
            network.AddTrainable(OutputLayer);


            TrainingCollection trainingVectors = new TrainingCollection
            {
                new VectorPair(
                    new NetworkVector(new double[] { 0, 0, 0 }),
                    new NetworkVector(new double[] { 1 })
                    ),
                new VectorPair(
                    new NetworkVector(new double[] { 1, 0, 0 }),
                    new NetworkVector(new double[] { 0 })
                    ),
                new VectorPair(
                    new NetworkVector(new double[] { 0, 1, 0 }),
                    new NetworkVector(new double[] { 0 })
                    ),
                new VectorPair(
                    new NetworkVector(new double[] { 1, 1, 0 }),
                    new NetworkVector(new double[] { 1 })
                    )
            };

            Trainer trainer = new Trainer(network, new SquaredError(), new GradientDescent());

            trainer.Train(trainingVectors);
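
            // One batch pass (assuming a unit learning rate and an error gradient of
            // output - target) accumulates delta-W = {{5, 5, 0}, {5, 5, 0}} and delta-b = (6, 6)
            // for the input layer, and delta-W = (10, 10), delta-b = 6 for the output layer,
            // which yields the checks below.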

            WeightsMatrix inputWeightsCheck = new WeightsMatrix(new double[, ] {
                { -4, -4, 1 }, { -4, -4, 1 }
            });
            NetworkVector inputBiasesCheck   = new NetworkVector(new double[] { -6, -6 });
            WeightsMatrix outputWeightsCheck = new WeightsMatrix(new double[, ] {
                { -9, -9 }
            });
            NetworkVector outputBiasesCheck = new NetworkVector(new double[] { -6 });

            Assert.AreEqual(inputWeightsCheck, InputLayer.Weights);
            Assert.AreEqual(inputBiasesCheck, InputLayer.Biases);
            Assert.AreEqual(outputWeightsCheck, OutputLayer.Weights);
            Assert.AreEqual(outputBiasesCheck, OutputLayer.Biases);
        }
Example #16
        public void CanRunLinear()
        {
            NetworkVector result = nf_linear.Run(vector_2);

            Assert.AreEqual(vector_2, result);
        }
Example #17
        public void CanRun()
        {
            NetworkVector result = nf_1.Run(vector_1);

            Assert.AreEqual(vector_1, result);
        }
Example #18
        public void AsMatrix()
        {
            NetworkVector  vector = new NetworkVector(new double[] { 1, 2 });
            // Only exercises the conversion to a Matrix<double>; there is no assertion on the result.
            Matrix<double> result = vector.AsMatrix();
        }