public void HyperNormalize()
        {
            // Every hyper-normalized value must lie within [-1, 1], and the
            // observed extremes (scaled by SAFE_KOEFF) should sit close to
            // the interval bounds, i.e. the normalizer uses the full range.
            double[] values = hyper_normalizer.GetValues();
            double observedMin = values[0];
            double observedMax = values[0];

            foreach (var v in values)
            {
                Assert.GreaterOrEqual(v, -1);
                Assert.LessOrEqual(v, 1);
                observedMin = Math.Min(observedMin, v);
                observedMax = Math.Max(observedMax, v);
            }

            MyAssert.CloseTo(observedMin * DataNormalizer.SAFE_KOEFF, -1, 0.025);
            MyAssert.CloseTo(observedMax * DataNormalizer.SAFE_KOEFF, 1, 0.025);
        }
        public void TestCanSetAnswer()
        {
            Neuron n = new Neuron();

            n.Connect(new BiasNeuron(), 1);
            InputNeuron input = new InputNeuron();

            n.Connect(input, 1);
            n.SetWeight(0, 1);
            input.Input = 1;
            var desired = 1 / (1 + Math.Pow(Math.E, -2));

            n.SetAnswer(desired);
            n.PropagateBackwards();
            MyAssert.CloseTo(n.GetDelta(), 0);

            /*
             * 1. If answer was set, than calculate like last layer,
             *  otherwise require theta and delta.
             * 2. In back prop every layer calculates it's values and
             *  sets theta+delta for every connected neuron.
             */
        }
        public void TestDerivative()
        {
            //Fails with square error function
            NNetwork n = NNetwork.SigmoidNetwork(new int[] { 2, 2, 1 });

            n.RandomizeWeights(-1, 10);
            Random random = new Random();
            double x;
            double y;
            double z;

            x = random.NextDouble();
            y = random.NextDouble();
            z = some_function(x, y);
            n.SetInput(new double[] { x, y });
            n.SetAnswers(new double[] { z });
            n.BackPropagate();
            double[] ders = n.Derivatives();
            double[] ests = n.Estimation(0.0001);
            for (int i = 0; i < ders.Length; i++)
            {
                MyAssert.CloseTo(ders[i], ests[i], 0.0001);
            }
        }
        public void TestXNORAuto()
        {
            NNetwork n = XorNetwork();

            double[] input = new double[] { 0, 0 };
            n.SetInput(input);
            double[] output = n.GetOutput();
            MyAssert.CloseTo(output[0], 1);

            input = new double[] { 0, 1 };
            n.SetInput(input);
            output = n.GetOutput();
            MyAssert.CloseTo(output[0], 0);

            input = new double[] { 1, 0 };
            n.SetInput(input);
            output = n.GetOutput();
            MyAssert.CloseTo(output[0], 0);

            input = new double[] { 1, 1 };
            n.SetInput(input);
            output = n.GetOutput();
            MyAssert.CloseTo(output[0], 1);
        }
// Example #5
 /// <summary>
 /// Parses the cola data file and checks both the entry count and the
 /// value of the last entry.
 /// </summary>
 public void TestParseFile()
 {
     double[] entries = parser.ParseFile(path_cola);

     Assert.AreEqual(1272, entries.Length);
     // Fix: pass (actual, expected) to MyAssert.CloseTo, matching the
     // argument order used by every other CloseTo call in this file.
     MyAssert.CloseTo(entries[entries.Length - 1], 29.33);
 }