ActivationGaussian
Inheritance: IActivationFunction
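Every snippet on this page exercises the same small surface of the interface: ActivationFunction, DerivativeFunction, HasDerivative, and Clone. The sketch below is that surface as inferred from the calls in the examples, not the full Encog definition. Note that HasDerivative is read as a property in Examples #1 and #2 but called as a method in Example #3, which reflects different Encog versions.

        using System;

        // Sketch of the interface surface inferred from the examples below;
        // member names come from the calls made in the snippets, and the
        // real Encog interface has additional members.
        public interface IActivationFunction : ICloneable
        {
            // Applies the activation in place to size elements of x,
            // starting at index start.
            void ActivationFunction(double[] x, int start, int size);

            // Derivative used during training; b is the pre-activation
            // value and a the post-activation value.
            double DerivativeFunction(double b, double a);

            // A property here; some Encog versions expose this as a
            // method, HasDerivative().
            bool HasDerivative { get; }
        }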
Example #1
        public void TestGaussian()
        {
            var activation = new ActivationGaussian();
            Assert.IsTrue(activation.HasDerivative);

            var clone = (ActivationGaussian) activation.Clone();
            Assert.IsNotNull(clone);

            double[] input = {0.0};

            activation.ActivationFunction(input, 0, 1);

            Assert.AreEqual(1.0, input[0], 0.1);
        }
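The expected value of 1.0 is simply the peak of the bell curve: the default ActivationGaussian is centered at 0, so an input of 0.0 maps to the peak. A stand-alone sketch of the presumed function is below; the parameter names center, peak, and width are assumptions read off the three-argument constructor in Example #3, not documented Encog fields.

        // Hypothetical stand-alone Gaussian consistent with the assertions
        // on this page: f(x) = peak * e^(-(x - center)^2 / (2 * width^2)).
        // The defaults give f(0.0) = 1.0, as asserted above; the
        // (0.0, 0.5, 1.0) constructor in Example #3 gives f(0.0) = 0.5.
        static double Gaussian(double x, double center = 0.0,
                               double peak = 1.0, double width = 1.0)
        {
            double d = x - center;
            return peak * Math.Exp(-(d * d) / (2.0 * width * width));
        }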
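Each example is a bare method body; to run one it needs the usual MSTest scaffolding around it. A minimal wrapper is sketched below. The using directives are assumptions; in particular, the namespace that holds the activation classes varies between Encog releases.

        using Microsoft.VisualStudio.TestTools.UnitTesting;
        using Encog.Engine.Network.Activation; // assumed Encog namespace

        [TestClass]
        public class ActivationGaussianTests
        {
            [TestMethod]
            public void TestGaussian()
            {
                // paste the body of any example from this page here
            }
        }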
Example #2
        public void TestGaussian()
        {
            var activation = new ActivationGaussian();

            Assert.IsTrue(activation.HasDerivative);

            var clone = activation.Clone();

            Assert.IsInstanceOfType(clone, typeof(ActivationGaussian));

            double[] input = { 0.0 };

            activation.ActivationFunction(input, 0, 1);

            Assert.AreEqual(1.0, input[0], 0.1);
        }
Example #3
        public void TestGaussian()
        {
            var activation = new ActivationGaussian(0.0, 0.5, 1.0);
            Assert.IsTrue(activation.HasDerivative());

            var clone = (ActivationGaussian) activation.Clone();
            Assert.IsNotNull(clone);

            double[] input = {0.0};

            activation.ActivationFunction(input, 0, 1);

            Assert.AreEqual(0.5, input[0], 0.1);

            // test the derivative; the expected value is about -0.33

            input[0] = activation.DerivativeFunction(input[0],input[0]);
            Assert.AreEqual(-33, (int) (input[0]*100), 0.1);
        }
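The expected value of -33 looks like a magic number, so here is the arithmetic spelled out. The derivative form used below, p * w^2 * (w^2 * x^2 - 1) * e^(-w^2 * x^2 / 2), is inferred from the assertion rather than copied from the Encog source, but it reproduces the expected value exactly.

            // Reconstructing the expected value with the assumed derivative
            // form; peak, width and x match the test above.
            double peak = 0.5, width = 1.0, x = 0.5;
            double d = peak * width * width
                       * (width * width * x * x - 1.0)
                       * Math.Exp(-0.5 * width * width * x * x);
            // d = 0.5 * (0.25 - 1) * e^(-0.125), approximately -0.331,
            // so (int)(d * 100) == -33, which is what the test asserts.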
Example #4
 void AddLayers(List<LayerConfig> gen)
 {
     foreach (var g in gen)
     {
         IActivationFunction act;
          switch (g.ActivationType)
         {
             case 0:
                 act = new ActivationBiPolar();
                 break;
             case 1:
                  act = new ActivationBipolarSteepenedSigmoid();
                 break;
             case 2:
                 act = new ActivationClippedLinear();
                 break;
             case 3:
                 act = new ActivationCompetitive();
                 break;
             case 4:
                 act = new ActivationElliott();
                 break;
             case 5:
                 act = new ActivationElliottSymmetric();
                 break;
             case 6:
                 act = new ActivationGaussian();
                 break;
             case 7:
                 act = new ActivationLinear();
                 break;
             case 8:
                 act = new ActivationLOG();
                 break;
             case 9:
                 act = new ActivationRamp();
                 break;
              case 10:
                  // Note: this duplicates case 9 (ActivationRamp); the
                  // original switch probably intended a different
                  // activation function here.
                  act = new ActivationRamp();
                  break;
             case 11:
                 act = new ActivationSigmoid();
                 break;
             case 12:
                 act = new ActivationSIN();
                 break;
             case 13:
                 act = new ActivationSoftMax();
                 break;
             case 14:
                 act = new ActivationSteepenedSigmoid();
                 break;
             case 15:
                 act = new ActivationStep();
                 break;
             case 16:
                 act = new ActivationTANH();
                 break;
             default:
                 act = new ActivationSoftMax();
                 break;
         }
         network.AddLayer(new BasicLayer(act, g.hasBias, g.neurons));
     }
 }
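AddLayers depends on a LayerConfig type and a network field that are not shown on this page. The definitions below are illustrative assumptions whose field names mirror the accesses in the method (g.ActivationType, g.hasBias, g.neurons), followed by a sample call that builds a three-layer network using codes from the switch.

 // Hypothetical config type; the real definition is not shown here.
 public class LayerConfig
 {
     public int ActivationType;
     public bool hasBias;
     public int neurons;
 }

 // Illustrative call: 7 = ActivationLinear for the input layer,
 // 16 = ActivationTANH for the hidden layer, and 11 = ActivationSigmoid
 // for the output layer, per the switch above.
 AddLayers(new List<LayerConfig>
 {
     new LayerConfig { ActivationType = 7,  hasBias = true,  neurons = 2 },
     new LayerConfig { ActivationType = 16, hasBias = true,  neurons = 4 },
     new LayerConfig { ActivationType = 11, hasBias = false, neurons = 1 }
 });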