Inheritance: IActivationFunction
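All of the examples below drive Encog's ActivationTANH through the IActivationFunction members they exercise: ActivationFunction squashes a slice of an array in place, DerivativeFunction returns the derivative, HasDerivative reports whether one exists, and Clone copies the function. A minimal sketch of that polymorphic use, assuming the usual Encog.Engine.Network.Activation namespace and a hypothetical surrounding program:

 using System;
 using Encog.Engine.Network.Activation;

 class ActivationDemo
 {
     static void Main()
     {
         // Any Encog activation can be handled through the interface.
         IActivationFunction act = new ActivationTANH();

         double[] values = { -2.0, 0.0, 2.0 };
         // Transforms values[0..3) in place.
         act.ActivationFunction(values, 0, values.Length);
         // values is now roughly { -0.96, 0.0, 0.96 }: tanh maps into (-1, 1).
         Console.WriteLine(string.Join(", ", values));
     }
 }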
Example #1
 public EncogTANHActivation()
     : base()
 {
     // Wrap Encog's hyperbolic tangent and record its output range [-1, 1].
     activationFunction = new ActivationTANH();
     activationLevel = 1.0;
     activationMin = -1.0;
     activationMax = 1.0;
 }
Example #2
        public void TestTANH()
        {
            var activation = new ActivationTANH();
            Assert.IsTrue(activation.HasDerivative);

            var clone = (ActivationTANH) activation.Clone();
            Assert.IsNotNull(clone);

            double[] input = {0.0};

            activation.ActivationFunction(input, 0, 1);

            Assert.AreEqual(0.0, input[0], 0.1);

            // Test the derivative: tanh'(0) = 1 - tanh(0)^2 = 1
            input[0] = activation.DerivativeFunction(input[0],input[0]);
            Assert.AreEqual(1.0, input[0], 0.1);
        }
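The final assertion passes because tanh'(x) = 1 - tanh(x)^2, which is exactly 1 at x = 0; DerivativeFunction takes the pre- and post-activation values, and for tanh the result depends only on the latter. A standalone check of that identity (plain C#, no Encog dependency):

 using System;

 class TanhDerivativeCheck
 {
     static void Main()
     {
         double a = Math.Tanh(0.0);       // post-activation value at x = 0
         double derivative = 1.0 - a * a; // tanh'(x) = 1 - tanh(x)^2
         Console.WriteLine(derivative);   // prints 1, matching the assert above
     }
 }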
Example #3
        public void TestTANH()
        {
            var activation = new ActivationTANH();

            Assert.IsTrue(activation.HasDerivative);

            var clone = activation.Clone();

            Assert.IsInstanceOfType(clone, typeof(ActivationTANH));

            double[] input = { 0.0 };

            activation.ActivationFunction(input, 0, 1);
            Assert.AreEqual(0.0, input[0], 0.1);

            input[0] = activation.DerivativeFunction(input[0], input[0]);
            Assert.AreEqual(1.0, input[0], 0.1);
        }
Example #4
        public void TestTANH()
        {
            var activation = new ActivationTANH();

            Assert.IsTrue(activation.HasDerivative);

            var clone = (ActivationTANH)activation.Clone();

            Assert.IsNotNull(clone);

            double[] input = { 0.0 };

            activation.ActivationFunction(input, 0, 1);

            Assert.AreEqual(0.0, input[0], 0.1);

            // Test the derivative: tanh'(0) = 1 - tanh(0)^2 = 1
            input[0] = activation.DerivativeFunction(input[0], input[0]);
            Assert.AreEqual(1.0, input[0], 0.1);
        }
Example #5
 public FlatNetwork(int input, int hidden1, int hidden2, int output, bool tanh)
 {
     // De-obfuscated from decompiler output: builds a flat network with zero,
     // one, or two hidden layers. The input layer is linear; the hidden and
     // output layers use tanh or sigmoid depending on the flag.
     IActivationFunction linearAct = new ActivationLinear();
     IActivationFunction act = tanh
         ? (IActivationFunction) new ActivationTANH()
         : new ActivationSigmoid();
     FlatLayer[] layerArray;

     if (hidden1 == 0 && hidden2 == 0)
     {
         // No hidden layers: input -> output.
         layerArray = new[]
         {
             new FlatLayer(linearAct, input, 1.0),
             new FlatLayer(act, output, 0.0)
         };
     }
     else if (hidden1 == 0 || hidden2 == 0)
     {
         // Exactly one hidden layer: use whichever size is non-zero.
         int count = Math.Max(hidden1, hidden2);
         layerArray = new[]
         {
             new FlatLayer(linearAct, input, 1.0),
             new FlatLayer(act, count, 1.0),
             new FlatLayer(act, output, 0.0)
         };
     }
     else
     {
         // Two hidden layers: input -> hidden1 -> hidden2 -> output.
         layerArray = new[]
         {
             new FlatLayer(linearAct, input, 1.0),
             new FlatLayer(act, hidden1, 1.0),
             new FlatLayer(act, hidden2, 1.0),
             new FlatLayer(act, output, 0.0)
         };
     }

     this._isLimited = false;
     this._connectionLimit = 0.0;
     this.Init(layerArray);
 }
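Assuming this is Encog's Encog.Neural.Flat.FlatNetwork, whose Compute(double[], double[]) fills an output array from an input array, a short usage sketch (the sizes are illustrative):

 // 2 inputs, one hidden layer of 3 neurons, 1 output, tanh activations.
 var network = new FlatNetwork(2, 3, 0, 1, true);

 double[] input = { 0.5, -0.5 };
 var output = new double[1];
 network.Compute(input, output); // the network is untrained, so the value is arbitrary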
Example #6
        /// <summary>
        /// Initializes the neural network prior to training. The activation function
        /// for the output layer, along with its maximum and minimum output levels,
        /// is also chosen here.
        /// </summary>
        /// <returns>The configured, untrained network.</returns>
        private BasicNetwork PrepareNetwork()
        {
            IActivationFunction outputLayerActivation;

            EncogActivation encogActivation = (EncogActivation)base.activationFunction;

            _activationMin = base.activationFunction.activationMin;
            _activationMax = base.activationFunction.activationMax;

            // Check whether the selected activation function is suitable for the
            // output layer and replace it if needed. The activation function in the
            // last layer should produce outputs within [0, 1] or [-1, 1].

            if (_activationMin < -1 && _activationMax > 1)
            {
                //We will use a TANH activation function for the output layer
                outputLayerActivation = new ActivationTANH();
                _activationMin = -1;
                _activationMax = 1;
            }

            else if (_activationMin == 0 && _activationMax > 1)
            {
                //We will use a Sigmoid activation function for the output layer
                outputLayerActivation = new ActivationSigmoid();
                _activationMin = 0;
                _activationMax = 1;
            }

            else
            {
                outputLayerActivation = ((EncogActivation)base.activationFunction).CreateInstance();
            }

            BasicNetwork network = new BasicNetwork();

            if (autoHiddenLayerSize)
                _actualHiddenLayerSize = inputDim * hiddenLayerSizeFactor;
            else _actualHiddenLayerSize = hiddenLayerSize;

            network.AddLayer(new BasicLayer(null, true, inputDim));
            network.AddLayer(new BasicLayer(encogActivation.CreateInstance(), true, _actualHiddenLayerSize));
            network.AddLayer(new BasicLayer(outputLayerActivation, false, outputDim));
            network.Structure.FinalizeStructure();
            network.Reset();

            return network;
        }
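The returned network is ready for training. A sketch of how it might be fed to Encog's resilient propagation trainer from inside the same class (the XOR data and the stopping threshold are illustrative, not part of the original):

 using Encog.ML.Data.Basic;
 using Encog.Neural.Networks.Training.Propagation.Resilient;

 double[][] input = { new[] { 0.0, 0.0 }, new[] { 0.0, 1.0 },
                      new[] { 1.0, 0.0 }, new[] { 1.0, 1.0 } };
 double[][] ideal = { new[] { 0.0 }, new[] { 1.0 }, new[] { 1.0 }, new[] { 0.0 } };

 BasicNetwork network = PrepareNetwork();
 var trainingSet = new BasicMLDataSet(input, ideal);
 var trainer = new ResilientPropagation(network, trainingSet);

 do
 {
     trainer.Iteration();
 } while (trainer.Error > 0.01);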
Example #7
 void AddLayers(List<LayerConfig> gen)
 {
     foreach (var g in gen)
     {
         IActivationFunction act;
         switch (g.ActivationType)
         {
             case 0:
                 act = new ActivationBiPolar();
                 break;
             case 1:
                 act = new ActivationBipolarSteepenedSigmoid ();
                 break;
             case 2:
                 act = new ActivationClippedLinear();
                 break;
             case 3:
                 act = new ActivationCompetitive();
                 break;
             case 4:
                 act = new ActivationElliott();
                 break;
             case 5:
                 act = new ActivationElliottSymmetric();
                 break;
             case 6:
                 act = new ActivationGaussian();
                 break;
             case 7:
                 act = new ActivationLinear();
                 break;
             case 8:
                 act = new ActivationLOG();
                 break;
             case 9:
                 act = new ActivationRamp();
                 break;
             case 10:
                 // NOTE: duplicates case 9; possibly intended for a different function.
                 act = new ActivationRamp();
                 break;
             case 11:
                 act = new ActivationSigmoid();
                 break;
             case 12:
                 act = new ActivationSIN();
                 break;
             case 13:
                 act = new ActivationSoftMax();
                 break;
             case 14:
                 act = new ActivationSteepenedSigmoid();
                 break;
             case 15:
                 act = new ActivationStep();
                 break;
             case 16:
                 act = new ActivationTANH();
                 break;
             default:
                 act = new ActivationSoftMax();
                 break;
         }
         network.AddLayer(new BasicLayer(act, g.hasBias, g.neurons));
     }
 }
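A usage sketch, assuming LayerConfig exposes the settable fields the loop reads (ActivationType, hasBias, neurons) and that network is a BasicNetwork field of the same class; the structure must be finalized before the network is used:

 var gen = new List<LayerConfig>
 {
     // Activation codes follow the switch above; the sizes are illustrative.
     new LayerConfig { ActivationType = 7,  hasBias = true,  neurons = 4 }, // linear input
     new LayerConfig { ActivationType = 16, hasBias = true,  neurons = 6 }, // tanh hidden
     new LayerConfig { ActivationType = 11, hasBias = false, neurons = 2 }  // sigmoid output
 };

 AddLayers(gen);
 network.Structure.FinalizeStructure();
 network.Reset();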