Exemplo n.º 1
0
        /// <summary>
        /// Maps the configured <c>ActivationFunction</c> enum value to a freshly
        /// constructed <c>IActivationFunction</c> implementation.
        /// </summary>
        /// <returns>A new activation-function instance matching the configured type.</returns>
        /// <exception cref="ArgumentOutOfRangeException">
        /// Thrown when the configured value is not one of the supported
        /// <c>ActivationFunctionType</c> members handled below.
        /// </exception>
        public IActivationFunction GetActivationFunction()
        {
            switch (ActivationFunction)
            {
            case ActivationFunctionType.Linear:
                return new ActivationLinear();

            case ActivationFunctionType.Sigmoid:
                return new ActivationSigmoid();

            case ActivationFunctionType.TanH:
                return new ActivationTANH();

            case ActivationFunctionType.SoftMax:
                return new ActivationSoftMax();

            case ActivationFunctionType.ReLU:
                return new ActivationReLU();

            default:
                // Include the member name and offending value so callers can
                // diagnose the failure instead of getting a bare exception.
                throw new ArgumentOutOfRangeException(
                    nameof(ActivationFunction),
                    ActivationFunction,
                    "Unsupported activation function type.");
            }
        }
Exemplo n.º 2
0
            /// <summary>
            /// Appends one <c>BasicLayer</c> to <c>network</c> per entry in
            /// <paramref name="gen"/>, selecting the layer's activation function
            /// from the numeric <c>ActivationType</c> code (0-16). Unrecognized
            /// codes fall back to <c>ActivationSoftMax</c>.
            /// </summary>
            /// <param name="gen">Ordered layer configurations; each supplies the
            /// activation code, bias flag, and neuron count for one layer.</param>
            void AddLayers(List <LayerConfig> gen)
            {
                foreach (var g in gen)
                {
                    // NOTE: a previous version pre-assigned ActivationBiPolar for
                    // code 0 before this switch; that assignment was dead code
                    // because case 0 below produces the same instance.
                    IActivationFunction act;
                    switch (g.ActivationType)
                    {
                    case 0:
                        act = new ActivationBiPolar();
                        break;

                    case 1:
                        act = new ActivationBipolarSteepenedSigmoid();
                        break;

                    case 2:
                        act = new ActivationClippedLinear();
                        break;

                    case 3:
                        act = new ActivationCompetitive();
                        break;

                    case 4:
                        act = new ActivationElliott();
                        break;

                    case 5:
                        act = new ActivationElliottSymmetric();
                        break;

                    case 6:
                        act = new ActivationGaussian();
                        break;

                    case 7:
                        act = new ActivationLinear();
                        break;

                    case 8:
                        act = new ActivationLOG();
                        break;

                    case 9:
                        act = new ActivationRamp();
                        break;

                    case 10:
                        // NOTE(review): codes 9 and 10 both map to ActivationRamp.
                        // Possibly a copy/paste slip for a different activation —
                        // confirm against the enum/table this code was written from.
                        act = new ActivationRamp();
                        break;

                    case 11:
                        act = new ActivationSigmoid();
                        break;

                    case 12:
                        act = new ActivationSIN();
                        break;

                    case 13:
                        act = new ActivationSoftMax();
                        break;

                    case 14:
                        act = new ActivationSteepenedSigmoid();
                        break;

                    case 15:
                        act = new ActivationStep();
                        break;

                    case 16:
                        act = new ActivationTANH();
                        break;

                    default:
                        // Silent fallback for out-of-range codes; kept for
                        // backward compatibility with existing configurations.
                        act = new ActivationSoftMax();
                        break;
                    }
                    network.AddLayer(new BasicLayer(act, g.hasBias, g.neurons));
                }
            }