Example #1
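Unit test: trains a single-neuron logistic-regression model with DeepCat on a tiny fixed dataset and asserts that the sigmoid prediction, rounded to 8 decimal places, matches a precomputed value.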
        public void LogisticRegression()
        {
            var X = Matrix<double>.Build.DenseOfArray(new double[,] {
                { 1 }, { 2 }
            });

            var Y = Matrix<double>.Build.DenseOfArray(new double[,] {
                { 1 }
            });

            var model = new DeepCat.DeepCat();

            model.Add(new Dense(1, Activations.Sigmoid(), weightInitializer: Initializations.Fixed()));
            model.Compile(X.RowCount, LossFunctions.CrossEntropy(), Optimizers.GradientDescent(0.02));
            model.Fit(X, Y, 1);

            var a = model.Predict(X);

            a[0, 0] = Math.Round(a[0, 0], 8);

            var expectedResult = Matrix<double>.Build.DenseOfArray(new double[,] {
                { 0.59859297 }
            });

            Assert.AreEqual(expectedResult, a); // expected value goes first so failure messages read correctly
        }
Example #2
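Maps an activation name string such as "tanh" or "sigmoid" to the matching Activations factory call and assigns it to the Activation property; unrecognized names leave Activation unchanged.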
 public void SetActivation(string activationType)
 {
     switch (activationType)
     {
     case "tanh":
         Activation = Activations.Tanh();
         break;
     case "tanhAbs":
         Activation = Activations.TanhAbs();
         break;
     case "sigmoid":
         Activation = Activations.Sigmoid();
         break;
     case "sin":
         Activation = Activations.Sin();
         break;
     case "fract":
         Activation = Activations.Fract();
         break;
     case "rescale":
         Activation = Activations.Rescale();
         break;
     case "downscale":
         Activation = Activations.Downscale();
         break;
     case "gaussian":
         Activation = Activations.Gaussian();
         break;
     case "square":
         Activation = Activations.Square();
         break;
     case "abs":
         Activation = Activations.Abs();
         break;
     case "cos":
         Activation = Activations.Cos();
         break;
     case "linear":
         Activation = Activations.Linear();
         break;
     case "random":
         Activation = Activations.Random();
         break;
     }
 }
Example #3
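Resolves a Keras-style activation name to the corresponding Activations call on the given symbol, falling back to a linear activation for unknown names.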
        private KerasSymbol _Call(KerasSymbol x)
        {
            switch (activation)
            {
            case "elu":
                return Activations.Elu(x);

            case "exp":
                return Activations.Exponential(x);

            case "hard_sigmoid":
                return Activations.HardSigmoid(x);

            case "linear":
                return Activations.Linear(x);

            case "relu":
                return Activations.Relu(x);

            case "selu":
                return Activations.Selu(x);

            case "sigmoid":
                return Activations.Sigmoid(x);

            case "softmax":
                return Activations.Softmax(x);

            case "softplus":
                return Activations.Softplus(x);

            case "softsign":
                return Activations.Softsign(x);

            case "tanh":
                return Activations.Tanh(x);

            default:
                return Activations.Linear(x);
            }
        }
Example #4
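Forward pass through a NEAT-style genome: input nodes receive the raw input values, every other node sums its weighted incoming connections and applies a sigmoid, and the states of output nodes are collected into the result array.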
        /// <summary>
        /// Calculates Output for Genome
        /// </summary>
        /// <param name="input">Input for Genome</param>
        /// <returns>Output-Values for Genome</returns>
        public double[] Calculate(double[] input)
        {
            int output = 0;

            Profiler.BeginSample("Set up Network");
            if (inputConnectionsPerNode.Count == 0)
            {
                SetUpNetwork();
            }
            Profiler.EndSample();
            Profiler.BeginSample("CalcNodes");
            for (int i = 0; i < nodesInOrder.Count; i++)
            {
                NodeGene gene = Nodes[nodesInOrder[i]];
                if (gene.Type == NodeType.INPUT) // Input-Nodes are always at the start, as they have the lowest Innovation-Number
                {
                    gene.SetState(input[i]);
                }
                else
                {
                    List<ConnectionGene> inputs = inputConnectionsPerNode[gene];
                    double N = 0;
                    Profiler.BeginSample("HiddenNode");
                    for (int j = 0; j < inputs.Count; j++)
                    {
                        ConnectionGene conn   = inputs[j];
                        NodeGene       inNode = Nodes[conn.In.Innovation]; // Grab Node from Nodes to get proper State (We're using Structs)
                        N += conn.Weight * inNode.State;
                    }
                    Profiler.EndSample();
                    gene.SetState(Activations.Sigmoid(N));
                    if (gene.Type == NodeType.OUTPUT)
                    {
                        outputCache[output++] = gene.State; // advance to the next output slot
                    }
                }
            }
            Profiler.EndSample();
            return outputCache;
        }
Example #5
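Console demo: builds a three-layer DeepCat network (two ReLU hidden layers and a sigmoid output), trains it on random data, and runs a prediction on a single random sample.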
        static void Main(string[] args)
        {
            var X = Matrix<double>.Build.Random(5, 100);

            var Y = Matrix<double>.Build.Random(1, 100);

            var test = Matrix<double>.Build.Random(5, 1);

            var model = new DeepCat();

            model.Add(new Dense(5, Activations.Relu(), weightInitializer: Initializations.RandomNormal()));
            model.Add(new Dense(5, Activations.Relu(), weightInitializer: Initializations.RandomNormal()));
            model.Add(new Dense(1, Activations.Sigmoid()));

            model.Compile(X.RowCount, LossFunctions.CrossEntropy(), Optimizers.GradientDescent(0.002));

            model.Fit(X, Y, 100);
            var prediction = model.Predict(test);
        }