Example No. 1
        static void Main(string[] args)
        {
            NeuralNetwork NN = new NeuralNetwork(new int[] { 4, 7, 3 }, LearningRate: 0.05, Momentum: 0.001, Decay: 0.0001);

            double[][] entries = getInputs(true);
            double[][] learningData, expectedOutputData, testingData, testingDataOutputData;
            int        testingDataElementsCount = (int)(entries.Length * 0.20);

            Console.WriteLine("Shuffle data and create testing set");
            Shuffle(entries, testingDataElementsCount, out learningData, out expectedOutputData, out testingData, out testingDataOutputData);

            Console.WriteLine("Training");
            NN.Train(learningData, expectedOutputData, MaxEpoches: 2000, MSE: 0.2f);
            Console.WriteLine("Training finished");

            Console.WriteLine("Processing testing cases");
            int         rightAnswers = 0;
            NeuronLayer result;

            for (int i = 0; i < testingData.Length; i++)
            {
                double[] entry = testingData[i];
                result = NN.FeedForward(new double[] { entry[0], entry[1], entry[2], entry[3] }.GetEnumerator());
                double maxVal           = double.MinValue;
                int    maxValIndex      = 0;
                int    rightAnswerIndex = 0;
                for (int j = 0; j < result.Capacity; j++)
                {
                    if (result[j].activation > maxVal)
                    {
                        maxVal      = result[j].activation;
                        maxValIndex = j;
                    }
                    if (testingDataOutputData[i][j] == 1)
                    {
                        rightAnswerIndex = j;
                    }
                }
                if (maxValIndex == rightAnswerIndex)
                {
                    rightAnswers++;
                }
                else
                {
                    for (int j = 0; j < result.Capacity; j++)
                    {
                        Console.Write(String.Format("{0} = {1};", Math.Round(result[j].activation, 2), testingDataOutputData[i][j]));
                    }
                    Console.WriteLine();
                }
            }
            Console.WriteLine(String.Format("Network accuracy {0:p}", 1f * rightAnswers / testingDataElementsCount));
            Console.Read();
        }
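The getInputs and Shuffle helpers are not shown above. Below is a minimal sketch of what Shuffle might look like, assuming each row of entries holds the 4 input features followed by a 3-element one-hot label (matching the 4-7-3 topology passed to the NeuralNetwork constructor) and that System.Linq is in scope; the column layout and split logic are assumptions, not the library's actual helper.

        // Hypothetical helper: shuffles the rows, carves off a test set, and splits
        // each row into inputs (first 4 values) and a one-hot label (last 3 values).
        static void Shuffle(double[][] entries, int testingCount,
                            out double[][] learningData, out double[][] expectedOutputData,
                            out double[][] testingData, out double[][] testingDataOutputData)
        {
            Random rng = new Random();
            double[][] shuffled = entries.OrderBy(_ => rng.Next()).ToArray();

            int trainCount = shuffled.Length - testingCount;
            learningData          = new double[trainCount][];
            expectedOutputData    = new double[trainCount][];
            testingData           = new double[testingCount][];
            testingDataOutputData = new double[testingCount][];

            for (int i = 0; i < shuffled.Length; i++)
            {
                double[] inputs = shuffled[i].Take(4).ToArray();
                double[] label  = shuffled[i].Skip(4).ToArray();
                if (i < trainCount)
                {
                    learningData[i]       = inputs;
                    expectedOutputData[i] = label;
                }
                else
                {
                    testingData[i - trainCount]           = inputs;
                    testingDataOutputData[i - trainCount] = label;
                }
            }
        }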
Example No. 2
        private static void Main(string[] args)
        {
            double[,] inputArray = new double[, ] {
                { 1 }, { 0 }
            };
            double[,] inputArray1 = new double[, ] {
                { 0 }, { 1 }
            };
            double[,] inputArray2 = new double[, ] {
                { 0 }, { 0 }
            };
            double[,] inputArray3 = new double[, ] {
                { 1 }, { 1 }
            };
            List <Tuple <double[, ], double[, ]> > trainingData = new List <Tuple <double[, ], double[, ]> >();

            double[,] outputArray = new double[, ] {
                { 1 }
            };
            double[,] outputArray1 = new double[, ] {
                { 1 }
            };
            double[,] outputArray2 = new double[, ] {
                { 0 }
            };
            double[,] outputArray3 = new double[, ] {
                { 0 }
            };
            NeuralNetwork nn = new NeuralNetwork(2, 1);

            trainingData.Add(new Tuple <double[, ], double[, ]>(inputArray, outputArray));
            trainingData.Add(new Tuple <double[, ], double[, ]>(inputArray1, outputArray1));
            trainingData.Add(new Tuple <double[, ], double[, ]>(inputArray2, outputArray2));
            trainingData.Add(new Tuple <double[, ], double[, ]>(inputArray3, outputArray3));

            foreach (var trainingSet in trainingData)
            {
                nn.Train(trainingSet.Item1, trainingSet.Item2, 5);
            }

            Matrix guess = nn.Guess(inputArray);

            Console.WriteLine(String.Format("Guess:{0}", guess[0][0].data));
            Console.ReadLine();
        }
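The four training pairs encode XOR, so a quick way to inspect the trained network is to run Guess over every input and compare it with the expected output. The loop below only reuses the calls already shown in this example:

            // Check all four XOR cases against the trained network.
            foreach (var pair in trainingData)
            {
                Matrix result = nn.Guess(pair.Item1);
                Console.WriteLine(String.Format("({0}, {1}) -> expected {2}, guess {3}",
                    pair.Item1[0, 0], pair.Item1[1, 0], pair.Item2[0, 0], result[0][0].data));
            }

Whether the two-argument NeuralNetwork constructor adds a hidden layer is not visible here; a network with no hidden layer cannot represent XOR, so poor guesses would not necessarily indicate a training bug.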
Example No. 3
        static void Main(string[] args)
        {
            var activationFunction = new ActivationFunction_Sigmoid();
            var weightInitilizer   = new WeightInitilizer_Random();
            var lossFunction       = new LossFunction_Variance();
            var learnRate          = 0.5f;

            var nn = new NeuralNetwork(learnRate, activationFunction, weightInitilizer, lossFunction);

            nn.AddLayer(2);
            nn.AddLayer(2);

            while (true)
            {
                var input = new double[] { 0.1, 0.9 };
                var real  = new double[] { 0.9, 0.1 };
                nn.Train(input, real);
                Console.Write(nn.ToString());
                Console.ReadLine();
            }
        }
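The implementation of ActivationFunction_Sigmoid is not shown. Presumably it computes the logistic sigmoid; sketched here as plain static methods for reference, independent of whatever interface the library actually uses:

        // Logistic sigmoid and its derivative, the usual choice for a sigmoid activation:
        //   sigma(x) = 1 / (1 + e^-x),   sigma'(x) = sigma(x) * (1 - sigma(x))
        static double Sigmoid(double x) => 1.0 / (1.0 + Math.Exp(-x));

        static double SigmoidDerivative(double x)
        {
            double s = Sigmoid(x);
            return s * (1.0 - s);
        }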
Example No. 4
        public static void GAN()
        {
            //Generator genFunc = new Generator();
            //Discriminator disc = new Discriminator();
            NeuralNetwork generator     = new NeuralNetwork(new int[] { 100, 128, 256, 256, 512, 2048, 48 * 48 * 3 }, new IActivationFunction[] { null, new Tanh(), new Tanh(), new Tanh(), new Tanh(), new Tanh(), new ScaledSoftStep() }, new CrossEntropy(), new SGD());
            NeuralNetwork discriminator = new NeuralNetwork(new int[] { 48 * 48 * 3, 2048, 512, 256, 256, 128, 1 }, new IActivationFunction[] { null, new Tanh(), new Tanh(), new Tanh(), new Tanh(), new Tanh(), new ScaledSoftStep() }, new CrossEntropy(), new SGD());

            Random rng = new Random(0);

            real_in = new float[1000][];
            for (int i = 0; i < real_in.Length; i++)
            {
                real_in[i] = new float[48 * 48 * 3];
                readMaki(i);
            }

            float max_gen_res = float.NegativeInfinity;

            for (int q = 0; q < 40; q++)
            {
                float[] Gz = null;
                for (int i = 0; i < real_in.Length; i++)
                {
                    for (int j = 0; j < generator_input.Length; j++)
                    {
                        generator_input[j] = (float)rng.NextDouble();// NextGaussian(0, System.Math.Pow(100, 0.5f)) * 1;
                    }
                    Gz = generator.Activate(generator_input);

                    //Train discriminator
                    var(Dz_a, Dz_z)             = discriminator.Train(real_in[i]);
                    var(Dz_nabla_w, Dz_nabla_b) = discriminator.Solver.Solve(Dz_a, Dz_z, new Vector(new float[] { 1 }));
                    (discriminator.Weights, discriminator.Biases) = discriminator.Optimizer.Optimize(discriminator.Weights, discriminator.Biases, Dz_nabla_w, Dz_nabla_b, 3f / 100);

                    var(DGz_a, DGz_z)             = discriminator.Train(Gz);
                    var(DGz_nabla_w, DGz_nabla_b) = discriminator.Solver.Solve(DGz_a, DGz_z, new Vector(new float[] { 0 }));
                    (discriminator.Weights, discriminator.Biases) = discriminator.Optimizer.Optimize(discriminator.Weights, discriminator.Biases, DGz_nabla_w, DGz_nabla_b, 3f / 100);

                    //Train Generator
                    //Now compute the error from the output of D, but don't change anything until G
                    NeuralNetwork chained = new NeuralNetwork(generator, discriminator);
                    var(Gz_a_ex, Gz_z_ex)             = chained.Train(generator_input);
                    var(Gz_nabla_w_ex, Gz_nabla_b_ex) = chained.Solver.Solve(Gz_a_ex, Gz_z_ex, new Vector(new float[] { 1 }));

                    // Take only the generator's share of the chained network's gradients.
                    // (ArraySegment<T>.Array would return the full underlying array, including the
                    // discriminator's gradients, so the segment is materialized instead.)
                    Matrix[] Gz_nabla_w = new ArraySegment <Matrix>(Gz_nabla_w_ex, 0, generator.LayerCount).ToArray();
                    Vector[] Gz_nabla_b = new ArraySegment <Vector>(Gz_nabla_b_ex, 0, generator.LayerCount).ToArray();
                    (generator.Weights, generator.Biases) = generator.Optimizer.Optimize(generator.Weights, generator.Biases, Gz_nabla_w, Gz_nabla_b, 3f / 50);

                    if ((i % 25 == 0))
                    {
                        imsave(Gz, i);
                        max_gen_res = DGz_a.Last()[0];
                    }


                    Console.WriteLine($"[{i}]Discriminator Real: {Dz_a.Last()[0]} Fake: {DGz_a.Last()[0]}");
                }
                imsave(Gz, q * 1000);
                Console.WriteLine($"Epoch {q} Finished.");
            }

            SaveWeights(generator.Weights, "gen_weights.bin");
            SaveBiases(generator.Biases, "gen_biases.bin");

            SaveWeights(discriminator.Weights, "dis_weights.bin");
            SaveBiases(discriminator.Biases, "dis_biases.bin");
        }
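The imsave and readMaki helpers are not shown. Assuming the generator output Gz is laid out as 48x48 pixels with three interleaved channels in [0, 1] (the channel order and value range are assumptions), a minimal sketch of writing it to disk with System.Drawing (requires a reference to System.Drawing / System.Drawing.Common) could look like this:

        // Hypothetical sketch of imsave: interprets a 48*48*3 float buffer as an RGB image.
        static void imsave(float[] pixels, int index)
        {
            using (var bmp = new System.Drawing.Bitmap(48, 48))
            {
                for (int y = 0; y < 48; y++)
                {
                    for (int x = 0; x < 48; x++)
                    {
                        int p = (y * 48 + x) * 3;
                        int r = (int)(Math.Max(0f, Math.Min(1f, pixels[p]))     * 255);
                        int g = (int)(Math.Max(0f, Math.Min(1f, pixels[p + 1])) * 255);
                        int b = (int)(Math.Max(0f, Math.Min(1f, pixels[p + 2])) * 255);
                        bmp.SetPixel(x, y, System.Drawing.Color.FromArgb(r, g, b));
                    }
                }
                bmp.Save($"gen_{index}.png", System.Drawing.Imaging.ImageFormat.Png);
            }
        }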
Example No. 5
        public static void GAN()
        {
            //Generator genFunc = new Generator();
            //Discriminator disc = new Discriminator();
            //NeuralNetwork generator = new NeuralNetwork(new int[] { 100, 128, 28 * 28 }, new IActivationFunction[] { null, new Tanh(), new Sigmoid() }, genFunc);
            NeuralNetwork discriminator = new NeuralNetwork(new int[] { 28 * 28, 128, 1 }, new IActivationFunction[] { null, new Tanh(), new Sigmoid() }, new CrossEntropy(), new SGD());

            Random rng = new Random(0);

            real_in      = new float[50000][];
            expected_vec = new float[50000][];
            for (int i = 0; i < real_in.Length; i++)
            {
                real_in[i]      = new float[28 * 28];
                expected_vec[i] = new float[11];
                readMNIST(i);
            }

            float[] gen_out = new float[11];
            gen_out[10] = 0;

            for (int q = 0; q < 30; q++)
            {
                float[] Gz = null;
                for (int i = 0; i < 50000; i++)
                {
                    //if (expected_vec[i][0] != 1)
                    //    continue;

                    //for (int j = 0; j < generator_input.Length; j++)
                    //    generator_input[j] = (float)rng.NextDouble() * 1;

                    //Gz = generator.Activate(generator_input);
                    //var Dg = discriminator.Activate(Gz);
                    var Dz = discriminator.Activate(real_in[i]);

                    //disc.DiscriminatorValue = Dg[0] - 1;
                    //discriminator.Train(Gz, new float[] { 0 }, 3f / 1000);

                    //disc.DiscriminatorValue = Dz[0] + 0.001f;
                    discriminator.Train(real_in[i]); //, new float[] { expected_vec[i][0] }, 3f / 1000

                    //generator.Train(generator_input, real_in[i], 3f / 5000);


                    //Console.WriteLine($"Discriminator Output: {Dx.}");
                }
                //imsave(Gz);
                Console.WriteLine($"Epoch {q} Finished.");
            }

            for (int i = 0; i < 50; i++)
            {
                var Dx = discriminator.Activate(real_in[i]);

                float max = Dx.Max();
                for (int k = 0; k < Dx.Length; k++)
                {
                    if (expected_vec[i][k] == 1)
                    {
                        Console.WriteLine($"Expected:{k}");
                    }

                    if (Dx[k] == max)
                    {
                        Console.WriteLine($"Discriminator: {k}");
                    }
                }

                Console.WriteLine();
            }
        }
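The evaluation loop above prints every index whose activation equals the maximum, so ties would produce more than one "Discriminator" line. A small argmax helper, sketched below, returns a single predicted index instead:

        // Returns the index of the largest element; ties resolve to the first occurrence.
        static int ArgMax(float[] values)
        {
            int best = 0;
            for (int k = 1; k < values.Length; k++)
            {
                if (values[k] > values[best])
                {
                    best = k;
                }
            }
            return best;
        }

With it, the prediction line collapses to Console.WriteLine($"Discriminator: {ArgMax(Dx)}");.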
Example No. 6
        public static void GAN()
        {
            //Generator genFunc = new Generator();
            //Discriminator disc = new Discriminator();
            NeuralNetwork generator     = new NeuralNetwork(new int[] { 100, 128, 28 * 28 }, new IActivationFunction[] { null, new Tanh(), new Sigmoid() }, new CrossEntropy(), new SGD());
            NeuralNetwork discriminator = new NeuralNetwork(new int[] { 28 * 28, 128, 1 }, new IActivationFunction[] { null, new Tanh(), new Sigmoid() }, new CrossEntropy(), new SGD());

            Random rng = new Random(0);

            real_in      = new float[50000][];
            expected_vec = new float[50000][];
            for (int i = 0; i < real_in.Length; i++)
            {
                real_in[i]      = new float[28 * 28];
                expected_vec[i] = new float[11];
                readMNIST(i);
            }

            float[] gen_out = new float[11];
            gen_out[10] = 0;

            float max_gen_res = 0;

            for (int q = 0; q < 30; q++)
            {
                float[] Gz = null;
                for (int i = 0; i < 50000; i++)
                {
                    if (expected_vec[i][6] != 1)
                    {
                        continue;
                    }

                    for (int j = 0; j < generator_input.Length; j++)
                    {
                        generator_input[j] = (float)rng.NextGaussian(0, System.Math.Pow(28 * 28, 0.5f)) * 1;
                    }

                    Gz = generator.Activate(generator_input);

                    //Train discriminator
                    var(Dz_a, Dz_z)             = discriminator.Train(real_in[i]);
                    var(Dz_nabla_w, Dz_nabla_b) = discriminator.Solver.Solve(Dz_a, Dz_z, new Vector(new float[] { 1 }));
                    (discriminator.Weights, discriminator.Biases) = discriminator.Optimizer.Optimize(discriminator.Weights, discriminator.Biases, Dz_nabla_w, Dz_nabla_b, 3f / 1000);

                    var(DGz_a, DGz_z)             = discriminator.Train(Gz);
                    var(DGz_nabla_w, DGz_nabla_b) = discriminator.Solver.Solve(DGz_a, DGz_z, new Vector(new float[] { 0 }));
                    (discriminator.Weights, discriminator.Biases) = discriminator.Optimizer.Optimize(discriminator.Weights, discriminator.Biases, DGz_nabla_w, DGz_nabla_b, 3f / 1000);

                    //Train Generator
                    //Now compute the error from the output of D, but don't change anything until G
                    NeuralNetwork chained = new NeuralNetwork(generator, discriminator);
                    var(Gz_a_ex, Gz_z_ex)             = chained.Train(generator_input);
                    var(Gz_nabla_w_ex, Gz_nabla_b_ex) = chained.Solver.Solve(Gz_a_ex, Gz_z_ex, new Vector(new float[] { 1 }));

                    // Take only the generator's share of the chained network's gradients.
                    // (ArraySegment<T>.Array would return the full underlying array, including the
                    // discriminator's gradients, so the segment is materialized instead.)
                    Matrix[] Gz_nabla_w = new ArraySegment <Matrix>(Gz_nabla_w_ex, 0, generator.LayerCount).ToArray();
                    Vector[] Gz_nabla_b = new ArraySegment <Vector>(Gz_nabla_b_ex, 0, generator.LayerCount).ToArray();
                    (generator.Weights, generator.Biases) = generator.Optimizer.Optimize(generator.Weights, generator.Biases, Gz_nabla_w, Gz_nabla_b, 3f / 1000);

                    if (max_gen_res < DGz_a[2][0])
                    {
                        imsave(Gz);
                        max_gen_res = DGz_a[2][0];
                    }


                    Console.WriteLine($"[{i}]Discriminator Real: {Dz_a[2][0]} Fake: {DGz_a[2][0]}");
                }
                //imsave(Gz);
                Console.WriteLine($"Epoch {q} Finished.");
            }

            for (int i = 0; i < 50; i++)
            {
                var Dx = discriminator.Activate(real_in[i]);

                float max = Dx.Max();
                for (int k = 0; k < Dx.Length; k++)
                {
                    if (expected_vec[i][k] == 1)
                    {
                        Console.WriteLine($"Expected:{k}");
                    }

                    if (Dx[k] == max)
                    {
                        Console.WriteLine($"Discriminator: {k}");
                    }
                }

                Console.WriteLine();
            }
        }
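The rng.NextGaussian extension called above (and referenced in a comment in Example No. 4) is not shown. A common way to implement it is the Box-Muller transform; the (mean, standard deviation) parameter meaning below is an assumption made to match the call site, and the method must live in a static class:

        // Hypothetical Box-Muller implementation of the NextGaussian extension used above.
        public static double NextGaussian(this Random rng, double mean, double stdDev)
        {
            double u1 = 1.0 - rng.NextDouble();   // in (0, 1], avoids log(0)
            double u2 = rng.NextDouble();
            double standardNormal = Math.Sqrt(-2.0 * Math.Log(u1)) * Math.Sin(2.0 * Math.PI * u2);
            return mean + stdDev * standardNormal;
        }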