Example #1
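Builds a 3-4-1 BackPropagationNetwork with None/Sigmoid/Linear transfer functions, trains it on a single input/target pair for ten iterations, and prints the inputs, the network output, and the training error after each pass.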
        static void Main(string[] args)
        {
            int[] layerSizes = new int[3] {
                3, 4, 1
            };
            TransferFunction[] TFuncs = new TransferFunction[3] {
                TransferFunction.None,
                TransferFunction.Sigmoid,
                TransferFunction.Linear
            };

            BackPropagationNetwork bpn = new BackPropagationNetwork(layerSizes, TFuncs);

            // A single training example: three inputs and one desired output value
            double[] input   = new double[] { 4, 6, 8 };
            double[] desired = new double[] { -0.86 };
            double[] output  = new double[1];

            double error = 0.0;

            for (int i = 0; i < 10; i++)
            {
                // One training pass (learning rate 0.15, momentum 0.1), then run the network to see its current output
                error = bpn.Train(ref input, ref desired, 0.15, 0.1);
                bpn.Run(ref input, out output);

                Console.WriteLine("Iteration {0}: \n\t Input {1:0.000} {2:0.000} {3:0.000} Output {4:0.000} Error {5:0.000}", i, input[0], input[1], input[2], output[0], error);
            }

            Console.ReadKey();
        }
Example #2
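Trains a 2-2-1 BackPropagationNetwork on the four XOR truth-table cases for up to 1000 epochs, reports the accumulated error every 100 epochs, prints the trained network's output for each case, and saves the network to an XML file.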
        static void Main(string[] args)
        {
            string filePath = @"E:\Work\NeuralXOR.xml";

            int[] layerSizes = new int[3] {
                2, 2, 1
            };
            TransferFunction[] tFuncs = new TransferFunction[3] {
                TransferFunction.None,
                TransferFunction.Sigmoid,
                TransferFunction.Linear
            };
            BackPropagationNetwork bpn = new BackPropagationNetwork(layerSizes, tFuncs);

            //Example XOR-Gate
            bpn.Name = "XOR-Gate";

            // Define the four XOR training cases
            double[][] input, output;

            input = new double[4][]; output = new double[4][];
            for (int i = 0; i < 4; i++)
            {
                input[i] = new double[2]; output[i] = new double[1];
            }

            input[0][0] = 0.0; input[0][1] = 0.0; output[0][0] = 0.0; // false XOR false = false
            input[1][0] = 1.0; input[1][1] = 0.0; output[1][0] = 1.0; // true  XOR false = true
            input[2][0] = 0.0; input[2][1] = 1.0; output[2][0] = 1.0; // false XOR true  = true
            input[3][0] = 1.0; input[3][1] = 1.0; output[3][0] = 0.0; // true  XOR true  = false

            //Train the network
            double error = 0.0;
            int    max_count = 1000, count = 0;

            do
            {
                //Prepare for training Epoch
                count++;
                error = 0.0;

                //Train
                for (int i = 0; i < 4; i++)
                {
                    error += bpn.Train(ref input[i], ref output[i], 0.15, 0.10);
                }

                //Show Progress
                if (count % 100 == 0)
                {
                    Console.WriteLine("Epoch {0} completed with error {1:0.0000}", count, error);
                }
            } while (error > 0.00001 && count <= max_count);

            //Display results!
            double[] networkOutput = new double[1];

            for (int i = 0; i < 4; i++)
            {
                bpn.Run(ref input[i], out networkOutput);
                Console.WriteLine("Case {3}: {0:0.0} xor {1:0.0} = {2:0.0000}", input[i][0], input[i][1], networkOutput[0], i + 1);
            }
            bpn.Save(filePath);

            Console.ReadLine();
        }
Example #3
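Another XOR trainer, written with implicitly typed arrays and an object initializer; it caps training at a handful of epochs, times the training loop with a Stopwatch, and prints the network's output for all four cases.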
        static void Main(string[] args)
        {
            var layerSizes = new[] { 2, 2, 1 };

            var transferFunctions = new[]
            { TransferFunction.None, TransferFunction.Sigmoid, TransferFunction.Linear };

            var backPropagationNetwork = new BackPropagationNetwork(layerSizes, transferFunctions)
            {
                Name = "XOR-Gate Example"
            };

            var input = new double[4][];

            var expected = new double[4][];

            for (int i = 0; i < 4; i++)
            {
                input[i]    = new double[2];
                expected[i] = new double[1];
            }

            input[0][0]    = 0.0;
            input[0][1]    = 0.0;
            expected[0][0] = 0; // false xor false = false

            input[1][0]    = 1.0;
            input[1][1]    = 0.0;
            expected[1][0] = 1; // true xor false = true

            input[2][0]    = 0.0;
            input[2][1]    = 1.0;
            expected[2][0] = 1; // false xor true = true

            input[3][0]    = 1.0;
            input[3][1]    = 1.0;
            expected[3][0] = 0; // true xor true = false


            double    error    = 0.0;
            const int maxCount = 10; // small epoch limit for a quick demo; XOR training typically needs far more epochs to converge
            int       count    = 0;

            Stopwatch watch = Stopwatch.StartNew();

            do
            {
                // Prepare for the next training epoch
                count++;
                error = 0;

                // Train on all four XOR cases
                for (int i = 0; i < 4; i++)
                {
                    error += backPropagationNetwork.Train(ref input[i], ref expected[i], .15, .1);
                }

                // Report progress after every epoch
                Console.WriteLine("Epoch {0} completed with error {1:0.0000}", count, error);
            } while (error > 0.0001 && count <= maxCount);

            watch.Stop();

            var output = new double[4][];

            for (int i = 0; i < 4; i++)
            {
                backPropagationNetwork.Run(ref input[i], out output[i]);
            }

            for (int i = 0; i < 4; i++)
            {
                Console.WriteLine("For inputs {0} and {1}, output is {2}", input[i][0], input[i][1], output[i][0]);
            }

            Console.WriteLine("Time Elapsed :" + watch.Elapsed);
            Console.WriteLine("Hit Enter...");
            Console.ReadLine();
        }
Example #4
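A WinForms menu handler (навчити is Ukrainian for "train") that binds several DataTables to grid views, generates target values for the function selected in comboBox1, trains the network until the averaged error drops below a threshold or an iteration limit is reached, and then writes the adjusted hidden-layer and output-layer weights into the bound tables.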
        private void навчитиToolStripMenuItem_Click(object sender, EventArgs e)
        {
            dataGridView1.DataSource = errors;
            dataGridView2.DataSource = function;
            dataGridView3.DataSource = weightsHidden;
            dataGridView4.DataSource = weightsOutput;

            switch (comboBox1.Text)
            {
            case "ln|cosx1| + tgx2 + ctgx3":
            {
                errors.Columns.Add("iteration");
                errors.Columns.Add("function");
                errors.Columns.Add("error");
                errors.Columns.Add("output[d1]");
                errors.Columns.Add("output[d2]");

                double sum = 0.0;
                for (int i = 0; i < dt.Rows.Count; i++)
                {
                    // Target value for each training sample
                    desired[i][0] = Math.Sin(input[i][0]) + Math.Sin(input[i][1]) - Math.Sin(input[i][2]);
                    // Alternative target matching the case label: Math.Log(Math.Abs(Math.Cos(input[i][0]))) + Math.Tan(input[i][1]) + 1 / Math.Tan(input[i][2]);
                    sum += desired[i][0];
                }

                function.Columns.Add("i");
                function.Columns.Add("x1");
                function.Columns.Add("x2");
                function.Columns.Add("x3");
                function.Columns.Add("d1");
                function.Columns.Add("d2");
                function.Columns.Add("average");

                for (int i = 0; i < desired.GetUpperBound(0) + 1; i++)
                {
                    DataRow row = function.NewRow();
                    row["i"]  = i;
                    row["x1"] = input[i][0];
                    row["x2"] = input[i][1];
                    row["x3"] = input[i][2];
                    row["d1"] = desired[i][0];
                    function.Rows.Add(row);
                }

                double[] output = new double[2];

                // Training
                int      example = 1;
                int      h       = 0;
                // Probe input used to monitor the network's output while training
                double[] value   = new double[3] {
                    4, 5, 4
                };
                Random rdn = new Random();
                do
                {
                    error = 0.0;
                    // Note: Enumerable.Reverse() does not modify the array in place, so this call has no effect here
                    input.Reverse();

                    for (int j = 0; j < dt.Rows.Count; j++)
                    {
                        error += bpn.Train(ref input[j], ref desired[j], TrainingRate, Momentum);
                    }
                    if (h % 100 == 0)
                    {
                        bpn.Run(ref value, out output);
                        DataRow row = errors.NewRow();
                        row["iteration"]  = h;
                        row["function"]   = example;
                        row["error"]      = error / 20;
                        row["output[d1]"] = output[0];
                        //row["output[d2]"] = output[1];
                        errors.Rows.Add(row);
                    }
                    h++;
                    if (h == 200000)
                    {
                        break;
                    }
                } while ((error / 20) > 0.00127);

                // Display the adjusted hidden-layer weights
                hidden = bpn.GetWeights(0);
                weightsHidden.Columns.Add("Початковий/Прихований"); // "Initial/Hidden" layer label

                for (int i = 0; i < hidden[0].Count(); i++)
                {
                    weightsHidden.Columns.Add(Convert.ToString(i));
                }

                for (int i = 0; i < hidden.GetUpperBound(0) + 1; i++)
                {
                    DataRow row = weightsHidden.NewRow();
                    row["Початковий/Прихований"] = i;
                    for (int j = 1; j < weightsHidden.Columns.Count; j++)
                    {
                        row[j] = hidden[i][j - 1];
                    }
                    weightsHidden.Rows.Add(row);
                }

                // Display the adjusted output-layer weights
                outputWeights = bpn.GetWeights(1);
                weightsOutput.Columns.Add("Прихований/Вихідний"); // "Hidden/Output" layer label

                for (int i = 0; i < outputWeights[0].Count(); i++)
                {
                    weightsOutput.Columns.Add(Convert.ToString(i));
                }

                for (int i = 0; i < outputWeights.GetUpperBound(0) + 1; i++)
                {
                    DataRow row = weightsOutput.NewRow();
                    row["Прихований/Вихідний"] = i;
                    for (int j = 1; j < weightsOutput.Columns.Count; j++)
                    {
                        row[j] = outputWeights[i][j - 1];
                    }
                    weightsOutput.Rows.Add(row);
                }


                break;
            }
            }
        }
Example #5
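A console demo built on a different BackPropagationNetwork API (XP Idea.com): it explains the XOR task, trains a 2-3-1 network on a PatternsCollection of the four XOR patterns, and then repeatedly reads two values from the console and prints the rounded network output.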
        private static void Main(string[] args)
        {
            Console.Out.WriteLine("                       BACKPROPAGATION neural network demo.");
            Console.Out.WriteLine("");
            Console.Out.WriteLine("												 Copyright(C) XP Idea.com 2001-2004 ");
            Console.Out.WriteLine("");
            Console.Out.WriteLine("The purpose of this demo is to show learning abilities of BACKPROP network.");
            Console.Out.WriteLine("The BACKPROP network is able to learn much more complex data patterns, than");
            Console.Out.WriteLine("Adaline network (please see OCR demo application). ");
            Console.Out.WriteLine("This example simple shows that the Backprop network is able to learn ");
            Console.Out.WriteLine("an 'exclusive OR' (XOR) operation, but the Adaline network is not able to do so.");
            Console.Out.WriteLine("");
            Console.Out.WriteLine("         false XOR false = false");
            Console.Out.WriteLine("         true XOR false = true");
            Console.Out.WriteLine("         false XOR true = true");
            Console.Out.WriteLine("         true XOR true = false");
            Console.Out.WriteLine("");
            Console.Out.WriteLine(" As result of the training, the network will produce output ‘0’");
            Console.Out.WriteLine("corresponding to logical ‘false’ or ‘1’ corresponding to logical ‘true’ value.");
            Console.Out.WriteLine("");
            Console.Out.WriteLine("PLEASE HIT ENTER TO CONTINUE");
            Console.In.ReadLine();

            Console.Out.WriteLine("");
            Console.Out.WriteLine("During this demo you will be prompted to enter input values");
            Console.Out.WriteLine("for the network. Then network will perform “XOR” operation on ");
            Console.Out.WriteLine("the entered values and result will be displayed to you. ");
            Console.Out.WriteLine("Please enter any values in range from 0 to 1 and hit [ENTER] when prompted. ");
            Console.Out.WriteLine("");
            Console.Out.WriteLine("NOW THE NETWORK IS READY TO LEARN FOLLOWING PATTERNS");
            Console.Out.WriteLine("");
            Console.Out.WriteLine("			false XOR false = false;");
            Console.Out.WriteLine("			true XOR false = true;");
            Console.Out.WriteLine("			false XOR true = true;");
            Console.Out.WriteLine("			true XOR true = false;");
            Console.Out.WriteLine("PLEASE HIT ENTER TO BEGIN TRAINING");
            Console.In.ReadLine();
            Console.Out.Write("TRAINING....");

            double d;
            BackPropagationNetwork BackPropNet;
            var patterns = new PatternsCollection(TrainingSets, 2, 1);

            SetPattern(patterns[0], 0, 0, 0);
            SetPattern(patterns[1], 0, 1, 1);
            SetPattern(patterns[2], 1, 0, 1);
            SetPattern(patterns[3], 1, 1, 0);
            // Create the network: 2 input, 3 hidden and 1 output node
            BackPropNet = new BackPropagationNetwork(0.55, 0.6, new int[3] {
                2, 3, 1
            });
            BackPropNet.Train(patterns);
            Console.Out.WriteLine("DONE!");
            Console.Out.WriteLine("");
            //BackPropNet.SaveToFile("test.net");
            while (true)
            {
                try
                {
                    Console.Out.Write("Enter 1st value: ");
                    d = double.Parse(Console.In.ReadLine());
                    BackPropNet.InputNode(0).Value = d;
                    Console.Out.Write("Enter 2nd value: ");
                    d = double.Parse(Console.In.ReadLine());
                    BackPropNet.InputNode(1).Value = d;
                    BackPropNet.Run();
                    Console.Out.WriteLine("Result: " + Math.Round(BackPropNet.OutputNode(0).Value));
                    Console.Out.WriteLine("");
                }
                catch
                {
                    return;
                }
            }
        }