Example #1
        public Form1()
        {
            FilePath = "IRIS_Train.csv";
            Tuple <double, double, double, double> Norm = Tuple.Create(10.0, 0.0, 3.0, 0.0);

            //Load the training set referenced by FilePath: 4 input columns, 1 output column, MNIST parsing off
            var t = Data.ReadData(FilePath, 4, 1, Norm, false);

            List <double[]> input  = t.Item1;
            List <double[]> output = t.Item2;

            PerceptronNetwork network = new MLP.PerceptronNetwork(new int[] { 4, 5, 5, 1 });

            NetworkController = new MLP.Network_Control(network, output, input, 0.02, false, Norm, 4, 1);
            InitializeComponent();

            //Set image to the starting network's image
            if (!cbdisabledrawing.Checked)
            {
                var Image = NetworkImage.Draw(network, network.Layers.Count);
                IMG_NetworkTopography.Image    = Image;
                IMG_NetworkTopography.SizeMode = PictureBoxSizeMode.Zoom;
            }
            rtbConsole.Text          = "";
            NetworkController.Paused = true;
        }
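The four-element Norm tuple reappears in every example on this page. Judging from how Example #5 passes its items to Data.normalize and Data.inverseNormalize, it appears to hold (input max, input min, output max, output min). A minimal round-trip sketch under that assumption (plain min-max scaling inside Data) looks like this:

        //Sketch only: assumes Data.normalize / Data.inverseNormalize perform plain min-max scaling,
        //which is consistent with how the examples pass the tuple items around.
        static void NormalizationRoundTrip()
        {
            //(input max, input min, output max, output min) for the IRIS data
            Tuple <double, double, double, double> Norm = Tuple.Create(10.0, 0.0, 3.0, 0.0);

            //Inputs are scaled with (min = Item2, max = Item1) before activation...
            double scaledInput = Data.normalize(7.9, Norm.Item2, Norm.Item1);         //7.9 -> 0.79 under min-max scaling

            //...and raw network outputs are mapped back with (min = Item4, max = Item3)
            double classValue  = Data.inverseNormalize(0.66, Norm.Item4, Norm.Item3); //0.66 -> roughly class 2

            Console.WriteLine(scaledInput + " / " + Math.Round(classValue));
        }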
Example #2
        private void bControllerReset_Click(object sender, EventArgs e)
        {
            int inputCount;
            int outputCount;
            var structure = ParseStructureBox(out inputCount, out outputCount);

            Tuple <double, double, double, double> Norm = Tuple.Create(10.0, 0.0, 3.0, 0.0);
            var t = Data.ReadData(FilePath, inputCount, outputCount, Norm, rbMNIST.Checked);

            List <double[]> input  = t.Item1;
            List <double[]> output = t.Item2;

            PerceptronNetwork network = new MLP.PerceptronNetwork(structure);

            NetworkController = new MLP.Network_Control(network, output, input, Double.Parse(tbLearningRate.Text), false, Norm, inputCount, outputCount);

            cartesianChart1.Series[0].Values.Clear();
            cartesianChart2.Series[0].Values.Clear();
            cartesianChart3.Series[0].Values.Clear();
            if (!cbdisabledrawing.Checked)
            {
                var Image = NetworkImage.Draw(NetworkController.Network, NetworkController.Network.Layers.Count);
                IMG_NetworkTopography.Image = Image;
            }
        }
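ParseStructureBox itself is not among the snippets on this page. Given that the radio-button handlers below write layer lists such as "4 5 5 1" and "784 784 1" into tbStructure.Text, a plausible sketch of it (hypothetical, not the repository's actual implementation) is:

        //Hypothetical sketch: split tbStructure.Text ("4 5 5 1") into layer sizes and
        //report the first and last entries as the input and output column counts.
        private int[] ParseStructureBox(out int inputCount, out int outputCount)
        {
            string[] parts     = tbStructure.Text.Split(new[] { ' ' }, StringSplitOptions.RemoveEmptyEntries);
            int[]    structure = Array.ConvertAll(parts, int.Parse);

            inputCount  = structure[0];                      //first layer = number of input columns
            outputCount = structure[structure.Length - 1];   //last layer  = number of output columns
            return structure;
        }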
Example #3
        /// <summary>
        /// Draws the given network structure: one column of labelled ellipses per layer,
        /// connected by lines to the previous layer. The result is also saved as Network.png.
        /// </summary>
        /// <param name="Network">The network whose topology is drawn</param>
        /// <param name="layerCount">Number of layers, used to size the bitmap</param>
        /// <returns>The rendered bitmap</returns>
        public static Bitmap Draw(PerceptronNetwork Network, int layerCount)
        {
            Bitmap   m = new Bitmap(layerCount * 200, Network.Layers.Max(item => item.Layer_Neurons.Count) * 200);
            Graphics g = Graphics.FromImage(m);

            int row            = 0;
            int column         = 0;
            int previous_count = 0;

            foreach (Layer l in Network.Layers)
            {
                Font       drawFont   = new Font("Arial", 10);
                SolidBrush drawBrush  = new SolidBrush(Color.Black);
                PointF     layerPoint = new PointF(200 + column * 150 + 50 / 2 - 10, 180);
                g.DrawString("Layer " + column, drawFont, drawBrush, layerPoint);

                row = 0;
                foreach (Perceptron p in l.Layer_Neurons)
                {
                    int offset     = 200;
                    int mult       = 150;
                    int lineOffset = 50;

                    int x = offset + column * mult;
                    int y = offset + row * mult;

                    //Going from left to right place ellipses in place of perceptrons and add labels to them
                    g.DrawEllipse(Pens.Black, x, y, 100, 100);

                    PointF drawPoint = new PointF(x + (lineOffset / 2) - 10, y + (lineOffset / 2) + 10);
                    g.DrawString("Bias" + Math.Round(p.Bias, 3) + "\nOutput:" + Math.Round(p.Output, 2), drawFont, drawBrush, drawPoint);

                    //Add lines
                    if (column != 0)
                    {
                        for (int b = 0; b < previous_count; b++)
                        {
                            g.DrawLine(Pens.Black, new Point(x, y + lineOffset), new Point(x - mult + 2 * lineOffset, offset + b * mult + lineOffset));
                        }
                    }

                    row++;
                }
                column++;
                previous_count = l.Layer_Neurons.Count;
            }

            g.Dispose();                                //release the drawing surface
            m.Save("Network.png", ImageFormat.Png);
            return m;
        }
Example #4
        private void rbMNIST_CheckedChanged(object sender, EventArgs e)
        {
            //btnbatch.Enabled = false;


            //Pixel values run 0-255 and digit labels 0-9, hence the normalization bounds
            Tuple <double, double, double, double> Norm = Tuple.Create(255.0, 0.0, 9.0, 0.0);
            var t = Data.ReadData("mnist_test.csv", 785, 1, Norm, true);

            List <double[]> input  = t.Item1;
            List <double[]> output = t.Item2;
            //https://www.kaggle.com/donfuzius/vectordigits
            PerceptronNetwork network = new MLP.PerceptronNetwork(new int[] { 784, 784, 1 });

            NetworkController = new MLP.Network_Control(network, output, input, double.Parse(tbLearningRate.Text), false, Norm, 784, 1);
            tbStructure.Text  = "784 784 1";

            /*
             * Tuple<double, double, double, double> Norm = Tuple.Create(255.0, 0.0, 9.0, 0.0);
             *  List<double[]> input = new List<double[]>();
             *  List<double[]> output = new List<double[]>();
             *  var t = Data.ReadData(path, 784, 1, Norm);
             *  input = t.Item1;
             *  output = t.Item2;
             *
             *  int errors = 0;
             *  for (int i = 0; i < input.Count; i++)
             *  {
             *      for (int j = 0; j < output[i].Length; j++)
             *      {
             *          var expected = Data.inverseNormalize(output[i][j], Norm.Item4, Norm.Item3);
             *
             *          var a = NetworkController.Network.Activate(input[i]);   //Get the result from the given outputs
             *          var bbb = Data.inverseNormalize(a[j], Norm.Item4, Norm.Item3);  //Inverse normalize the result to fit it between output values
             *
             *          if (Math.Round(bbb) != expected) { errors++; }//Error?
             *          rtbConsole.Text += "Expected: " + expected + " Got :" + Math.Round(bbb) + "\n";
             *          rtbConsole.SelectionStart = rtbConsole.Text.Length;
             *          rtbConsole.ScrollToCaret();
             *
             *      }
             *
             *
             *  }
             *  rtbConsole.Text += "Total Number of Errors: " + errors + "/" + input.Count*output.Count + "\n";
             *
             */
        }
Example #5
        static void Main(string[] args)
        {
            PerceptronNetwork p;
            var t = Data.ReadData(inputPath, inputCount, outputCount, Norm, false);

            input  = t.Item1;
            output = t.Item2;

            p = new PerceptronNetwork(new int[] { inputCount, 5, 5, outputCount });

            //Re-initialize the network and retrain until Learn reports success
            while (!p.Learn(input, output, 0.05, 0.01, 3000))
            {
                p = new PerceptronNetwork(new int[] { inputCount, 5, 5, outputCount });
            }


            /*
             * MLP.Network_Control control = new MLP.Network_Control(p, output, input, 0.01, false, Norm, inputCount, outputCount);
             *
             * for (int i = 0; i < 3000; i++)
             * {
             * control.Step();
             * }
             *
             * MLP.NetworkImage.Draw(p, p.Layers.Count);
             *
             * var sst = control.TestValues(1,1,1,1);
             * Console.WriteLine(sst);
             *
             * Console.WriteLine(control.CurrentError);
             */
            //Interactive test loop: read inputs, normalize, activate the network, print denormalized results
            while (true)
            {
                double[] val = new double[inputCount];
                for (int i = 0; i < inputCount; i++)
                {
                    Console.WriteLine("Input Test Value " + i + ": ");
                    val[i] = Data.normalize(double.Parse(Console.ReadLine()), Norm.Item2, Norm.Item1);
                }
                double[] sal = p.Activate(val);
                for (int i = 0; i < outputCount; i++)
                {
                    Console.Write("Result: " + i + ": " + Data.inverseNormalize(sal[i], Norm.Item4, Norm.Item3) + " ");
                }
                Console.WriteLine("");
            }
        }
Example #6
        private void rbIris_CheckedChanged(object sender, EventArgs e)
        {
            //btnbatch.Enabled = true;


            Tuple <double, double, double, double> Norm = Tuple.Create(10.0, 0.0, 3.0, 0.0);
            var t = Data.ReadData("IRIS_Train.csv", 4, 1, Norm, false);

            List <double[]> input  = t.Item1;
            List <double[]> output = t.Item2;

            PerceptronNetwork network = new MLP.PerceptronNetwork(new int[] { 4, 5, 5, 1 });

            NetworkController = new MLP.Network_Control(network, output, input, double.Parse(tbLearningRate.Text), false, Norm, 4, 1);
            tbStructure.Text  = "4 5 5 1";
        }
        /// <summary>
        /// Network controller; static networks are non-interactive but somewhat faster
        /// </summary>
        /// <param name="network">The perceptron network to drive</param>
        /// <param name="isStatic">True for a static (non-interactive) controller</param>
        public Network_Control(PerceptronNetwork network, List <double[]> output, List <double[]> input, double learningRate, bool isStatic, Tuple <double, double, double, double> normalizer, int inputcount, int outputcount)
        {
            Network          = network;
            Paused           = false;
            CurrentIteration = 0;
            CurrentError     = Int32.MaxValue;
            BestError        = CurrentError;
            Error_Historian  = new List <double>();
            Delay_Historian  = new List <System.TimeSpan>();
            LearningRate     = learningRate;

            Output = new List <double[]>();
            Input  = new List <double[]>();

            InputCount  = inputcount;
            OutputCount = outputcount;

            Output.AddRange(output);
            Input.AddRange(input);
            Norm = normalizer;
        }
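The commented-out block in Example #5 hints at how a controller is driven once constructed. A minimal sketch along those lines, using only members that appear elsewhere on this page (Step, CurrentError, NetworkImage.Draw), might look like this; the per-call behaviour of Step is assumed:

        //Sketch based on the commented-out block in Example #5: run a fixed number of
        //training steps and report the resulting error.
        static void TrainWithController(PerceptronNetwork network,
                                        List <double[]> input, List <double[]> output,
                                        Tuple <double, double, double, double> norm,
                                        int inputCount, int outputCount)
        {
            var control = new MLP.Network_Control(network, output, input, 0.01, false, norm, inputCount, outputCount);

            for (int i = 0; i < 3000; i++)
            {
                control.Step();                                   //assumed: one training iteration per call
            }

            Console.WriteLine(control.CurrentError);              //error after the last step
            NetworkImage.Draw(network, network.Layers.Count);     //also writes Network.png (see Example #3)
        }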