Example 1
        private static void Initialize(NeuralNet net, int randomSeed,
                                       int inputNeuronCount, int hiddenNeuronCount, int outputNeuronCount)
        {
            #region Declarations

            int    i, j;
            Random rand;

            #endregion

            #region Initialization

            rand = new Random(randomSeed);

            #endregion

            #region Execution

            net.m_inputLayer  = new NeuralLayer();
            net.m_outputLayer = new NeuralLayer();
            net.m_hiddenLayer = new NeuralLayer();

            for (i = 0; i < inputNeuronCount; i++)
            {
                net.m_inputLayer.Add(new Neuron(0));
            }

            for (i = 0; i < outputNeuronCount; i++)
            {
                net.m_outputLayer.Add(new Neuron(rand.NextDouble()));
            }

            for (i = 0; i < hiddenNeuronCount; i++)
            {
                net.m_hiddenLayer.Add(new Neuron(rand.NextDouble()));
            }

            // wire-up input layer to hidden layer
            for (i = 0; i < net.m_hiddenLayer.Count; i++)
            {
                for (j = 0; j < net.m_inputLayer.Count; j++)
                {
                    net.m_hiddenLayer[i].Input.Add(net.m_inputLayer[j], new NeuralFactor(rand.NextDouble()));
                }
            }

            // wire-up hidden layer to output layer
            for (i = 0; i < net.m_outputLayer.Count; i++)
            {
                for (j = 0; j < net.m_hiddenLayer.Count; j++)
                {
                    net.m_outputLayer[i].Input.Add(net.m_hiddenLayer[j], new NeuralFactor(rand.NextDouble()));
                }
            }

            #endregion
        }
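
The wiring in Example 1 assumes that each Neuron exposes an Input map from source neurons to NeuralFactor weights, plus a Bias factor, and that NeuralLayer is an indexable collection of neurons. The library's real declarations are not part of these examples; the sketch below is only a guess at the minimal surface this code touches, not the actual API.

        // Hypothetical sketch (requires using System.Collections.Generic;) of the
        // members Example 1 relies on; the library's real declarations may differ.
        public interface INeuron
        {
            double Output { get; set; }
            double Error  { get; set; }
            NeuralFactor Bias { get; set; }
            IDictionary<INeuron, NeuralFactor> Input { get; }
        }

        public class NeuralFactor
        {
            public NeuralFactor(double weight) { Weight = weight; }
            public double Weight   { get; set; }
            public double H_Vector { get; set; }   // accumulated weight delta (see Example 2)
        }

        public class Neuron : INeuron
        {
            public Neuron(double biasWeight) { Bias = new NeuralFactor(biasWeight); }
            public NeuralFactor Bias   { get; set; }
            public double       Output { get; set; }
            public double       Error  { get; set; }
            public IDictionary<INeuron, NeuralFactor> Input { get; } =
                new Dictionary<INeuron, NeuralFactor>();
        }
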
Example 2
        public static void CalculateAndAppendTransformation(NeuralNet net)
        {
            #region Declarations

            int     i, j;
            INeuron outputNode, inputNode, hiddenNode;

            #endregion

            #region Execution

            // accumulate output layer weight changes
            for (j = 0; j < net.m_outputLayer.Count; j++)
            {
                outputNode = net.m_outputLayer[j];

                for (i = 0; i < net.m_hiddenLayer.Count; i++)
                {
                    hiddenNode = net.m_hiddenLayer[i];
                    outputNode.Input[hiddenNode].H_Vector += outputNode.Error * hiddenNode.Output;
                }

                outputNode.Bias.H_Vector += outputNode.Error * outputNode.Bias.Weight;
            }

            // accumulate hidden layer weight changes
            for (j = 0; j < net.m_hiddenLayer.Count; j++)
            {
                hiddenNode = net.m_hiddenLayer[j];

                for (i = 0; i < net.m_inputLayer.Count; i++)
                {
                    inputNode = net.m_inputLayer[i];
                    hiddenNode.Input[inputNode].H_Vector += hiddenNode.Error * inputNode.Output;
                }

                hiddenNode.Bias.H_Vector += hiddenNode.Error * hiddenNode.Bias.Weight;
            }

            #endregion
        }
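
Example 2 only accumulates deltas into H_Vector; none of these examples show the step that folds them back into the weights. The sketch below is one plausible apply step, assuming the LearningRate property seen in Example 5 is the scale factor and that H_Vector is cleared after each application. Both are assumptions, not taken from the library.

        // Hypothetical apply step; not part of the library code shown in these examples.
        private static void ApplyAccumulatedDeltas(NeuralNet net)
        {
            // hidden layer: weights from the input layer, plus the bias
            for (int i = 0; i < net.m_hiddenLayer.Count; i++)
            {
                INeuron hiddenNode = net.m_hiddenLayer[i];

                for (int j = 0; j < net.m_inputLayer.Count; j++)
                {
                    NeuralFactor factor = hiddenNode.Input[net.m_inputLayer[j]];
                    factor.Weight   += net.LearningRate * factor.H_Vector;
                    factor.H_Vector  = 0;
                }

                hiddenNode.Bias.Weight   += net.LearningRate * hiddenNode.Bias.H_Vector;
                hiddenNode.Bias.H_Vector  = 0;
            }

            // output layer: weights from the hidden layer, plus the bias
            for (int i = 0; i < net.m_outputLayer.Count; i++)
            {
                INeuron outputNode = net.m_outputLayer[i];

                for (int j = 0; j < net.m_hiddenLayer.Count; j++)
                {
                    NeuralFactor factor = outputNode.Input[net.m_hiddenLayer[j]];
                    factor.Weight   += net.LearningRate * factor.H_Vector;
                    factor.H_Vector  = 0;
                }

                outputNode.Bias.Weight   += net.LearningRate * outputNode.Bias.H_Vector;
                outputNode.Bias.H_Vector  = 0;
            }
        }
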
Example 3
        private static void CalculateErrors(NeuralNet net, double[] desiredResults)
        {
            #region Declarations

            int     i, j;
            double  temp, error;
            INeuron outputNode, hiddenNode;

            #endregion

            #region Execution

            // calculate output layer error values
            for (i = 0; i < net.m_outputLayer.Count; i++)
            {
                outputNode = net.m_outputLayer[i];
                temp       = outputNode.Output;

                outputNode.Error = (desiredResults[i] - temp) * SigmoidDerivative(temp);
            }

            // calculate hidden layer error values
            for (i = 0; i < net.m_hiddenLayer.Count; i++)
            {
                hiddenNode = net.m_hiddenLayer[i];
                temp       = hiddenNode.Output;

                error = 0;
                for (j = 0; j < net.m_outputLayer.Count; j++)
                {
                    outputNode = net.m_outputLayer[j];
                    error     += (outputNode.Error * outputNode.Input[hiddenNode].Weight) * SigmoidDerivative(temp);
                }

                hiddenNode.Error = error;
            }

            #endregion
        }
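
Example 3 calls a SigmoidDerivative helper that is not included in these examples. Assuming the neurons use the standard logistic activation and that the helper is given the neuron's output rather than its raw input, a matching definition would be:

        // Assumed helper: derivative of the logistic sigmoid written in terms
        // of the neuron's output f(x), i.e. f'(x) = f(x) * (1 - f(x)).
        private static double SigmoidDerivative(double output)
        {
            return output * (1.0 - output);
        }
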
Example 4
        public static void PreparePerceptionLayerForPulse(NeuralNet net, double[] input)
        {
            #region Declarations

            int i;

            #endregion

            #region Execution

            if (input.Length != net.m_inputLayer.Count)
            {
                throw new ArgumentException(string.Format("Expecting {0} inputs for this net", net.m_inputLayer.Count));
            }

            // initialize data
            for (i = 0; i < net.m_inputLayer.Count; i++)
            {
                net.m_inputLayer[i].Output = input[i];
            }

            #endregion
        }
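
Example 5 below loads the perception layer by assigning PerceptionLayer[i].Output directly; this helper does the same with a length check. A short usage sketch follows; the 0.99/0.01 values stand in for the high/low constants of Example 5, whose actual values are not shown.

        // Hypothetical usage, mirroring what Example 5 does by hand.
        double[] sample = { 0.99, 0.01 };              // stand-ins for high, low
        PreparePerceptionLayerForPulse(net, sample);   // load the two input neurons
        net.Pulse();                                   // forward pass
        double result = net.OutputLayer[0].Output;     // read the single output neuron
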
Example 5
        private void button1_Click(object sender, EventArgs e)
        {
            #region Declarations

            double        ll, lh, hl, hh;
            int           count, iterations;
            bool          verbose;
            double[][]    input, output;
            StringBuilder bld;

            #endregion

            #region Initialization

            net = new NeuralNet();
            bld = new StringBuilder();

            // training set: the XOR truth table, encoded with the low/high constants
            input    = new double[4][];
            input[0] = new double[] { high, high };
            input[1] = new double[] { low, high };
            input[2] = new double[] { high, low };
            input[3] = new double[] { low, low };

            output    = new double[4][];
            output[0] = new double[] { low };
            output[1] = new double[] { high };
            output[2] = new double[] { high };
            output[3] = new double[] { low };

            verbose    = false;
            count      = 0;
            iterations = 5;

            #endregion

            #region Execution

            // initialize with
            //   random seed 1
            //   2 perception (input) neurons
            //   2 hidden layer neurons
            //   1 output neuron
            net.Initialize(1, 2, 2, 1);

            do
            {
                count++;

                net.LearningRate = 3;
                net.Train(input, output, TrainingType.BackPropogation, iterations);

                net.PerceptionLayer[0].Output = low;
                net.PerceptionLayer[1].Output = low;

                net.Pulse();

                ll = net.OutputLayer[0].Output;

                net.PerceptionLayer[0].Output = high;
                net.PerceptionLayer[1].Output = low;

                net.Pulse();

                hl = net.OutputLayer[0].Output;

                net.PerceptionLayer[0].Output = low;
                net.PerceptionLayer[1].Output = high;

                net.Pulse();

                lh = net.OutputLayer[0].Output;

                net.PerceptionLayer[0].Output = high;
                net.PerceptionLayer[1].Output = high;

                net.Pulse();

                hh = net.OutputLayer[0].Output;

                #region verbose

                if (verbose)
                {
                    bld.Remove(0, bld.Length);

                    bld.Append("PERCEPTION LAYER <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n");
                    foreach (Neuron pn in net.PerceptionLayer)
                    {
                        AppendNeuronInfo(bld, pn);
                    }

                    bld.Append("\nHIDDEN LAYER <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n");
                    foreach (Neuron hn in net.HiddenLayer)
                    {
                        AppendNeuronInfo(bld, hn);
                    }

                    bld.Append("\nOUTPUT LAYER <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n");
                    foreach (Neuron on in net.OutputLayer)
                    {
                        AppendNeuronInfo(bld, on);
                    }

                    bld.Append("\n");

                    bld.Append("hh: \t").Append(hh.ToString()).Append(" \t< .5\n");
                    bld.Append("ll: \t").Append(ll.ToString()).Append(" \t< .5\n");

                    bld.Append("hl: \t").Append(hl.ToString()).Append(" \t> .5\n");
                    bld.Append("lh: \t").Append(lh.ToString()).Append(" \t> .5\n");

                    MessageBox.Show(bld.ToString());
                }

                #endregion
            }
            // really train this thing well...
            while (hh > (mid + low) / 2 ||
                   lh < (mid + high) / 2 ||
                   hl < (mid + low) / 2 ||
                   ll > (mid + high) / 2);


            net.PerceptionLayer[0].Output = low;
            net.PerceptionLayer[1].Output = low;

            net.Pulse();

            ll = net.OutputLayer[0].Output;

            net.PerceptionLayer[0].Output = high;
            net.PerceptionLayer[1].Output = low;

            net.Pulse();

            hl = net.OutputLayer[0].Output;

            net.PerceptionLayer[0].Output = low;
            net.PerceptionLayer[1].Output = high;

            net.Pulse();

            lh = net.OutputLayer[0].Output;

            net.PerceptionLayer[0].Output = high;
            net.PerceptionLayer[1].Output = high;

            net.Pulse();

            hh = net.OutputLayer[0].Output;

            bld.Remove(0, bld.Length);
            bld.Append((count * iterations).ToString()).Append(" iterations required for training\n");

            bld.Append("hh: ").Append(hh.ToString()).Append(" < .5\n");
            bld.Append("ll: ").Append(ll.ToString()).Append(" < .5\n");

            bld.Append("hl: ").Append(hl.ToString()).Append(" > .5\n");
            bld.Append("lh: ").Append(lh.ToString()).Append(" > .5\n");

            MessageBox.Show(bld.ToString());

            #endregion
        }
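
None of the examples show how Train(input, output, TrainingType.BackPropogation, iterations) is built from the pieces above. As a rough composition sketch (an assumption, not the library's actual Train implementation), one back-propagation pass over the training set could look like this, where ApplyAccumulatedDeltas is the hypothetical helper sketched after Example 2:

        // Hypothetical composition of Examples 2-4 into one training pass.
        private static void BackPropagationPass(NeuralNet net, double[][] inputs, double[][] expected)
        {
            for (int i = 0; i < inputs.Length; i++)
            {
                PreparePerceptionLayerForPulse(net, inputs[i]);   // Example 4: load the inputs
                net.Pulse();                                      // forward pass
                CalculateErrors(net, expected[i]);                // Example 3: output and hidden errors
                CalculateAndAppendTransformation(net);            // Example 2: accumulate weight deltas
            }

            ApplyAccumulatedDeltas(net);                          // hypothetical apply step (see Example 2)
        }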