Example #1
        private static void train(int ep)
        {
            // the input values
            double[,] inputs =
            {
                { 0, 0 },
                { 0, 1 },
                { 1, 0 },
                { 1, 1 }
            };

            // desired results
            double[] results = { 0, 1, 1, 0 };

            // creating the neurons
            Neuron       hiddenNeuron1 = new Neuron();
            Neuron       hiddenNeuron2 = new Neuron();
            OutputNeuron outputNeuron  = new OutputNeuron();

            // random weights
            hiddenNeuron1.randomizeWeights();
            hiddenNeuron2.randomizeWeights();
            outputNeuron.randomizeWeights();

            int epoch = 0;

Retry:
            epoch++;
            for (int i = 0; i < 4; i++)  // very important, do NOT train for only one example
            {
                // 1) forward propagation (calculates output)
                hiddenNeuron1.inputs = new double[] { inputs[i, 0], inputs[i, 1] };
                hiddenNeuron2.inputs = new double[] { inputs[i, 0], inputs[i, 1] };

                outputNeuron.inputs = new double[] { hiddenNeuron1.output, hiddenNeuron2.output };

                Console.WriteLine("{0} xor {1} = {2}", inputs[i, 0], inputs[i, 1], outputNeuron.output());
                Console.WriteLine(outputNeuron.output()[0]);
                Console.WriteLine(outputNeuron.output()[1]);
                Console.WriteLine(outputNeuron.output()[2]);
                // 2) back propagation (adjusts weights)

                // adjusts the weight of the output neuron, based on its error
                outputNeuron.error = sigmoid.derivative(outputNeuron.output) * (results[i] - outputNeuron.output);


                outputNeuron.adjustWeights();

                // then adjusts the hidden neurons' weights, based on their errors
                hiddenNeuron1.error = sigmoid.derivative(hiddenNeuron1.output) * outputNeuron.error * outputNeuron.weights[0];
                hiddenNeuron2.error = sigmoid.derivative(hiddenNeuron2.output) * outputNeuron.error * outputNeuron.weights[1];

                hiddenNeuron1.adjustWeights();
                hiddenNeuron2.adjustWeights();
            }

            if (epoch < ep)
            {
                goto Retry;
            }
            OutPutNeuron1 = outputNeuron;
            HiddenNeuron1 = hiddenNeuron1;
            HiddenNeuron2 = hiddenNeuron2;
            Console.ReadLine();
        }
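
Both examples rely on a Neuron class, an OutputNeuron class and a sigmoid helper that are not shown. The sketch below is one way those pieces could look, reconstructed from how Example #1 uses them (two weighted inputs plus a bias, sigmoid activation, delta-rule weight updates); the learningRate value and the random initialisation range are assumptions. Example #2 additionally reads a single scalar Neuron.input, which would need a one-input variant of the same idea.

    using System;

    // hypothetical helper classes (a sketch only, not the original author's code)
    public static class sigmoid
    {
        // logistic activation: 1 / (1 + e^-x)
        public static double output(double x)
        {
            return 1.0 / (1.0 + Math.Exp(-x));
        }

        // derivative of the logistic function, written in terms of its output y
        public static double derivative(double y)
        {
            return y * (1.0 - y);
        }
    }

    public class Neuron
    {
        private static readonly Random rnd = new Random();
        private const double learningRate = 0.5;   // assumed value

        public double[] inputs  = new double[2];
        public double[] weights = new double[2];
        public double   biasWeight;
        public double   error;

        // weighted sum of the inputs plus the bias, squashed by the sigmoid
        public double output
        {
            get { return sigmoid.output(weights[0] * inputs[0] + weights[1] * inputs[1] + biasWeight); }
        }

        public void randomizeWeights()
        {
            weights[0] = rnd.NextDouble() * 2 - 1;
            weights[1] = rnd.NextDouble() * 2 - 1;
            biasWeight = rnd.NextDouble() * 2 - 1;
        }

        // delta rule: nudge each weight in proportion to its input and the neuron's error
        public void adjustWeights()
        {
            weights[0] += learningRate * error * inputs[0];
            weights[1] += learningRate * error * inputs[1];
            biasWeight += learningRate * error;
        }
    }

    // in this sketch the output neuron behaves exactly like a hidden neuron
    public class OutputNeuron : Neuron { }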
Example #2
        private static void train()
        {
            // the input values
            double[] inputs = { 0.10, 0.06, 0.25, 0.16, 0.81, 1f, 0.36, 0.49, 0.01, 0.04, 1.21, 1.44, 1.69 };

            // desired results
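            // (each target value appears to be ln(100 * input) / 100, i.e. a scaled natural logarithm)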
            double[] results = { 0.023026, 0.0179176, 0.03218876, 0.0277259, 0.0439445, 0.0460517, 0.035835189, 0.03891820298, 0, 0.0138629, 0.04795791, 0.049698133, 0.05129899 };

            // creating the neurons
            Neuron       N1     = new Neuron();
            Neuron       N2     = new Neuron();
            OutputNeuron O1     = new OutputNeuron();
            OutputNeuron O2     = new OutputNeuron();
            OutputNeuron Output = new OutputNeuron();

            // random weights
            N1.randomizeWeights();
            N2.randomizeWeights();
            O1.randomizeWeights();
            O2.randomizeWeights();
            Output.randomizeWeights();

            int epoch = 0;

Retry:
            epoch++;
            for (int i = 0; i < 13; i++)  // very important, do NOT train for only one example
            {
                // 1) forward propagation (calculates output)
                N1.input         = inputs[i];
                N2.input         = inputs[i];
                O1.inputs[0]     = N1.output;
                O1.inputs[1]     = N2.output;
                O2.inputs[0]     = N1.output;
                O2.inputs[1]     = N2.output;
                Output.inputs[0] = O1.output;
                Output.inputs[1] = O2.output;


                Console.WriteLine("Entrada: {0} Salida: {1}", 100 * N1.input, 100 * Output.output);

                // 2) back propagation (adjusts weights)

                // adjusts the weights of the output neuron, based on its error
                Output.error = sigmoid.derivative(Output.output) * (results[i] - Output.output);
                Output.adjustWeights();

                // then adjusts the hidden neurons' weights: each neuron's error is the error
                // of the layer above, weighted by the connection it feeds into
                O1.error = sigmoid.derivative(O1.output) * Output.error * Output.weights[0];
                O2.error = sigmoid.derivative(O2.output) * Output.error * Output.weights[1];
                N1.error = sigmoid.derivative(N1.output) * (O1.error * O1.weights[0] + O2.error * O2.weights[0]);
                N2.error = sigmoid.derivative(N2.output) * (O1.error * O1.weights[1] + O2.error * O2.weights[1]);

                N1.adjustWeights();
                N2.adjustWeights();
                O1.adjustWeights();
                O2.adjustWeights();
            }

            if (epoch < 200000)
            {
                goto Retry;
            }

            Console.ReadLine();
            // interactive loop: ask for inputs and show the trained network's output
            while (true)
            {
                Console.WriteLine("Introduzca una entrada:");
                float entrada = Convert.ToSingle(Console.ReadLine());
                N1.input = entrada / 100;
                N2.input = entrada / 100;

                O1.inputs[0] = N1.output;
                O1.inputs[1] = N2.output;

                O2.inputs[0] = N1.output;
                O2.inputs[1] = N2.output;

                Output.inputs[0] = O1.output;
                Output.inputs[1] = O2.output;
                Console.WriteLine("Salida: " + 100 * Output.output);
            }
        }
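
For reference, the weight updates both examples perform follow the standard delta rule for sigmoid units; the learning rate \eta is assumed to live inside adjustWeights, which is not shown:

\[
\delta_{\mathrm{out}} = \sigma'(o)\,(t - o), \qquad
\delta_{\mathrm{hidden}} = \sigma'(h)\,\delta_{\mathrm{out}}\,w_{\mathrm{hidden}\to\mathrm{out}}, \qquad
\Delta w_i = \eta\,\delta\,x_i
\]

where t is the desired result, o and h are the output and hidden activations, x_i is the input carried by weight w_i, and \sigma'(y) = y(1 - y) when expressed in terms of the sigmoid's output (which is exactly what sigmoid.derivative receives in the code).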