Example #1
0
File: learning.cs  Project: MxAR/SGP
        static Boolean huli_flag = true; // hu_layer_index flag: when false, there can be no further (hidden) layers!

        // Runs one forward pass over the network for the training pattern at
        // tp_indexer: input units first, then every hidden layer, then outputs.
        void learning(int tp_indexer)
        {
            // One-time setup, plus a debug dump of the IO unit table that
            // waits for a keypress before continuing.
            if (first_time)
            {
                learning_first_init();
                foreach (var x in IO_units)
                {
                    Console.Write(x + ", ");
                }
                first_time = false;
                Console.ReadLine();
            }

            // Input unit run: feed the test pattern into column 0 of IO_units.
            for (int inputIdx = 0; inputIdx < iu_index; inputIdx++)
            {
                neuron inputNeuron = new neuron();
                inputNeuron.n_funk(IO_units[inputIdx, 0], Program.test_pattern[inputIdx, tp_indexer]);
            }

            // Hidden unit run: each layer, each unit within that layer
            // (l_counter holds the per-layer unit count).
            for (int layer = 0; layer < hu_layer_index; layer++)
            {
                for (int unit = 0; unit < l_counter[layer]; unit++)
                {
                    neuron hiddenNeuron = new neuron();
                    hiddenNeuron.n_funk(hidden_units[unit, layer]);
                }
            }

            // Output unit run: column 1 of IO_units.
            for (int outputIdx = 0; outputIdx < ou_index; outputIdx++)
            {
                neuron outputNeuron = new neuron();
                outputNeuron.n_funk(IO_units[outputIdx, 1]);
            }
        }
Example #2
0
    // create a new neural network with n neurons and the activation function f
    //
    // n:    number of neurons to allocate.
    // type: key selecting the initializer to run (e.g. "Gaussian wavelet").
    //       An unknown key now raises ArgumentException with the offending
    //       value, instead of the cryptic KeyNotFoundException the raw
    //       dictionary indexer produced.
    public neuralnetwork(int n, string type)
    {
        Dictionary <string, Action> typeDict = new Dictionary <string, Action> {
            { "Gaussian wavelet", initGaussianWavelet }
        };

        // Validate the requested type up front so the caller gets a clear error.
        Action initializer;
        if (!typeDict.TryGetValue(type, out initializer))
        {
            throw new ArgumentException($"Unknown network type: '{type}'", nameof(type));
        }
        initializer();            // initialize to the requested type

        neurons = new neuron[n];
        for (int i = 0; i < n; i++)
        {
            neurons[i] = new neuron();
        }
    }
Example #3
0
            //https://stats.stackexchange.com/questions/47590/what-are-good-initial-weights-in-a-neural-network/186351#186351
            /// <summary>
            /// Initializes the layer: allocates the output buffer and, for
            /// non-input layers, builds nn neurons with Xavier/Glorot-style
            /// uniformly-distributed initial weights.
            /// </summary>
            /// <returns>The init.</returns>
            /// <param name="nn">Number of neurons</param>
            /// <param name="ni">Number of inputs per neuron</param>
            /// <param name="no">Number of outputs per neuron.</param>
            /// <param name="logFunc">Logistics function for the layer.</param>
            /// <param name="hb">If set to <c>true</c> will add bias to layer.</param>
            public void init(int nn, int ni, int no, activationFunction logFunc, bool hb)
            {
                if (hb)
                {
                    ni += 1; // bias is modeled as one extra input weight
                }
                hasBias = hb;
                outputs = new double[nn];
                // Input layers carry no neurons of their own — nothing to build.
                if (logFunc != activationFunction.input)
                {
                    neurons = new neuron[nn];
                    for (int i = 0; i < nn; i++)
                    {
                        neurons[i]         = new neuron();
                        neurons[i].logFunc = logFunc;

                        double[] initWeights = new double[ni];
                        // BUG FIX: "6 / (ni + no)" used integer division, which
                        // truncates to 0 whenever ni + no > 6 and left every
                        // weight initialized to 0; "6.0" forces floating-point
                        // math so the Glorot range is computed correctly.
                        double   r           = 0;
                        if (logFunc == activationFunction.sigmoid)
                        {
                            r = 4 * Math.Sqrt(6.0 / (ni + no));
                        }
                        else if (logFunc == activationFunction.tanh)
                        {
                            r = Math.Sqrt(6.0 / (ni + no));
                        }
                        else
                        {
                            r = 1; // fallback range — no published heuristic applied here
                        }
                        for (int j = 0; j < ni; j++)
                        {
                            initWeights[j] = random.NextDouble(-r, r);
                        }

                        neurons[i].init(ni, initWeights);
                    }
                }
            }
Example #4
0
    // Trains a tiny two-layer network on one-hot-encoded number pairs from
    // training.txt (first operand in input units 0-4's region, second operand
    // offset by +5), then runs an interactive prediction loop forever.
    // The duplicated encode/feed-forward code that previously appeared in both
    // the training and prediction paths is factored into two private helpers.
    public static void Main(string[] args)
    {
        neuron [] inputLayer   = new neuron[10];
        neuron [] outputLayer  = new neuron[10];
        string    trainingPath = "training.txt";

        string[] trainingData = File.ReadAllLines(trainingPath);
        float [] actualOutput;
        float [] expectedOutput;

        // Input neurons take a single input; output neurons take one input
        // per input-layer neuron.
        for (int i = 0; i < inputLayer.Length; i++)
        {
            inputLayer[i] = new neuron();
            inputLayer[i].initialize(1);
        }
        for (int i = 0; i < outputLayer.Length; i++)
        {
            outputLayer[i] = new neuron();
            outputLayer[i].initialize(inputLayer.Length);
        }

        // Training: 10000 epochs over the file, online per-sample updates.
        // Each line is "a b expected".
        for (int i = 0; i < 10000; i++)
        {
            for (int j = 0; j < trainingData.Length; j++)
            {
                string[] currLine = trainingData[j].Split(' ');
                SetOneHotInputs(inputLayer, int.Parse(currLine[0]), int.Parse(currLine[1]));
                actualOutput = FeedForward(inputLayer, outputLayer);

                expectedOutput = new float[outputLayer.Length];
                expectedOutput[int.Parse(currLine[2])] = 1.0f;
                for (int a = 0; a < outputLayer.Length; a++)
                {
                    outputLayer[a].adjust(expectedOutput[a], actualOutput[a]);
                }
            }
        }

        // Interactive prediction loop: prints the index of the strongest
        // output activation.
        while (true)
        {
            Console.Write("Enter 2 numbers that are greater than or equal to 1 and less than or equal to 5: ");
            string[] input = Console.ReadLine().Split(' ');
            // NOTE(review): a second number of 5 maps to input index 5 + 5 = 10,
            // which is out of range for a 10-element layer — confirm the
            // intended input encoding (0-4 vs the prompted 1-5).
            SetOneHotInputs(inputLayer, int.Parse(input[0]), int.Parse(input[1]));
            actualOutput = FeedForward(inputLayer, outputLayer);
            Console.WriteLine(actualOutput.ToList().IndexOf(actualOutput.Max()));
        }
    }

    // Zeroes every input unit, then one-hot encodes the two operands
    // (the second operand's unit is offset by 5).
    private static void SetOneHotInputs(neuron[] inputLayer, int first, int second)
    {
        for (int a = 0; a < inputLayer.Length; a++)
        {
            inputLayer[a].inputs[0] = 0.0f;
        }
        inputLayer[first].inputs[0]      = 1.0f;
        inputLayer[second + 5].inputs[0] = 1.0f;
    }

    // Runs one forward pass: evaluates the input layer, fans its outputs into
    // every output neuron, evaluates the output layer, and returns the output
    // activations as a fresh array.
    private static float[] FeedForward(neuron[] inputLayer, neuron[] outputLayer)
    {
        for (int a = 0; a < inputLayer.Length; a++)
        {
            inputLayer[a].calculate();
        }
        for (int a = 0; a < outputLayer.Length; a++)
        {
            for (int b = 0; b < inputLayer.Length; b++)
            {
                outputLayer[a].inputs[b] = inputLayer[b].output;
            }
        }
        for (int a = 0; a < outputLayer.Length; a++)
        {
            outputLayer[a].calculate();
        }
        float[] result = new float[outputLayer.Length];
        for (int a = 0; a < outputLayer.Length; a++)
        {
            result[a] = outputLayer[a].output;
        }
        return result;
    }
Example #5
0
        /// <summary>
        /// Training stage: fits a 2-input / hidden / 1-output network on the
        /// loaded samples by backpropagation, either online (per-sample weight
        /// updates) or offline/batch (updates averaged over the whole epoch),
        /// depending on which radio button is checked.
        /// </summary>
        private void EtapaAntrenare()
        {
            double gradient = 0.95; // learning rate

            x    = new neuron[nr_unitati_intrare];
            y    = new neuron(0);
            h    = new neuron[hidden_neurons];
            x[0] = new neuron(hidden_neurons);
            x[1] = new neuron(hidden_neurons);
            double E;

            for (int i = 0; i < hidden_neurons; i++)
            {
                h[i] = new neuron(1);
            }

            int k = 0;

            double E_epoca = 0;
            int    epoca   = 0;

            if (onlineRadio.Checked == true)
            {
                // Online training: weights updated after every sample.
                do
                {
                    errorLabel.Text = $"Eroare: {E_epoca}\nEpoca: {epoca}";
                    errorLabel.Update();
                    epoca++;
                    E_epoca = 0;
                    for (k = 0; k < nr_exemple; k++)
                    {
                        // Inputs scaled down by 300 (pixel coordinates to ~[0,1]).
                        x[0].Out = dateX1[k] / 300;
                        x[1].Out = dateX2[k] / 300;
                        y.Out    = 0;
                        for (int a = 0; a < hidden_neurons; a++)
                        {
                            h[a].Out = 0;
                            for (int i = 0; i < nr_unitati_intrare; i++)
                            {
                                // NOTE(review): the bias h[a].prag is added once
                                // per input here (and y.prag once per hidden
                                // neuron below) — confirm that is intended.
                                h[a].Out += x[i].W[a] * x[i].Out + h[a].prag;
                            }
                            h[a].Out = F(h[a].Out);
                            y.Out   += h[a].W[0] * h[a].Out + y.prag;
                        }
                        y.Out    = F(y.Out);
                        E        = Math.Pow((y.Out - (dateY[k])), 2);
                        E_epoca += E;
                        // dE/d(y.Out), scaled by the activation derivative.
                        double rez = 2 * (y.Out - dateY[k]) * F_derivat(y.Out);
                        for (int a = 0; a < hidden_neurons; a++)
                        {
                            for (int i = 0; i < nr_unitati_intrare; i++)
                            {
                                x[i].W[a] = x[i].W[a] - (gradient * rez * h[a].W[0] * F_derivat(h[a].Out) * x[i].Out);
                            }
                            h[a].prag -= gradient * (rez) * h[a].W[0] * F_derivat(h[a].Out);
                            h[a].W[0] -= (gradient * rez * h[a].Out);
                        }
                        y.prag -= (gradient * rez);
                    }
                } while (E_epoca > 0.1);
            }

            else if (offlineRadio.Checked == true)
            {
                // Offline (batch) training: gradients accumulated over the
                // whole epoch, averaged, and applied once per epoch.
                do
                {
                    // Accumulated gradients per hidden neuron:
                    // [0..ni-1] input weights, [ni] bias, [ni+1] output weight.
                    double[,] mediePropietati = new double[hidden_neurons, nr_unitati_intrare + 2];
                    double mediePragY = 0;
                    errorLabel.Text = $"Eroare: {E_epoca}\nEpoca: {epoca}\nGradient: {gradient}";
                    errorLabel.Update();
                    epoca++;
                    E_epoca = 0;
                    for (k = 0; k < nr_exemple; k++)
                    {
                        x[0].Out = dateX1[k] / 300;
                        x[1].Out = dateX2[k] / 300;
                        y.Out    = 0;
                        for (int a = 0; a < hidden_neurons; a++)
                        {
                            h[a].Out = 0;
                            for (int i = 0; i < nr_unitati_intrare; i++)
                            {
                                h[a].Out += x[i].W[a] * x[i].Out + h[a].prag;
                            }
                            h[a].Out = F(h[a].Out);
                            y.Out   += h[a].W[0] * h[a].Out + y.prag;
                        }
                        y.Out    = F(y.Out);
                        E        = Math.Pow((y.Out - (dateY[k])), 2);
                        E_epoca += E;
                        double rez = 2 * (y.Out - dateY[k]) * F_derivat(y.Out);
                        for (int a = 0; a < hidden_neurons; a++)
                        {
                            for (int i = 0; i < nr_unitati_intrare; i++)
                            {
                                mediePropietati[a, i] += (gradient * rez * h[a].W[0] * F_derivat(h[a].Out) * x[i].Out);
                            }
                            mediePropietati[a, nr_unitati_intrare]     += gradient * (rez) * h[a].W[0] * F_derivat(h[a].Out);
                            mediePropietati[a, nr_unitati_intrare + 1] += (gradient * rez * h[a].Out);
                        }
                        mediePragY += (gradient * rez);
                    }

                    // Apply the averaged updates.
                    for (int a = 0; a < hidden_neurons; a++)
                    {
                        for (int i = 0; i < nr_unitati_intrare; i++)
                        {
                            mediePropietati[a, i] /= nr_exemple;
                            x[i].W[a]             -= mediePropietati[a, i];
                        }
                        mediePropietati[a, nr_unitati_intrare] /= nr_exemple;

                        mediePropietati[a, nr_unitati_intrare + 1] /= nr_exemple;
                        h[a].prag -= mediePropietati[a, nr_unitati_intrare];
                        // BUG FIX: the output-weight gradient was divided by
                        // nr_exemple a second time on this line ("/= nr_exemple"
                        // inside the subtraction), shrinking the update by an
                        // extra factor of nr_exemple versus the other terms.
                        h[a].W[0] -= mediePropietati[a, nr_unitati_intrare + 1];
                    }
                    mediePragY /= nr_exemple;
                    y.prag     -= mediePragY;
                } while (E_epoca > 150);
            }
        }
Example #6
0
        /// <summary>
        /// Training stage: fits a 2-2-1 network on the samples read from
        /// input.txt using online backpropagation, refreshing the UI labels
        /// with the current weights and activations on every step.
        /// </summary>
        private void EtapaAntrenare()
        {
            int    nr_unitati_intrare = 2, hidden_neurons = 2;
            double gradient = 0.5; // learning rate
            double E;

            neuron[] x = new neuron[nr_unitati_intrare];
            neuron   y = new neuron(0);

            neuron[] h = new neuron[hidden_neurons];
            x[0] = new neuron(hidden_neurons);
            x[1] = new neuron(hidden_neurons);
            h[0] = new neuron(1);
            h[1] = new neuron(1);
            int k          = 0;
            int nr_exemple = 4;

            double[] dateX1 = new double[nr_exemple];
            double[] dateX2 = new double[nr_exemple];
            double[] dateY  = new double[nr_exemple];
            string   path   = "input.txt";

            // Each line is "x1 x2 y".
            // BUG FIX: the loop previously ran to lines.Length, so a file with
            // more than nr_exemple lines threw IndexOutOfRangeException when
            // writing into the fixed-size sample arrays; clamp to nr_exemple.
            string[] lines = File.ReadAllLines(path);
            for (int i = 0; i < lines.Length && i < nr_exemple; i++)
            {
                string[] words = lines[i].Split(' ');
                dateX1[i] = Convert.ToDouble(words[0]);
                dateX2[i] = Convert.ToDouble(words[1]);
                dateY[i]  = Convert.ToDouble(words[2]);
            }
            double E_epoca = 0;
            int    epoca   = 0;

            do
            {
                epoca++;

                label1.Text = $"Eroare: {E_epoca} \nEpoca: {epoca}";
                label1.Update();


                E_epoca = 0;
                for (k = 0; k < nr_exemple; k++)
                {
                    x[0].Out = dateX1[k];
                    x[1].Out = dateX2[k];
                    #region labels
                    // Mirror the current network state onto the form.
                    X0outLabel.Text = x[0].Out.ToString();
                    X1outLabel.Text = x[1].Out.ToString();
                    H0outLabel.Text = h[0].Out.ToString();
                    H1outLabel.Text = h[1].Out.ToString();
                    YoutLabel.Text  = y.Out.ToString();
                    X0W0_label.Text = x[0].W[0].ToString();
                    X0W1_label.Text = x[0].W[1].ToString();
                    X1W1_label.Text = x[1].W[1].ToString();
                    X1W0_label.Text = x[1].W[0].ToString();
                    H0W0_Label.Text = h[0].W[0].ToString();
                    H1W0_Label.Text = h[1].W[0].ToString();
                    Prag_H0.Text    = h[0].prag.ToString();
                    Prag_H1.Text    = h[1].prag.ToString();
                    Prag_Y.Text     = y.prag.ToString();
                    H0W0_Label.Update();
                    H1W0_Label.Update();
                    X0outLabel.Update();
                    X1outLabel.Update();
                    H0outLabel.Update();
                    H1outLabel.Update();
                    panel1.Update();
                    YoutLabel.Update();
                    X0W0_label.Update();
                    X0W1_label.Update();
                    X1W0_label.Update();
                    X1W1_label.Update();
                    Prag_H0.Update();
                    Prag_H1.Update();
                    Prag_Y.Update();

                    #endregion

                    // Forward pass.
                    y.Out = 0;
                    for (int a = 0; a < hidden_neurons; a++)
                    {
                        h[a].Out = 0;
                        for (int i = 0; i < nr_unitati_intrare; i++)
                        {
                            // NOTE(review): the bias h[a].prag is added once per
                            // input here (and y.prag once per hidden neuron
                            // below) — confirm that is intended.
                            h[a].Out += x[i].W[a] * x[i].Out + h[a].prag;
                        }
                        h[a].Out = F(h[a].Out);
                        y.Out   += h[a].W[0] * h[a].Out + y.prag;
                    }
                    y.Out    = F(y.Out);
                    E        = Math.Pow((y.Out - dateY[k]), 2);
                    E_epoca += E;

                    // Backward pass: dE/d(y.Out) scaled by the activation
                    // derivative, propagated to hidden and input weights.
                    double rez = 2 * (y.Out - dateY[k]) * F_derivat(y.Out);
                    for (int a = 0; a < hidden_neurons; a++)
                    {
                        for (int i = 0; i < nr_unitati_intrare; i++)
                        {
                            x[i].W[a] = x[i].W[a] - (gradient * rez * h[a].W[0] * F_derivat(h[a].Out) * x[i].Out);
                        }
                        h[a].prag -= gradient * (rez) * h[a].W[0] * F_derivat(h[a].Out);
                        h[a].W[0] -= (gradient * rez * h[a].Out);
                    }
                    y.prag -= (gradient * rez);
                }
            } while (E_epoca > Math.Pow(10, -3));
        }
Example #7
0
    // Feeds the sensor readings into the target AI's genome network and
    // dispatches the three output actions (turn, move, scream).
    //
    // inputs layout (per the original notes):
    //   [0] gender, [1] size, [2] sight_acc, [3] dist,
    //   [4] enemies_insight, [5] aimed
    private void sensor_input(float[] inputs, GameObject target_AI)
    {
        int target_gene_index = testants_ID[target_AI];

        // Zero every activation, then load the raw inputs into layer 0.
        for (int i = 0; i < N_HIDDEN_LAYERS + 2; i++)
        {
            for (int j = 0; j < gene_pool[target_gene_index].neuro_network[i].Length; j++)
            {
                if (i == 0)
                {
                    gene_pool[target_gene_index].neuro_network[0][j].value = inputs[j];
                }
                else
                {
                    gene_pool[target_gene_index].neuro_network[i][j].value = 0;
                }
            }
        }

        // Forward propagation: for each layer before the output layer, every
        // neuron adds weight * value into the next layer's neurons.
        // (Replaces six copy-pasted per-neuron loops that hard-coded indices
        // 0-5; iterating the layer's actual length is equivalent for 6-wide
        // layers and correct for any other width.)
        for (int i = 0; i < N_HIDDEN_LAYERS + 1; i++)
        {
            neuron[] layer = gene_pool[target_gene_index].neuro_network[i];
            for (int n = 0; n < layer.Length; n++)
            {
                neuron geno = layer[n];
                for (int k = 0; k < geno.conn_weight.Length; k++)
                {
                    gene_pool[target_gene_index].neuro_network[i + 1][k].value += geno.conn_weight[k] * geno.value;
                }
            }
        }

        //output actions
        turn_to_face(gene_pool[target_gene_index].neuro_network[N_HIDDEN_LAYERS + 1][0].value, target_AI);
        move(gene_pool[target_gene_index].neuro_network[N_HIDDEN_LAYERS + 1][1].value, (int)inputs[0], inputs[1], target_AI);
        scream(gene_pool[target_gene_index].neuro_network[N_HIDDEN_LAYERS + 1][2].value, (int)inputs[0]);
    }