Example 1
        // Reads the number of hidden layers from the console, builds the network,
        // and trains it until the output layer is within 10 of its target,
        // printing the weight matrices after every pass.
        public ANN()
        {
            Console.Write("Input the level of hidden layer: ");
            Console.WriteLine("This is a sample for Git hub TEST! Again!");
            int          layer_num = Int32.Parse(Console.ReadLine()) + 2;
            Initial_DATA initial = new Initial_DATA(layer_num);

            // Keep training until the output layer's latest value is within 10 of its target.
            while (Math.Abs(initial.layers[layer_num - 1].original[0]
                            - initial.layers[layer_num - 1].values[initial.layers[layer_num - 1].values.Count - 1]) > 10)
            {
                // Adjust the weights, working backwards from the output layer.
                Back_Propagation(ref initial);

                // Recompute every layer's output values from its inputs and weights.
                for (int i = 1; i < layer_num; i++)
                {
                    initial.layers[i].Result_Values(initial.layers[i].original, initial.layers[i].weights);
                }

                // Print the current weight matrix of each layer.
                for (int i = 1; i < layer_num; i++)
                {
                    for (int j = 0; j < initial.layers[i].weights.Count; j++)
                    {
                        for (int k = 0; k < initial.layers[i].weights[j].Length; k++)
                        {
                            Console.Write($"a[{j}][{k}]={initial.layers[i].weights[j][k]} ");
                        }
                        Console.WriteLine();
                    }
                }
            }
        }
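
The Initial_DATA type used above is not shown on this page. Judging only from the members the two examples access (layer_num, layers, and the per-layer neurons, original, values, weights, and Result_Values), a minimal sketch of its assumed shape could look like the following; the field types and the body of Result_Values are guesses, not the original implementation.

using System.Collections.Generic;

// Hypothetical reconstruction of the types these examples depend on.
// Only the members that ANN() and Back_Propagation() actually touch are included.
public class Layer
{
    public int neurons;                                   // neuron count in this layer
    public List<float>   original = new List<float>();    // target (or back-derived) values
    public List<float>   values   = new List<float>();    // computed output values
    public List<float[]> weights  = new List<float[]>();  // one weight array per neuron

    // Assumed to recompute this layer's values from the given inputs and weights.
    public void Result_Values(List<float> inputs, List<float[]> w)
    {
        values.Clear();
        for (int j = 0; j < w.Count; j++)
        {
            float sum = 0f;
            for (int k = 0; k < w[j].Length && k < inputs.Count; k++)
            {
                sum += inputs[k] * w[j][k];
            }
            values.Add(sum);
        }
    }
}

public class Initial_DATA
{
    public int layer_num;
    public List<Layer> layers = new List<Layer>();

    public Initial_DATA(int layer_num)
    {
        this.layer_num = layer_num;
        // The real constructor presumably sizes each layer, seeds the weights,
        // and fills in the input and target values; this stub only allocates layers.
        for (int i = 0; i < layer_num; i++)
        {
            layers.Add(new Layer());
        }
    }
}

The sketch deliberately omits bias terms and activation functions, since nothing in the two examples references them.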
Example 2
        // Walks the layers from the output back to the input. For each neuron it
        // computes an error (target minus value) and, when that error is non-zero,
        // adds weight / error to every weight of the neuron. Hidden layers first
        // derive their targets from the layer above.
        void Back_Propagation(ref Initial_DATA list)
        {
            int size = list.layer_num;

            for (int i = size - 1; i > 0; i--)
            {
                // Output layer: the error is the target minus the computed value.
                if (i == size - 1)
                {
                    float[] sub_val = new float[list.layers[i].neurons];
                    for (int j = 0; j < list.layers[i].neurons; j++)
                    {
                        sub_val[j] = list.layers[i].original[j] - list.layers[i].values[j];
                        for (int k = 0; k < list.layers[i].weights[j].Length; k++)
                        {
                            if (sub_val[j] != 0) // skip zero error to avoid division by zero
                            {
                                list.layers[i].weights[j][k] += list.layers[i].weights[j][k] / sub_val[j];
                            }
                        }
                    }
                }

                else
                {
                    // Hidden layer: first derive target values from the layer above,
                    // then apply the same weight update as for the output layer.
                    float[] sub_val = new float[list.layers[i].neurons];
                    for (int j = 0; j < list.layers[i].neurons; j++)
                    {
                        List <float> original = new List <float>();
                        for (int k = 0; k < list.layers[i + 1].original.Count; k++)
                        {
                            original.Add(list.layers[i + 1].original[k] / list.layers[i + 1].weights[j][k]);
                        }

                        list.layers[i].original.Add(original.Sum());
                    }

                    for (int j = 0; j < list.layers[i].neurons; j++)
                    {
                        sub_val[j] = list.layers[i].original[j] - list.layers[i].values[j];
                        for (int k = 0; k < list.layers[i].weights[j].Length; k++)
                        {
                            if (sub_val[j] != 0)
                            {
                                list.layers[i].weights[j][k] += list.layers[i].weights[j][k] / sub_val[j];
                            }
                        }
                    }
                }
            }
        }
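
Neither snippet shows an entry point. Assuming both members belong to the same ANN class, a minimal (hypothetical) way to exercise them is simply to construct it; the constructor prompts for the hidden-layer count, trains the network, and prints the weights after each pass.

// Hypothetical entry point; the original project defines its own Main.
public static class Demo
{
    public static void Main()
    {
        new ANN(); // prompts for input, then trains until the output error is within 10
    }
}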