Example No. 1
        public double Run(KLearnData sample)
        {
            double error = 0.0;

            _Network.Compute(sample._Input);

            int winner = _Network.GetWinner();

            // get layer of the network
            KLayer layer = _Network[0];

            layer[winner].AddSymbol(sample._Symbol);

            // check the learning radius: zero means update only the winner,
            // otherwise update the whole Gaussian neighbourhood around it
            if (_LearningRadius == 0)
            {
                KNeuron neuron = layer[winner];

                for (int i = 0, n = neuron._Weights.Length; i < n; i++)
                {
                    // accumulate the error and pull the winner towards the sample
                    double e = sample._Input[i] - neuron[i];
                    error += Math.Abs(e);
                    neuron[i] += e * _LearningRate;
                }
            }
            else
            {
                // winner's X and Y
                int wx = winner % _Width;
                int wy = winner / _Width;

                // walk through all neurons of the layer
                for (int j = 0, m = layer._Neurons.Length; j < m; j++)
                {
                    KNeuron neuron = layer[j];

                    int dx = (j % _Width) - wx;
                    int dy = (j / _Width) - wy;

                    // update factor ( Gaussian based )
                    double factor = Math.Exp(-(double)(dx * dx + dy * dy) / _SquaredRadius2);

                    // update weight of the neuron
                    for (int i = 0, n = neuron._Weights.Length; i < n; i++)
                    {
                        // calculate the error
                        double e = (sample._Input[i] - neuron[i]) * factor;
                        error += Math.Abs(e);
                        // update weight
                        neuron[i] += e * _LearningRate;
                    }
                }
            }

            return error;
        }
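
The core of Run() above is the Gaussian neighbourhood: every neuron in the layer is pulled towards the sample, scaled by how close it sits to the winner on the 2-D grid. The sketch below isolates just that factor computation; the map width, radius, winner index and the assumption that _SquaredRadius2 stands for 2 * radius * radius are illustrative and not taken from these listings.

using System;

// Standalone sketch (not project code): how the Gaussian factor used in Run()
// falls off with grid distance from the winner.
// Assumption: _SquaredRadius2 corresponds to 2 * radius * radius.
class NeighbourhoodSketch
{
    static void Main()
    {
        int width = 8;                       // hypothetical map width
        double radius = 2.0;                 // hypothetical learning radius
        double squaredRadius2 = 2 * radius * radius;

        int winner = 27;                     // hypothetical winner index
        int wx = winner % width;
        int wy = winner / width;

        for (int j = 0; j < width * width; j++)
        {
            int dx = (j % width) - wx;
            int dy = (j / width) - wy;
            double factor = Math.Exp(-(double)(dx * dx + dy * dy) / squaredRadius2);

            // neurons near the winner get a factor close to 1, distant ones close to 0
            if (factor > 0.1)
                Console.WriteLine($"neuron {j}: factor {factor:F3}");
        }
    }
}
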
Example No. 2
        public static KLearnData[] GetLearnData()
        {
            // columns: DISTANCE, EA_ATTACK, EA_STAND, EA_NEAR, EA_DODGE, MA_DODGE, MA_ATTACK, MA_STAND, MA_NEAR, MA_FAR, OUTPUT
            double[,] dumyData = new double[row, col]
            {
                { 0,   1, 0, 0, 0, 1, 0, 0, 0, 0, (double)ACTION.DODGE  },
                { 0.1, 0, 1, 0, 0, 0, 1, 0, 0, 0, (double)ACTION.ATTACK },
                { 0.2, 0, 1, 0, 0, 0, 1, 0, 0, 0, (double)ACTION.ATTACK },
                { 0.4, 0, 1, 0, 0, 0, 0, 0, 1, 0, (double)ACTION.ATTACK },
            };

            KLearnData[] data = new KLearnData[row];

            for (int j = 0; j < row; j++)
            {
                data[j] = new KLearnData();
                int size = col - 1;
                data[j]._Input = new double[size];

                for (int i = 0; i < col; i++)
                {

                    if (i == col - 1)
                    {
                        int na = (int) dumyData[j, i];
                        data[j]._Symbol = ((ACTION)na).ToString();
                    }
                    else
                    {
                        data[j]._Input[i] = dumyData[j, i];
                    }

                }

            }

            return data;
        }
Example No. 3
        public static KLearnData[] GetLearnData()
        {
            // columns: DISTANCE, EA_ATTACK, EA_STAND, EA_NEAR, EA_DODGE, MA_DODGE, MA_ATTACK, MA_STAND, MA_NEAR, MA_FAR, OUTPUT
            double[,] dumyData = new double[row, col]
            {
                { 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, (double)ACTION.DODGE },
                { 0.1, 0, 1, 0, 0, 0, 1, 0, 0, 0, (double)ACTION.ATTACK },
                { 0.2, 0, 1, 0, 0, 0, 1, 0, 0, 0, (double)ACTION.ATTACK },
                { 0.4, 0, 1, 0, 0, 0, 0, 0, 1, 0, (double)ACTION.ATTACK },
            };

            KLearnData[] data = new KLearnData[row];

            for (int j = 0; j < row; j++)
            {
                data[j] = new KLearnData();
                int size = col - 1;
                data[j]._Input = new double[size];

                for (int i = 0; i < col; i++)
                {
                    if (i == col - 1)
                    {
                        int na = (int)dumyData[j, i];
                        data[j]._Symbol = ((ACTION)na).ToString();
                    }
                    else
                    {
                        data[j]._Input[i] = dumyData[j, i];
                    }
                }
            }

            return data;
        }
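
Both versions of GetLearnData() read the same hard-coded table: the first col - 1 values of each row are the input features, and the last column is the output action, stored as a double and converted back to its enum name for the symbol. Only the usage of KLearnData and ACTION is visible in these listings, so the sketch below is a hypothetical reconstruction of the shape they appear to have, not the project's actual definitions.

// Hypothetical shapes inferred from usage in the listings above; the real
// KLearnData and ACTION types live elsewhere in the project.
public enum ACTION
{
    DODGE,     // members taken from the table rows; others may exist
    ATTACK
}

public class KLearnData
{
    public double[] _Input;    // first col - 1 values of a row
    public string _Symbol;     // name of the ACTION stored in the last column
}
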
Example No. 4
        public void AddData(InputData input, OUTPUT output)
        {
            StringBuilder s = new StringBuilder();
            s.Append("[");

            int c = 0;
            foreach (var item in input._Input)
            {
                // the *_STAND flags open a bracketed group in the display string
                // and the matching *_DODGE flags close it
                switch ((INPUT)c)
                {
                    case INPUT.ES_STAND:
                    case INPUT.MS_STAND:
                    case INPUT.MS_STAND2:
                    case INPUT.MS_STAND3:
                        s.Append("[");
                        s.Append(item.ToString());
                        break;
                    case INPUT.ES_DODGE:
                    case INPUT.MS_DODGE:
                    case INPUT.MS_DODGE2:
                    case INPUT.MS_DODGE3:
                        s.Append(item.ToString());
                        s.Append("]");

                        break;
                    default:
                        s.Append(item.ToString());
                        break;

                }
                c++;

            }

            s.Append("] [");

            s.Append(output.ToString());

            s.Append("]");

            _form.listBox1.Items.Insert(0,s.ToString());

            // wrap the observation as a labelled sample and train on it immediately
            KLearnData data = new KLearnData();
            data._Input = input._Input;
            data._Symbol = output.ToString();

            _Learning.Run(data);
        }
Example No. 5
        public double Run(KLearnData sample)
        {
            double error = 0.0;

            _Network.Compute(sample._Input);

            int winner = _Network.GetWinner();

            // get layer of the network
            KLayer layer = _Network[0];

            layer[winner].AddSymbol(sample._Symbol);

            // check the learning radius: zero means update only the winner,
            // otherwise update the whole Gaussian neighbourhood around it
            if (_LearningRadius == 0)
            {
                KNeuron neuron = layer[winner];

                for (int i = 0, n = neuron._Weights.Length; i < n; i++)
                {
                    // accumulate the error and pull the winner towards the sample
                    double e = sample._Input[i] - neuron[i];
                    error += Math.Abs(e);
                    neuron[i] += e * _LearningRate;
                }
            }
            else
            {
                // winner's X and Y
                int wx = winner % _Width;
                int wy = winner / _Width;

                // walk through all neurons of the layer
                for (int j = 0, m = layer._Neurons.Length; j < m; j++)
                {
                    KNeuron neuron = layer[j];

                    int dx = (j % _Width) - wx;
                    int dy = (j / _Width) - wy;

                    // update factor ( Gaussian based )
                    double factor = Math.Exp(-(double)(dx * dx + dy * dy) / _SquaredRadius2);

                    // update weight of the neuron
                    for (int i = 0, n = neuron._Weights.Length; i < n; i++)
                    {
                        // calculate the error
                        double e = (sample._Input[i] - neuron[i]) * factor;
                        error += Math.Abs(e);
                        // update weight
                        neuron[i] += e * _LearningRate;
                    }
                }
            }

            return error;
        }
Example No. 6
        public double RunEpoch(KLearnData[] input)
        {
            double error = 0.0;

            // walk through all training samples
            foreach (KLearnData sample in input)
            {
                error += Run(sample);
            }

            // return summary error
            return error;
        }
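
One possible way to wire these pieces together is to build the sample set once with GetLearnData() and call RunEpoch() until the summed error settles. The ISomLearning interface, the epoch limit and the 0.01 threshold below are illustrative stand-ins for whatever class actually hosts Run/RunEpoch; they are not taken from the project.

using System;

// Hypothetical training driver: the interface only mirrors the Run/RunEpoch
// signatures shown above, it is not a type from the project.
public interface ISomLearning
{
    double Run(KLearnData sample);
    double RunEpoch(KLearnData[] input);
}

public static class TrainingDriver
{
    public static void Train(ISomLearning learning, KLearnData[] samples)
    {
        for (int epoch = 0; epoch < 1000; epoch++)
        {
            // summed per-sample error, as returned by RunEpoch
            double error = learning.RunEpoch(samples);

            if (error < 0.01)   // hypothetical stopping criterion
            {
                Console.WriteLine($"converged after {epoch + 1} epochs");
                break;
            }
        }
    }
}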