// Example 1
        /// <summary>
        /// Trains a small fully connected network (input + 2 layers) for 50 epochs
        /// using feed-forward plus manual backpropagation of errors.
        /// NOTE(review): targetOutput is never assigned, so the network is trained
        /// toward an all-zero target — confirm this is intentional.
        /// </summary>
        public static void Main(string[] args)
        {
            //// inputs
            Byte[] inputByte = MyMath.GetRandomBytes(2);

            double n           = 0.1; // learning rate
            int    layersCount = 2;

            Layer[] layers = new Layer[layersCount + 1];

            // Layer 0 is a pass-through input layer: bytes normalized to [0, 1].
            layers[0]           = new Layer();
            layers[0].cellCount = inputByte.Length;
            layers[0].output    = new double[inputByte.Length];
            for (int i = 0; i < inputByte.Length; i++)
            {
                layers[0].output[i] = inputByte[i] / 255.0;
            }

            // Hidden layer: 3 linear cells, randomly initialized.
            layers[1]           = new Layer();
            layers[1].cellCount = 3;
            layers[1].function  = TransferFunction.Linear;
            layers[1].bias      = MyMath.GetRandomDoubles(layers[1].cellCount);
            layers[1].weights   = MyMath.GetRandom(layers[0].cellCount, layers[1].cellCount);

            // Output layer: a single linear cell.
            layers[2]           = new Layer();
            layers[2].cellCount = 1;
            layers[2].function  = TransferFunction.Linear;
            layers[2].bias      = MyMath.GetRandomDoubles(layers[2].cellCount);
            layers[2].weights   = MyMath.GetRandom(layers[1].cellCount, layers[2].cellCount);

            for (int z = 0; z < 50; z++)
            {
                //feed forward all
                for (int i = 1; i < layers.Length; i++)
                {
                    layers[i].FeedForward(layers[i - 1].output);
                }

                //backpropagate last layer
                int l = layers.Length - 1;

                // All zeros by default — the training target (see NOTE above).
                double[] targetOutput = new double[layers[l].cellCount];

                //calculate errors (output minus target)
                layers[l].errors = new double[layers[l].cellCount];
                for (int i = 0; i < targetOutput.Length; i++)
                {
                    layers[l].errors[i] = layers[l].output[i] - targetOutput[i];
                }

                Console.WriteLine("FeedForward error: " + Math.Round(MyMath.Sum(layers[l].errors), 4));

                //calculate weights
                ChangeWeights(n, layers, l);

                //backpropagate other layers
                for (l = layers.Length - 2; l > 0; l--)
                {
                    //calculate errors: each hidden error is the weighted SUM of the
                    //next layer's errors. BUG FIX: the original used '=' inside the
                    //inner loop, so only the last j's contribution was kept.
                    layers[l].errors = new double[layers[l].cellCount];
                    for (int i = 0; i < layers[l].cellCount; i++)
                    {
                        for (int j = 0; j < layers[l + 1].cellCount; j++)
                        {
                            layers[l].errors[i] += layers[l + 1].errors[j] * layers[l + 1].weights[i, j];
                        }
                    }

                    //calculate weights
                    ChangeWeights(n, layers, l);
                }
            }

            Console.ReadKey();
        }
// Example 2
        /// <summary>
        /// Computes this layer's output from <paramref name="input"/>:
        /// output = transfer(input * weights + bias), applied per cell.
        /// Overwrites the <c>output</c> field in place.
        /// </summary>
        /// <param name="input">Output vector of the previous layer.</param>
        public void FeedForward(double[] input)
        {
            // Weighted sum, then add per-cell bias.
            output = MyMath.Multiply(input, weights);

            for (int i = 0; i < output.Length; i++)
            {
                output[i] += bias[i];
            }

            double max = 0;

            // Competitive transfer needs the layer-wide maximum before the per-cell pass.
            if (function == TransferFunction.Competitive)
            {
                max = MyMath.GetMax(output);
            }

            for (int i = 0; i < cellCount; i++)
            {
                double n = output[i]; // net input of cell i
                double a = 0;         // activation to compute
                switch (function)
                {
                case TransferFunction.Hard_Limit:
                {
                    a = (n >= 0) ? 1 : 0;
                }
                break;

                case TransferFunction.Symmetrical_Hard_Limit:
                {
                    a = (n >= 0) ? 1 : -1;
                }
                break;

                case TransferFunction.Linear:
                {
                    a = n;
                }
                break;

                case TransferFunction.Saturating_Linear:
                {
                    // Clamp to [0, 1].
                    if (n > 1)
                    {
                        a = 1;
                    }
                    else if (n < 0)
                    {
                        a = 0;
                    }
                    else
                    {
                        a = n;
                    }
                }
                break;

                case TransferFunction.Symmetric_Saturating_Linear:
                {
                    // Clamp to [-1, 1]. BUG FIX: the original returned 0 for
                    // n < -1; the symmetric saturating linear function (satlins)
                    // saturates at -1 on the negative side.
                    if (n > 1)
                    {
                        a = 1;
                    }
                    else if (n < -1)
                    {
                        a = -1;
                    }
                    else
                    {
                        a = n;
                    }
                }
                break;

                case TransferFunction.Log_Sigmoid:
                {
                    // 1 / (1 + e^-n), range (0, 1).
                    double e = Math.Exp(-n);
                    a = 1.0 / (1 + e);
                }
                break;

                case TransferFunction.Hyperbolic_Tangent_Sigmoid:
                {
                    // tanh(n), range (-1, 1).
                    double e1 = Math.Exp(n);
                    double e2 = Math.Exp(-n);
                    a = (e1 - e2) / (e1 + e2);
                }
                break;

                case TransferFunction.Positive_Linear:
                {
                    // ReLU: max(0, n).
                    a = (n < 0) ? 0 : n;
                }
                break;

                case TransferFunction.Competitive:
                {
                    // Winner takes all: 1 for the maximal cell(s), 0 otherwise.
                    a = (n == max) ? 1 : 0;
                }
                break;
                }
                output[i] = a;
            }
        }
// Example 3
        //Back Propagate for Hidden Neuron
        /// <summary>
        /// Backpropagation step for a hidden neuron: computes this neuron's delta
        /// from the delta handed down by downstream neurons, updates the bias
        /// (beta), accumulates this delta into each input neuron's
        /// handedDownDelta, and stages updated weights in newWeights.
        /// NOTE(review): delta and beta are recomputed/updated once per input
        /// neuron because error depends on weights[i] — confirm the repeated
        /// beta update per input is intentional.
        /// </summary>
        /// <param name="net">Grid of neurons; inputNeurons[i] holds [row, col] coordinates into it.</param>
        /// <param name="mPos">Position parameter — unused in this method body.</param>
        /// <param name="learnRate">Learning rate for the weight update.</param>
        /// <param name="bLearnRate">Learning rate for the bias (beta) update.</param>
        public void BackPropag(vNeuron[,] net, int mPos, double learnRate, double bLearnRate)
        {
            for (int i = 0; i < this.inputNeurons.Count; i++)
            {
                // error = W^T * handedDownDelta (column vector).
                List <List <double> > error = MyMath.matMulti(MyMath.transposeMat(this.weights[i]), MyMath.makeVertiColMat(this.handedDownDelta[0]));

                // Derivative of the activation, evaluated at the stored output.
                List <double> fPrime = new List <double>();
                if (this.actFunc == actFuncType.logistic)
                {
                    for (int j = 0; j < this.output.Count; j++)
                    {
                        fPrime.Add(MyMath.logisticPrimeFunc(this.output[j]));
                    }
                }
                else if (this.actFunc == actFuncType.identity)
                {
                    // Identity activation: derivative is constant 1.
                    for (int j = 0; j < this.output.Count; j++)
                    {
                        fPrime.Add(1d);
                    }
                }

                // delta = error ⊙ f'(output); beta -= bLearnRate * delta.
                List <List <double> > result = MyMath.transposeMat(MyMath.pointwiseMatMulti(error, MyMath.makeVertiColMat(fPrime)));
                this.delta = result[0];
                this.beta  = MyMath.matSub(MyMath.makeHorizonColMat(this.beta), MyMath.scalarProd(bLearnRate, MyMath.makeHorizonColMat(this.delta)))[0];

                // Coordinates of the i-th upstream neuron in the net grid.
                int m = inputNeurons[i][0];
                int n = inputNeurons[i][1];

                // Accumulate this delta into the upstream neuron's handed-down
                // delta, initializing it with zeros on first contribution.
                if (net[m, n].handedDownDelta.Count > 0)
                {
                    net[m, n].handedDownDelta[0] = MyMath.colAdd(net[m, n].handedDownDelta[0], this.delta);
                }
                else
                {
                    net[m, n].handedDownDelta.Add(Enumerable.Repeat(0d, delta.Count).ToList());
                    net[m, n].handedDownDelta[0] = MyMath.colAdd(net[m, n].handedDownDelta[0], this.delta);
                }

                // Gradient-descent weight update: W -= learnRate * (delta * inputs^T),
                // staged in newWeights rather than applied immediately.
                List <List <double> > matrix = this.weights[i];
                this.newWeights.Add(MyMath.matSub(matrix, MyMath.scalarProd(learnRate, MyMath.matMulti(MyMath.makeVertiColMat(this.delta), MyMath.makeHorizonColMat(this.inputs[i])))));
            }
        }
// Example 4
        //Back Propagate for Output Neuron
        /// <summary>
        /// Backpropagation step for an output neuron: computes the output error
        /// against <paramref name="expOut"/>, derives delta, updates the bias
        /// matrix (beta), hands delta down to each input neuron, and stages
        /// updated weights in newWeights.
        /// </summary>
        /// <param name="net">Grid of neurons; inputNeurons[i] holds [row, col] coordinates into it.</param>
        /// <param name="expOut">Expected output matrix; entries equal to -2 are skipped (presumably a "don't care" sentinel — confirm with caller).</param>
        /// <param name="learnRate">Learning rate for the weight update.</param>
        /// <param name="bLearnRate">Learning rate for the bias (beta) update.</param>
        public void BackPropag(mNeuron[,] net, List <List <double> > expOut, double learnRate, double bLearnRate)
        {
            // error[i][j] = actual - expected, zeroed where expOut holds the -2 sentinel.
            List <List <double> > error = main.createNewMatrix(this.postActOutput.Count, this.postActOutput[0].Count);

            for (int i = 0; i < expOut.Count; i++)
            {
                for (int j = 0; j < expOut[0].Count; j++)
                {
                    if (expOut[i][j] != -2)
                    {
                        error[i][j] = this.postActOutput[i][j] - expOut[i][j];
                    }
                    else
                    {
                        error[i][j] = 0;
                    }
                }
            }

            // Derivative of the activation, evaluated element-wise on `output`
            // (NOTE(review): error uses postActOutput while fPrime uses output —
            // confirm these refer to the intended pre/post-activation values).
            List <List <double> > fPrime = new List <List <double> >();

            if (this.actFunc == actFuncType.logistic)
            {
                for (int i = 0; i < this.output.Count; i++)
                {
                    fPrime.Add(new List <double>());
                    for (int j = 0; j < this.output[0].Count; j++)
                    {
                        fPrime[i].Add(MyMath.logisticPrimeFunc(this.output[i][j]));
                    }
                }
            }
            else if (this.actFunc == actFuncType.identity)
            {
                // Identity activation: derivative is constant 1.
                for (int i = 0; i < this.output.Count; i++)
                {
                    fPrime.Add(new List <double>());
                    for (int j = 0; j < this.output[0].Count; j++)
                    {
                        fPrime[i].Add(1d);
                    }
                }
            }

            // delta = error ⊙ f'(output); beta -= bLearnRate * delta.
            List <List <double> > result = MyMath.pointwiseMatMulti(error, fPrime);

            this.delta = result;
            this.beta  = MyMath.matSub(this.beta, MyMath.scalarProd(bLearnRate, this.delta));

            for (int i = 0; i < this.inputNeurons.Count; i++)
            {
                // Coordinates of the i-th upstream neuron in the net grid.
                int m = inputNeurons[i][0];
                int n = inputNeurons[i][1];

                // Hand this delta down to the upstream neuron (accumulated).
                net[m, n].handedDownDelta = MyMath.matAdd(net[m, n].handedDownDelta, this.delta);

                // Gradient-descent weight update: W -= learnRate * (delta * inputs^T),
                // staged in newWeights rather than applied immediately.
                List <List <double> > matrix   = this.weights[i];
                List <List <double> > gradient = MyMath.matMulti(this.delta, MyMath.transposeMat(this.inputs[i]));
                this.newWeights.Add(MyMath.matSub(matrix, MyMath.scalarProd(learnRate, gradient)));
            }
        }