Example #1
0
 /// <summary>
 /// Builds a layer of <paramref name="size"/> nodes, each wired to the
 /// previous layer of <paramref name="previousSize"/> nodes.
 /// </summary>
 /// <param name="size">Number of nodes in this layer.</param>
 /// <param name="previousSize">Node count of the preceding layer (weights per node).</param>
 /// <param name="activation">Forwarded to each node; presumably toggles its activation function — TODO confirm in NeuralNode.</param>
 public NeuralLayer(int size, int previousSize, bool activation = false)
 {
     nodes = new NeuralNode[size];
     for (int index = 0; index < nodes.Length; index++)
     {
         nodes[index] = new NeuralNode(previousSize, activation);
     }
 }
Example #2
0
        /// <summary>
        /// Performs one gradient-descent step: forward pass through all layers,
        /// then backpropagation of the cross-entropy error.
        /// NOTE(review): the backward pass is hard-coded for a two-layer network —
        /// the hidden-layer update reads <paramref name="inputData"/> directly, which
        /// is only correct when that layer is the first one. Confirm before adding layers.
        /// </summary>
        /// <param name="inputData">Activations fed into the first layer.</param>
        /// <param name="expectedOutputData">Target output, one entry per output node.</param>
        public void Train(float[] inputData, float[] expectedOutputData)
        {
            timesTrained++;

            // Forward pass: activated output of each layer, in order.
            List<float[]> forwardfeed = new List<float[]>();
            float[] lastOutput = inputData;
            for (int i = 0; i < layers.Length; i++)
            {
                lastOutput = layers[i].Evaluate(lastOutput);
                forwardfeed.Add(lastOutput);
            }

            // Pre-activation ("raw") outputs, needed for the tanh derivative.
            List<float[]> rawForwardFeed = new List<float[]>();
            rawForwardFeed.Add(layers[0].EvaluateRaw(inputData));
            for (int i = 1; i < layers.Length; i++)
            {
                rawForwardFeed.Add(layers[i].EvaluateRaw(forwardfeed[i - 1]));
            }

            List<float[,]> newWeights = new List<float[,]>();
            int currentLayer = layers.Length - 1, lastLayer = layers.Length - 1;

            // --- Output layer ---
            float[,] newW = new float[layers[currentLayer].nodes.Length, layers[currentLayer].nodes[0].weight.Length];
            for (int i = 0; i < layers[currentLayer].nodes.Length; i++)
            {
                NeuralNode node = layers[currentLayer].nodes[i];
                // Error term for output node i; independent of j, so computed once
                // instead of once per weight as before (same value, fewer calls).
                float delta = DeriativeOfCrossEntropy(expectedOutputData[i], forwardfeed[currentLayer][i])
                              * Neural.TanhDer(rawForwardFeed[currentLayer][i]);
                for (int j = 0; j < node.weight.Length; j++)
                {
                    float gradient = delta * forwardfeed[currentLayer - 1][j];
                    newW[i, j] = node.weight[j] - Neural.learnRate * gradient;
                }
            }
            newWeights.Insert(0, newW);

            // --- Hidden layer (assumed to be the first layer; see summary) ---
            currentLayer--;
            newW = new float[layers[currentLayer].nodes.Length, layers[currentLayer].nodes[0].weight.Length];
            for (int i = 0; i < layers[currentLayer].nodes.Length; i++)
            {
                NeuralNode node = layers[currentLayer].nodes[i];
                // Back-propagated error for hidden node i: sum over all output nodes.
                // This depends only on i, so it is hoisted out of the j loop — the
                // original recomputed the full k-sum for every single weight.
                float totalError = 0;
                for (int k = 0; k < layers[lastLayer].nodes.Length; k++)
                {
                    totalError += DeriativeOfCrossEntropy(expectedOutputData[k], forwardfeed[lastLayer][k])
                                  * Neural.TanhDer(rawForwardFeed[lastLayer][k])
                                  * layers[lastLayer].nodes[k].weight[i];
                }
                float delta = totalError * Neural.TanhDer(rawForwardFeed[currentLayer][i]);
                for (int j = 0; j < node.weight.Length; j++)
                {
                    // inputData[j] is the activation feeding weight j — only valid
                    // because this layer is the first one.
                    newW[i, j] = node.weight[j] - Neural.learnRate * delta * inputData[j];
                }
            }
            newWeights.Insert(0, newW);

            // Commit all the freshly computed weight matrices, layer by layer.
            for (int i = 0; i < newWeights.Count; i++)
            {
                layers[i].UpdateNodes(newWeights[i]);
            }
        }