Example #1
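This Unity (C#) example trains a small convolutional pipeline on 28x28 image samples: each sample is convolved and max-pooled, the pooled feature maps are flattened into the 1152 inputs of a fully connected NeuralNetwork with 10 outputs, and the running average error and total training time are recorded along the way.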
    public void TrainNN()
    {
        total     = 0;
        testIndex = 0;
        correct   = 0;
        nn        = new NeuralNetwork(1152, hiddenNeurons, 10);
        errors.Clear();
        trained = true;

        float startTrain = Time.realtimeSinceStartup;

        for (int i = 0; i < m_texLoad.trainLength; ++i)
        {
            // Record the running average error (cumulative error / samples seen)
            // against training progress; skip i == 0 to avoid dividing by zero.
            if (i > 0)
            {
                errors.Add(new Point((float)i / m_texLoad.trainLength, nn.cumError / i));
            }

            // Build a one-hot target vector for the sample's digit label (0-9).
            int label = m_texLoad.GetLabel(i, 1);

            float[] targets = new float[10];
            targets [label] = 1;

            // Reshape the 784 raw pixel values into a 28x28 matrix for the conv layer.
            Matrix sample = Matrix.fromArray(m_texLoad.GetPixels(i, 1), 28, 28);

            // Convolve the sample and max-pool the resulting feature maps.
            Matrix[] features       = convLayer.Convolve(sample);
            Matrix[] pooledFeatures = convLayer.MaxPool(features);

            // Flatten every pooled feature map into a single input vector for the
            // fully connected network (its total length matches the 1152 inputs
            // given to NeuralNetwork above).
            float[] FC_Array = new float[pooledFeatures.Length * pooledFeatures[0].getCols() * pooledFeatures[0].getRows()];

            for (int f = 0; f < pooledFeatures.Length; ++f)
            {
                float[] featureMapArray = pooledFeatures [f].toArray();

                for (int j = 0; j < featureMapArray.Length; ++j)
                {
                    FC_Array [(featureMapArray.Length * f) + j] = featureMapArray [j];
                }
            }

            // Pass the convolved, flattened features to the fully connected network,
            // which performs the forward and backward pass.
            nn.train(FC_Array, targets, batchSize);
        }

        trainTime = Time.realtimeSinceStartup - startTrain;
    }
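
The convolve, max-pool and flatten sequence inside the training loop is exactly what a test or inference pass would also need, so it is a natural candidate for a small helper. The sketch below is a minimal, hypothetical refactoring under that assumption: it relies only on calls that already appear above (Convolve, MaxPool, toArray, getRows, getCols), while the class name CNNPipeline, the method name Flatten, and the parameter type ConvolutionLayer are placeholders, since the actual type of convLayer is not shown in this example.

    // Hypothetical helper (sketch): converts one 28x28 sample into the flat
    // input vector expected by the fully connected network. "ConvolutionLayer"
    // is an assumed type name for whatever class convLayer is an instance of.
    public static class CNNPipeline
    {
        public static float[] Flatten(ConvolutionLayer convLayer, Matrix sample)
        {
            // Same preprocessing steps as in TrainNN().
            Matrix[] features       = convLayer.Convolve(sample);
            Matrix[] pooledFeatures = convLayer.MaxPool(features);

            int mapSize  = pooledFeatures[0].getRows() * pooledFeatures[0].getCols();
            float[] flat = new float[pooledFeatures.Length * mapSize];

            for (int f = 0; f < pooledFeatures.Length; ++f)
            {
                float[] featureMap = pooledFeatures[f].toArray();

                for (int j = 0; j < featureMap.Length; ++j)
                {
                    flat[(mapSize * f) + j] = featureMap[j];
                }
            }

            return flat;
        }
    }

With a helper like this, the loop body above reduces to nn.train(CNNPipeline.Flatten(convLayer, sample), targets, batchSize), and a test pass can reuse the identical preprocessing before comparing the network's output against GetLabel.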