Example #1
        /// <summary>
        /// Counts how many samples in the data set the network classifies correctly,
        /// treating an output at or above 0.5 as a positive signal.
        /// </summary>
        public int StockTest(ref StockDataSet data)
        {
            data.Reset();
            int correct = 0;
            int total   = data.getSize();

            for (int i = 0; i < total; i++)
            {
                StockDataForNetwork networkData = data.GetNextNetworkData();
                SetInputLayer(networkData.InputLayer);
                FeedForward();
                float predictedValue = mNeurons[lastLayerIndex][0];
                // Count a hit when the prediction and the target fall on the same side of 0.5.
                if ((predictedValue >= 0.5f) == (networkData.OutputLayer[0] >= 0.5f))
                {
                    correct++;
                }
            }
            data.Reset();
            return correct;
        }
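
A minimal usage sketch for the accuracy check above. Only StockTest, StockDataSet, and getSize come from the example itself; the NeuralNetwork class name, its construction, and the LoadTestingData helper are assumptions for illustration.

        // Hypothetical caller (NeuralNetwork and LoadTestingData are assumed names).
        NeuralNetwork network = new NeuralNetwork(/* layer sizes */);
        StockDataSet testingData = LoadTestingData();            // hypothetical loader
        int correct = network.StockTest(ref testingData);
        Console.WriteLine("Accuracy: {0} / {1}", correct, testingData.getSize());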
Example #2
        /// <summary>
        /// Stochastic gradient descent learning algorithm.
        /// </summary>
        public void SGD(ref StockDataSet trainingData, ref StockDataSet testingData, int epochs, int batchSize, float learningRate, bool output)
        {
            MathNet.Numerics.Control.UseSingleThread(); // For these small matrices, single-threaded math outperforms MathNet's multithreading.

            Console.WriteLine("Training...");

            // Zero the gradient accumulators for every layer before training starts.
            for (int l = 0; l < vSizes.Count; l++)
            {
                mSumW[l].Clear();
                mSumB[l].Clear();
            }

            RandomizeParameters(); // Start from freshly randomized weights and biases.

            for (int i = 0; i < epochs; i++)
            {
                StockDataForNetwork networkData = trainingData.GetNextNetworkData();
                int j = 0;
                while (networkData != null)
                {
                    // Forward pass, then backpropagate the error for this sample.
                    SetInputLayer(networkData.InputLayer);
                    FeedForward();
                    SetOutputLayer(networkData.OutputLayer);
                    OutputError();
                    Backprop();

                    // Accumulate this sample's gradient contributions: the weight gradient is the
                    // outer product of the layer error and the previous layer's activations.
                    for (int l = lastLayerIndex; l > 0; l--)
                    {
                        mSumW[l] += mError[l].ToColumnMatrix() * mNeurons[l - 1].ToRowMatrix();
                        mSumB[l] += mError[l];
                    }

                    j++;

                    // Apply the averaged gradient step once a full mini-batch has accumulated.
                    // (Incrementing j first fixes the original off-by-one, which let the first
                    // update fire only after batchSize + 1 samples.)
                    if (j % batchSize == 0)
                    {
                        for (int l = 0; l < vSizes.Count; l++)
                        {
                            mWeights[l] += mSumW[l].Multiply(-learningRate / batchSize);
                            mBiases[l]  += mSumB[l].Multiply(-learningRate / batchSize);
                            mSumW[l].Clear();
                            mSumB[l].Clear();
                        }
                    }
                    networkData = trainingData.GetNextNetworkData();
                }

                // Discard any leftover partial batch so stale gradients do not leak into the next epoch.
                for (int l = 0; l < vSizes.Count; l++)
                {
                    mSumW[l].Clear();
                    mSumB[l].Clear();
                }
                if (output)
                {
                    Console.WriteLine("Epoch {0}: {1} / {2}", i, StockTest(ref testingData), testingData.getSize());
                }
                trainingData.Reset();
            }
            Console.WriteLine("Training Complete.");
        }
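
A hedged sketch of a full training run using the SGD method above. Only SGD's signature, StockDataSet, and the ref-parameter convention come from the example; the NeuralNetwork type, its constructor, and the LoadDataSet helper are hypothetical names, and the hyperparameter values are illustrative.

        // Hypothetical driver (NeuralNetwork and LoadDataSet are assumed names).
        StockDataSet trainingData = LoadDataSet("train.csv");   // hypothetical loader
        StockDataSet testingData  = LoadDataSet("test.csv");
        NeuralNetwork network = new NeuralNetwork(/* layer sizes */);

        // 30 epochs, mini-batches of 10, learning rate 0.5f, printing test accuracy each epoch.
        network.SGD(ref trainingData, ref testingData, 30, 10, 0.5f, true);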