Example #1
        /// <summary>
        /// Evaluates the network on a data set and returns the number of
        /// correct up/down classifications (output thresholded at 0.5).
        /// </summary>
        public int StockTest(ref StockDataSet data)
        {
            data.Reset();
            int correct = 0;
            int total   = data.getSize();

            for (int i = 0; i < total; i++)
            {
                StockDataForNetwork networkData = data.GetNextNetworkData();
                SetInputLayer(networkData.InputLayer);
                FeedForward();
                float predictedValue = mNeurons[lastLayerIndex][0];
                // Count the prediction as correct when it falls on the same side
                // of the 0.5 threshold as the expected output.
                if ((predictedValue >= 0.5f) == (networkData.OutputLayer[0] >= 0.5f))
                {
                    correct++;
                }
            }
            data.Reset();
            return correct;
        }
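A minimal usage sketch for this method (hypothetical driver code, not part of the example; it assumes a trained Network instance net and a StockDataSet testingSet built as in Example #2):

            int correct   = net.StockTest(ref testingSet);           // directional hits on the test set
            float percent = 100f * correct / testingSet.getSize();   // convert the hit count to a percentage
            Console.WriteLine("Directional accuracy: {0:F1}%", percent);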
Example #2
        static void Main()
        {
            // Load data from CSV.
            List<StockDataPoint> dataPoints = StockDataUtility.ReadStockFile(@"msft.us.csv");

            // Create a normalized training set of all of the CSV data.
            StockDataSet trainingSet = new StockDataSet(dataPoints, true);

            // Remove a percentage of data points from training set to create a testing set.
            int testingSize = (int)(trainingSet.getSize() * 0.20f);
            List<StockDataPoint> testingPoints = new List<StockDataPoint>();

            for (int i = 0; i < testingSize; i++)
            {
                testingPoints.Add(trainingSet.RandomRemoveFromSet());
            }
            StockDataSet testingSet = new StockDataSet(testingPoints, false); // No need to normalize the data since it has already been normalized.

            // Create the neural network.
            var sizes = new List<int> { 5, 30, 30, 1 };
            var net = new Network(sizes, Activation.Sigmoid);

            // Driver code here.
            //net.LoadNetwork(@"network.dat");
            net.SGD(ref trainingSet, ref testingSet, 50, 10, 0.01f, true); // 50 epochs, mini-batch size 10, learning rate 0.01, log progress.
            //net.StockTest(ref testingSet);
            net.SaveNetwork(@"network.dat");
            Console.ReadKey();
        }
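The commented-out lines above suggest an alternate driver path: loading a previously saved network and evaluating it without retraining. A hedged sketch of that variant (same types and file name as in the listing):

            var net = new Network(sizes, Activation.Sigmoid);
            net.LoadNetwork(@"network.dat");                        // restore previously saved weights and biases
            int correct = net.StockTest(ref testingSet);            // evaluate only, no training
            Console.WriteLine("{0} / {1} correct", correct, testingSet.getSize());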
Example #3
        /// <summary>
        /// Stochastic gradient descent learning algorithm.
        /// </summary>
        public void SGD(ref StockDataSet trainingData, ref StockDataSet testingData, int epochs, int batchSize, float learningRate, bool output)
        {
            MathNet.Numerics.Control.UseSingleThread(); // Single-threaded Math.NET performs better here than its multithreaded mode.

            Console.WriteLine("Training...");

            for (int l = 0; l < vSizes.Count; l++)
            {
                mSumW[l].Clear();
                mSumB[l].Clear();
            }

            RandomizeParameters();

            for (int i = 0; i < epochs; i++)
            {
                StockDataForNetwork networkData = trainingData.GetNextNetworkData();
                int j = 0;
                while (networkData != null)
                {
                    SetInputLayer(networkData.InputLayer);
                    FeedForward();
                    SetOutputLayer(networkData.OutputLayer);
                    OutputError();
                    Backprop();

                    // Accumulate this sample's gradient contributions into the running sums.
                    for (int l = lastLayerIndex; l > 0; l--)
                    {
                        mSumW[l] += mError[l].ToColumnMatrix() * mNeurons[l - 1].ToRowMatrix();
                        mSumB[l] += mError[l];
                    }

                    // Update the weights and biases after the mini-batch has been processed.
                    if (j % batchSize == 0 && j > 0)
                    {
                        for (int l = 0; l < vSizes.Count; l++)
                        {
                            mWeights[l] += mSumW[l].Multiply(-learningRate / batchSize);
                            mBiases[l]  += mSumB[l].Multiply(-learningRate / batchSize);
                            mSumW[l].Clear();
                            mSumB[l].Clear();
                        }
                    }
                    networkData = trainingData.GetNextNetworkData();
                    j++;
                }
                if (output)
                {
                    Console.WriteLine("Epoch {0}: {1} / {2}", i, StockTest(ref testingData), testingData.getSize());
                }
                trainingData.Reset();
            }
            Console.WriteLine("Training Complete.");
        }
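For reference, the accumulation and update above correspond to the standard mini-batch gradient-descent rule (my reading of the code; here \eta is learningRate, m is batchSize, \delta^l is the per-layer error computed by Backprop, and a^{l-1} the previous layer's activations):

            w^l \leftarrow w^l - \frac{\eta}{m} \sum_{x \in \text{batch}} \delta^l_x \, (a^{l-1}_x)^{\top}, \qquad
            b^l \leftarrow b^l - \frac{\eta}{m} \sum_{x \in \text{batch}} \delta^l_x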