Example #1
        private void runNeuralNet_Click(object sender, EventArgs e)
        {
            reset.Enabled = true;

            // Capture the drawing panel as an image and look for connected blobs.
            var bmp   = new BitmapImage(PanelToBitmap());
            var blobs = blobFinder.From(bmp);

            if (blobs.Any())
            {
                // Outline the detected blobs on the panel; dispose the Graphics object when done.
                using (var g = panel1.CreateGraphics())
                {
                    g.DrawRectangles(Pens.Red, blobs.ToArray());
                }

                // Down-sample the image and feed the normalized luminosities to the network.
                var imageSampler = new ImageSampler();
                var samples      = imageSampler.SampleImage(bmp).GetSamples().Select(s => s.NormalizedLuminosity).ToArray();
                var output       = bnn.ComputeOutputs(samples);

                // Accept only a confident response: first output high, second output low.
                if (output[0] > 0.90 && output[1] < 0.05)
                {
                    BackColor = Color.Green;
                }
                else
                {
                    BackColor = Color.Red;
                }
            }
            repaint = false;
        }
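
PanelToBitmap is not shown in this handler; a minimal sketch, assuming panel1 is a standard Windows Forms Panel, is to let the control render itself into a Bitmap via Control.DrawToBitmap:

        // Assumed helper, not part of the original listing: renders panel1 into a Bitmap
        // so it can be handed to the blob finder and the image sampler.
        private Bitmap PanelToBitmap()
        {
            var bmp = new Bitmap(panel1.Width, panel1.Height);
            panel1.DrawToBitmap(bmp, new Rectangle(0, 0, panel1.Width, panel1.Height));
            return bmp;
        }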
Example #2
        // Feeds the state's feature vector through the network once and returns
        // the raw outputs as a list of Q-values.
        private List<double> GetQValues(State state)
        {
            List<double> inputs = GetInput(state);
            List<double> output = new List<double>();

            output.AddRange(network.ComputeOutputs(inputs.ToArray()));
            return output;
        }
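
GetQValues scores every action for a state in one forward pass. A hypothetical caller (GetGreedyAction is an assumption, not part of the original code) could pick the action with the largest Q-value:

        // Hypothetical usage sketch: index of the action with the highest Q-value for this state.
        private int GetGreedyAction(State state)
        {
            List<double> qValues = GetQValues(state);

            int best = 0;
            for (int a = 1; a < qValues.Count; ++a)
            {
                if (qValues[a] > qValues[best])
                {
                    best = a;
                }
            }
            return best;
        }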
        // Mean absolute error between the actual close-price ratio and the network's
        // decoded prediction, taken over every stock history in the set.
        private double GetAverageError(bool printDebugOutput)
        {
            int    count  = 0;
            double sumErr = 0;

            foreach (StockHistory stock in m_StockHistorySet.AllStockHistories)
            {
                count++;

                double realValue       = stock.Closes[Today + DaysInFuture] / stock.Closes[Today];
                double predictedOutput = m_Bnn.ComputeOutputs(ComputeInputs(stock))[0];
                double predictedValue  = ATanh(predictedOutput);

                sumErr += Math.Abs(realValue - predictedValue);
            }

            return count > 0 ? sumErr / count : 0.0;
        }
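
ATanh is not part of the listing; assuming it is the usual inverse hyperbolic tangent (a Math.Atanh built-in exists only on newer runtimes), a minimal sketch is:

        // Assumed helper, not part of the original listing: inverse hyperbolic tangent,
        // used to decode the network's squashed output back into a close-price ratio.
        private static double ATanh(double x)
        {
            return 0.5 * Math.Log((1.0 + x) / (1.0 - x));
        }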
        public void Setup(IEnumerable <IFunctor> functors, StockHistorySet stockHistorySet)
        {
            Fns = functors.ToArray();
            m_StockHistorySet = stockHistorySet;

            if (this.Fns == null)
            {
                throw new ApplicationException("functors must not be null");
            }
            if (!Fns.Any())
            {
                throw new ApplicationException("at least one functor is required");
            }

            int numInput   = Fns.Length;
            int numHidden  = 8;
            int numOutput  = 1;
            // weights = input-to-hidden + hidden-to-output, plus one bias per hidden and output node
            int numWeights = (numInput * numHidden) + (numHidden * numOutput) + (numHidden + numOutput);

            DebugWrite("Creating a " + numInput + "-input, " + numHidden + "-hidden, " + numOutput + "-output neural network");
            DebugWrite("Using hard-coded tanh function for hidden layer activation");
            DebugWrite("Using hard-coded log-sigmoid function for output layer activation");

            m_Bnn = new BackPropNeuralNet(numInput, numHidden, numOutput);

            DebugWrite("\nGenerating random initial weights and bias values");
            double[] initWeights = new double[numWeights];
            for (int i = 0; i < initWeights.Length; ++i)
            {
                initWeights[i] = rnd.NextDouble() - 0.5; // uniform in [-0.5, +0.5)
            }
            DebugWrite("Loading neural network initial weights and biases into neural network");
            m_Bnn.SetWeights(initWeights);

            double learnRate = 0.2; // learning rate - controls the magnitude of each weight update.
            double momentum  = 0.1; // momentum - damps oscillation between successive updates.

            DebugWrite("Setting learning rate = " + learnRate.ToString("F2") + " and momentum = " + momentum.ToString("F2"));

            int    maxEpochs   = 8000000;
            double errorThresh = 0.01;

            DebugWrite("\nSetting max epochs = " + maxEpochs + " and error threshold = " + errorThresh.ToString("F6"));

            // Train

            int    epoch = 0;
            double error = double.MaxValue;

            DebugWrite("\nBeginning training using back-propagation\n");

            int stocksCount = stockHistorySet.AllStockHistories.Count();

            while (epoch < maxEpochs) // train
            {
                // Pick a random stock for this training step.
                int stockNum = rnd.Next(0, stocksCount);
                var stock    = stockHistorySet.AllStockHistories[stockNum];

                // Target: ratio of the future close to today's close, squashed with tanh
                // so it matches the range the output is decoded from.
                double realValue   = stock.Closes[Today + DaysInFuture] / stock.Closes[Today];
                double updateValue = Math.Tanh(realValue);

                // ComputeOutputs runs before UpdateWeights so back-propagation sees this
                // example's activations; predictedValue itself is not used in the update.
                double predictedOutput = m_Bnn.ComputeOutputs(ComputeInputs(stock))[0];
                double predictedValue  = ATanh(predictedOutput) * stock.Closes[Today];

                m_Bnn.UpdateWeights(new[] { updateValue }, learnRate, momentum);
                ++epoch;

                if (epoch % 20000 == 0)
                {
                    error = GetAverageError(true);

                    if (error < errorThresh)
                    {
                        DebugWrite("Found weights and bias values that meet the error criterion at epoch " + epoch);
                        break;
                    }
                    DebugWrite("epoch = " + epoch);
                    DebugWrite(" error = " + error + "\n");
                }
            } // train loop

            double[] finalWeights = m_Bnn.GetWeights();
            DebugWrite("");
            DebugWrite("Final neural network weights and bias values are:");
            Helpers.ShowVector(finalWeights, 5, 8, true);
        }
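
Once training finishes, the network is queried the same way GetAverageError does it. A minimal usage sketch (the Predict method name is an assumption):

        // Hypothetical usage sketch: predicted ratio of the close DaysInFuture days ahead
        // to today's close, decoded from the trained network's output.
        public double Predict(StockHistory stock)
        {
            double predictedOutput = m_Bnn.ComputeOutputs(ComputeInputs(stock))[0];
            return ATanh(predictedOutput);
        }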
Example #5
        // Q-value of a single (state, action) pair: the pair is encoded into the input
        // vector and the network's single output is returned.
        private double GetQValue(State state, int actionIndex)
        {
            return network.ComputeOutputs(GetInput(state, actionIndex).ToArray())[0];
        }
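
Because GetQValue scores one (state, action) pair per call, action selection loops over the action indices. A hypothetical epsilon-greedy wrapper (ChooseAction, actionCount and rnd are assumptions, not part of the original code):

        // Hypothetical usage sketch: explore with probability epsilon, otherwise
        // pick the action whose Q-value is highest for this state.
        private int ChooseAction(State state, int actionCount, double epsilon)
        {
            if (rnd.NextDouble() < epsilon)
            {
                return rnd.Next(0, actionCount);
            }

            int    best  = 0;
            double bestQ = GetQValue(state, 0);
            for (int a = 1; a < actionCount; ++a)
            {
                double q = GetQValue(state, a);
                if (q > bestQ)
                {
                    best  = a;
                    bestQ = q;
                }
            }
            return best;
        }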