Example #1
        /// <summary>
        /// Process the individual training file.
        /// </summary>
        /// <param name="file">The training file to process.</param>
        /// <param name="output">The data set to output to.</param>
        protected void ProcessFile(string file, BufferedMLDataSet output)
        {
            var inputData = new BasicMLData(output.InputSize);
            var idealData = new BasicMLData(output.IdealSize);

            var csv = new ReadCSV(file, true, CSVFormat.English);

            while (csv.Next())
            {
                var    a     = new double[Config.InputWindow + 1];
                double close = csv.GetDouble(1);

                const int fastIndex = 2;
                const int slowIndex = fastIndex + Config.InputWindow;

                a[0] = close;
                for (int i = 0; i < Config.InputWindow; i++)
                {
                    double fast = csv.GetDouble(fastIndex + i);
                    double slow = csv.GetDouble(slowIndex + i);
                    double diff = _fieldDifference.Normalize((fast - slow) / Config.PipSize);
                    a[i + 1] = diff;
                }
                _window.Add(a);

                if (_window.IsFull())
                {
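                    // The outcome is the largest move (up or down, in pips) over the
                    // window relative to the current close.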
                    double max = (_window.CalculateMax(0, Config.InputWindow) - close) / Config.PipSize;
                    double min = (_window.CalculateMin(0, Config.InputWindow) - close) / Config.PipSize;

                    double o = Math.Abs(max) > Math.Abs(min) ? max : min;

                    a = _window.GetLast();
                    for (int i = 0; i < Config.InputWindow; i++)
                    {
                        inputData[i] = a[i + 1];
                    }

                    o            = _fieldOutcome.Normalize(o);
                    idealData[0] = o;

                    output.Add(inputData, idealData);
                }
            }
        }
Example #2
        public string CreateMessage(string request)
        {
            string  resulttext = string.Empty;
            var     inputs     = createDoubles(request);
            IMLData data       = new BasicMLData(inputs);
            var     output     = NeuralNetwork.Compute(data);

            for (int i = 0; i < output.Count; i++)
            {
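                // Scale each output neuron back up to an integer row id and append
                // the matching stored text, if any.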
                var id  = Convert.ToInt32(output[i] * 1000000000);
                var row = storageDbRepository.GetStorageRowFromID(id);
                if (!string.IsNullOrEmpty(row.Text))
                {
                    resulttext += $"{row.Text} ";
                }
            }
            return(resulttext);
        }
Example #3
        public static IMLDataSet CreateNoisyXORDataSet(int count)
        {
            var result = new BasicMLDataSet();

            for (int i = 0; i < count; i++)
            {
                for (int j = 0; j < 4; j++)
                {
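                    // Start from the canonical XOR pattern, then jitter both inputs
                    // slightly so every repetition is a noisy variant.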
                    var inputData = new BasicMLData(XORInput[j]);
                    var idealData = new BasicMLData(XORIdeal[j]);
                    var pair      = new BasicMLDataPair(inputData, idealData);
                    inputData[0] = inputData[0] + RangeRandomizer.Randomize(-0.1, 0.1);
                    inputData[1] = inputData[1] + RangeRandomizer.Randomize(-0.1, 0.1);
                    result.Add(pair);
                }
            }
            return(result);
        }
        public Int16 Play(double[] inputs)
        {
            Int16 retVal = -1;

            var input = new BasicMLData(3);

            input[0] = xInput.Normalize(inputs[0]);
            input[1] = yInput.Normalize(inputs[1]);
            input[2] = bumpedIntoWallInput.Normalize(inputs[2]);

            IMLData output      = network.Compute(input);
            double  denormValue = output[0];
            double  normValue   = Math.Round(directionOutput.DeNormalize(denormValue));

            retVal = Convert.ToInt16(normValue);

            return(retVal);
        }
Example #5
        /// <summary>
        /// Construct the chain rule calculation.
        /// </summary>
        ///
        /// <param name="network">The network to use.</param>
        /// <param name="indexableTraining">The training set to use.</param>
        public JacobianChainRule(BasicNetwork network,
                                 IMLDataSet indexableTraining)
        {
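            // The Jacobian has one row per training case and one column per
            // network parameter (weight or bias).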
            _indexableTraining = indexableTraining;
            _network           = network;
            _parameterSize     = network.Structure.CalculateSize();
            _inputLength       = (int)_indexableTraining.Count;
            _jacobian          = EngineArray.AllocateDouble2D(_inputLength, _parameterSize);

            _rowErrors = new double[_inputLength];

            var input = new BasicMLData(
                _indexableTraining.InputSize);
            var ideal = new BasicMLData(
                _indexableTraining.IdealSize);

            _pair = new BasicMLDataPair(input, ideal);
        }
        public IEnumerable <RecognizedSoundModel> Recognize(double[] soundData)
        {
            var input  = new BasicMLData(soundData, false);
            var output = _network.Compute(input);

            var result = new List <RecognizedSoundModel>();

            for (int i = 0; i < output.Count; i++)
            {
                result.Add(new RecognizedSoundModel
                {
                    Match       = output[i],
                    LabelNumber = i
                });
            }

            return(result);
        }
        public static IMLDataSet GenerateSingleDataRange(EncogFunction task, double start, double stop, double step)
        {
            BasicMLDataSet result  = new BasicMLDataSet();
            double         current = start;


            while (current <= stop)
            {
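                // Sample the target function at the current point to build a
                // one-input, one-output training pair.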
                BasicMLData input = new BasicMLData(1);
                input[0] = current;
                BasicMLData ideal = new BasicMLData(1);
                ideal[0] = task(current);
                result.Add(input, ideal);
                current += step;
            }

            return(result);
        }
Example #8
        private void button1_Click(object sender, EventArgs e)
        {
            chart1.Series["Prueba"].Points.Clear();
            chart2.Series["Prueba"].Points.Clear();
            chart3.Series["Prueba"].Points.Clear();
            Entrada = new double[3] {
                trackBar1.Value, trackBar2.Value, trackBar3.Value
            };
            IMLData EntradaNeurona = new BasicMLData(Entrada);
            IMLData Resultado      = Red.Compute(EntradaNeurona);

            chart1.Series["Prueba"].Points.AddXY(Entrada[0], Entrada[1]);
            chart2.Series["Prueba"].Points.AddXY(Entrada[0], Entrada[2]);
            chart3.Series["Prueba"].Points.AddXY(Entrada[1], Entrada[2]);
            pictureBox1.BackColor = Color.FromArgb(Convert.ToInt32(Entrada[0]), Convert.ToInt32(Entrada[1]), Convert.ToInt32(Entrada[2]));
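
            // Classify the color from the three output neurons: a value near 1 or
            // near 0 on each channel selects one of the trained color labels below.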
            if (Resultado[0] > 0.9 && Resultado[1] < 0.1 && Resultado[2] < 0.1)
            {
                label1.Text = "Es color rojo con un valor de \nR:" + Resultado[0] + "\nG:" + Resultado[1] + "\nB:" + Resultado[2];
            }
            else if (Resultado[0] > 0.9 && Resultado[1] > 0.9 && Resultado[2] > 0.9)
            {
                label1.Text = "Es color naranja con un valor de \nR:" + Resultado[0] + "\nG:" + Resultado[1] + "\nB:" + Resultado[2];
            }
            else if (Resultado[0] > 0.9 && Resultado[1] > 0.9 && Resultado[2] < 0.1)
            {
                label1.Text = "Es color amarillo con un valor de \nR:" + Resultado[0] + "\nG:" + Resultado[1] + "\nB:" + Resultado[2];
            }
            else if (Resultado[0] < 0.1 && Resultado[1] > 0.9 && Resultado[2] < 0.1)
            {
                label1.Text = "Es color verde con un valor de \nR:" + Resultado[0] + "\nG:" + Resultado[1] + "\nB:" + Resultado[2];
            }
            else if (Resultado[0] < 0.1 && Resultado[1] < 0.1 && Resultado[2] > 0.9)
            {
                label1.Text = "Es color azul con un valor de \nR:" + Resultado[0] + "\nG:" + Resultado[1] + "\nB:" + Resultado[2];
            }
            else if (Resultado[0] < 0.1 && Resultado[1] < 0.1 && Resultado[2] < 0.1)
            {
                label1.Text = "Es color café con un valor de \nR:" + Resultado[0] + "\nG:" + Resultado[1] + "\nB:" + Resultado[2];
            }
            else
            {
                label1.Text = "Indeterminado con un valor de \nR:" + Resultado[0] + "\nG:" + Resultado[1] + "\nB:" + Resultado[2];
            }
        }
        public TimeSeries Forecast(SupportVectorMachine network, NormalizeArray norm, TimeSeries simulatedData, List <DateTime> futureTimes)
        {
            int data_count        = simulatedData.Count;
            int future_data_count = futureTimes.Count;

            double[] data = new double[data_count + future_data_count];

            for (int idx = 0; idx < data_count; ++idx)
            {
                data[idx] = simulatedData[idx];
            }
            for (int idx = 0; idx < future_data_count; ++idx)
            {
                data[data_count + idx] = 0;
            }

            TimeSeries ts        = new TimeSeries();
            double     input_val = 0;

            for (int idx = 0; idx < future_data_count; ++idx)
            {
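                // Build a window of the WindowSize values preceding the forecast
                // point (zero-padded before the series starts), predict the next
                // value, and append it so later windows can reuse the prediction.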
                var input = new BasicMLData(WindowSize);
                for (var i = 0; i < WindowSize; i++)
                {
                    int idx2 = (data_count + idx - WindowSize) + i;
                    if (idx2 < 0)
                    {
                        input_val = 0;
                    }
                    else
                    {
                        input_val = norm.Stats.Normalize(data[idx2]);
                    }
                    input[i] = input_val;
                }
                IMLData output     = network.Compute(input);
                double  prediction = norm.Stats.DeNormalize(output[0]);
                data[data_count + idx] = prediction;
                ts.Add(futureTimes[idx], prediction, false);
            }

            return(ts);
        }
Example #10
        public TimeSeries Forecast(BasicNetwork network, double scale_factor, TimeSeries simulatedData, List <DateTime> futureTimes)
        {
            int data_count        = simulatedData.Count;
            int future_data_count = futureTimes.Count;

            double[] data = new double[data_count + future_data_count];

            for (int idx = 0; idx < data_count; ++idx)
            {
                data[idx] = simulatedData[idx];
            }
            for (int idx = 0; idx < future_data_count; ++idx)
            {
                data[data_count + idx] = 0;
            }

            TimeSeries ts        = new TimeSeries();
            double     input_val = 0;

            for (int idx = 0; idx < future_data_count; ++idx)
            {
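                // Same sliding-window scheme as the SVM forecast above, except the
                // values are scaled by scale_factor rather than a normalizer.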
                var input = new BasicMLData(WindowSize);
                for (var i = 0; i < WindowSize; i++)
                {
                    int idx2 = (data_count + idx - WindowSize) + i;
                    if (idx2 < 0)
                    {
                        input_val = 0;
                    }
                    else
                    {
                        input_val = data[idx2] / scale_factor;
                    }
                    input[i] = input_val;
                }
                IMLData output     = network.Compute(input);
                double  prediction = output[0] * scale_factor;
                data[data_count + idx] = prediction;
                ts.Add(futureTimes[idx], prediction, false);
            }

            return(ts);
        }
        /// <summary>
        /// Compute the output for the given input.
        /// </summary>
        ///
        /// <param name="input">The input to the SVM.</param>
        /// <returns>The results from the SVM.</returns>
        public IMLData Compute(IMLData input)
        {
            if (_model == null)
            {
                throw new EncogError(
                          "Can't use the SVM yet, it has not been trained, "
                          + "and no model exists.");
            }

            IMLData result = new BasicMLData(1);

            svm_node[] formattedInput = MakeSparse(input);

            double d = svm.svm_predict(_model, formattedInput);

            result[0] = d;

            return(result);
        }
        /// <inheritdoc/>
        public IMLDataPair this[int x]
        {
            get
            {
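                // Read one record straight from the binary (EGB) buffer: the input
                // values, the ideal values, then the significance weight.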
                var input = new double[InputSize];
                var ideal = new double[IdealSize];

                _egb.SetLocation(x);
                _egb.Read(input);
                _egb.Read(ideal);

                var inputData = new BasicMLData(input, false);
                var idealData = new BasicMLData(ideal, false);

                var result = new BasicMLDataPair(inputData, idealData);
                result.Significance = _egb.Read();
                return(result);
            }
        }
        public int ScorePilot()
        {
            var sim = new LanderSimulator();

            while (sim.Flying)
            {
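                // Each turn, feed the lander's normalized telemetry to the network;
                // a positive output fires the thruster.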
                var input = new BasicMLData(3);
                input[0] = _fuelStats.Normalize(sim.Fuel);
                input[1] = _altitudeStats.Normalize(sim.Altitude);
                input[2] = _velocityStats.Normalize(sim.Velocity);
                IMLData output = _network.Compute(input);
                double  value  = output[0];

                bool thrust;

                if (value > 0)
                {
                    thrust = true;
                    if (_track)
                    {
                        Console.WriteLine(@"THRUST");
                    }
                }
                else
                {
                    thrust = false;
                }

                sim.Turn(thrust);
                if (_track)
                {
                    Console.WriteLine(sim.Telemetry());
                }
            }

            CycleCount++;

            Outputs.Add(new EncogSimOut {
                Session = CycleCount, Score = sim.Score
            });

            return(sim.Score);
        }
Example #14
        public int test(double[] inputData)
        {
            // test the neural network
            IMLData input = new BasicMLData(inputData);

            Console.WriteLine(@"Neural Network Results:");

            IMLData output = network.Compute(input);

            Console.WriteLine("Input: ");
            for (int i = 0; i < inputData.Length; i++)
            {
                Console.WriteLine(inputData[i]);
            }
            Console.WriteLine("Output = " + output[0]);
            EncogFramework.Instance.Shutdown();

            return((int)Math.Round(output[0], MidpointRounding.AwayFromZero));
        }
Example #15
            public static void predict(SupportVectorMachine network)
            {
                Console.WriteLine("Year\t Actual\t Normalized\t Predict\t Closed Loop");

                for (int year = EVALUATE_START; year < EVALUATE_END; year++)
                {
                    // calculate based on actual data
                    IMLData input = new BasicMLData(WINDOW_SIZE);
                    for (int i = 0; i < input.Count; i++)
                    {
                        input.Data[i] = normalizedSunspots[(year - WINDOW_SIZE) + i];
                        //input.setData(i,this.normalizedSunspots[(year-WINDOW_SIZE)+i]);
                    }
                    IMLData output     = network.Compute(input);
                    double  prediction = output.Data[0];
                    closedLoopSunspots[year] = prediction;

                    // calculate "closed loop", based on predicted data
                    for (int i = 0; i < input.Count; i++)
                    {
                        input.Data[i] = closedLoopSunspots[(year - WINDOW_SIZE) + i];
                        //input.setData(i,this.closedLoopSunspots[(year-WINDOW_SIZE)+i]);
                    }
                    output = network.Compute(input);
                    double closedLoopPrediction = output[0];



                    // display
                    //System.out.println((STARTING_YEAR+year)
                    //        +"\t"+f.format(this.normalizedSunspots[year])
                    //        +"\t"+f.format(prediction)
                    //        +"\t"+f.format(closedLoopPrediction)

                    Console.WriteLine(((STARTING_YEAR + year)
                                       + "\t " + Format.FormatDouble(SUNSPOTS[year], 4)
                                       + "\t " + Format.FormatDouble(normalizedSunspots[year], 4)
                                       + "\t " + Format.FormatDouble(prediction, 4)
                                       + "\t " + Format.FormatDouble(closedLoopPrediction, 4)
                                       ));
                }
            }
Example #16
        public static BasicMLData getData(Pond pond)
        {
            var input = new BasicMLData(inputSize);

            input[0] = location.Normalize(-pond.sharkPos.X + pond.fishPos.X);
            input[1] = location.Normalize(-pond.sharkPos.Y + pond.fishPos.Y);

            /* input[2] = location.Normalize(pond.sharkPos.X - pond.fishPos.X);
             * input[3] = location.Normalize(pond.sharkPos.Y - pond.fishPos.Y);
             * input[4] = speed.Normalize(pond.fishVel.X - pond.sharkVel.X);
             * input[5] = speed.Normalize(pond.fishVel.Y - pond.sharkVel.Y);
             * input[6] = speed.Normalize(pond.sharkVel.X - pond.fishVel.X);
             * input[7] = speed.Normalize(pond.sharkVel.Y - pond.sharkVel.Y);
             * input[8] = pond.lastFish.X;
             * input[9] = pond.lastFish.Y;
             * input[10] = pond.lastShark.X;
             * input[11] = pond.lastShark.Y;*/

            return(input);
        }
Example #17
        /// <inheritdoc />
        public IMLDataPair this[int index]
        {
            get
            {
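                // Size the pair for the configured lag (input) and lead (ideal)
                // windows, then fill it from the underlying record.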
                if (index >= Count)
                {
                    return(null);
                }

                var input = new BasicMLData(
                    CalculatedInputSize * CalculateLagCount());
                var ideal = new BasicMLData(
                    CalculatedIdealSize * CalculateLeadCount());
                IMLDataPair pair = new BasicMLDataPair(input, ideal);

                GetRecord(index, pair);

                return(pair);
            }
        }
        /// <summary>
        /// Analyze the data. This counts the records and prepares the data to be
        /// processed.
        /// </summary>
        ///
        /// <param name="theAnalyst">The analyst to use.</param>
        /// <param name="inputFile">The input file to analyze.</param>
        /// <param name="headers">True, if the input file has headers.</param>
        /// <param name="format">The format of the input file.</param>
        public void Analyze(EncogAnalyst theAnalyst,
                            FileInfo inputFile, bool headers, CSVFormat format)
        {
            InputFilename      = inputFile;
            ExpectInputHeaders = headers;
            Format             = format;

            Analyzed = true;
            _analyst = theAnalyst;

            _data = new BasicMLDataSet();
            ResetStatus();
            int recordCount = 0;

            int outputLength = _analyst.DetermineTotalColumns();
            var csv          = new ReadCSV(InputFilename.ToString(),
                                           ExpectInputHeaders, Format);

            ReadHeaders(csv);

            _analystHeaders = new CSVHeaders(InputHeadings);

            while (csv.Next() && !ShouldStop())
            {
                UpdateStatus(true);

                double[] inputArray = AnalystNormalizeCSV.ExtractFields(
                    _analyst, _analystHeaders, csv, outputLength, true);

                IMLData input = new BasicMLData(inputArray);
                _data.Add(new BasicMLDataPair(input));

                recordCount++;
            }
            RecordCount = recordCount;
            Count       = csv.ColumnCount;

            ReadHeaders(csv);
            csv.Close();
            ReportDone(true);
        }
Example #19
        /// <summary>
        /// Called to notify the indicator that a bar has been received.
        /// </summary>
        /// <param name="packet">The packet received.</param>
        public override void NotifyPacket(IndicatorPacket packet)
        {
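            // While no trained method is loaded, just record incoming bars; once one
            // is available, build the indicator inputs, compute a prediction, and
            // write it back over the link.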
            long when = long.Parse(packet.Args[0]);

            if (_method == null)
            {
                if (_holder.Record(when, 2, packet.Args))
                {
                    _rowsDownloaded++;
                }
            }
            else
            {
                var input = new BasicMLData(Config.PredictWindow);

                const int fastIndex = 2;
                const int slowIndex = fastIndex + Config.InputWindow;

                for (int i = 0; i < 3; i++)
                {
                    double fast = CSVFormat.EgFormat.Parse(packet.Args[fastIndex + i]);
                    double slow = CSVFormat.EgFormat.Parse(packet.Args[slowIndex + i]);
                    double diff = _fieldDifference.Normalize((fast - slow) / Config.PipSize);
                    input[i] = diff;
                }

                IMLData result = _method.Compute(input);

                double d = result[0];
                d = _fieldOutcome.DeNormalize(d);

                String[] args =
                {
                    "?",                                                           // line 1
                    "?",                                                           // line 2
                    CSVFormat.EgFormat.Format(d, EncogFramework.DefaultPrecision), // bar 1
                };                                                                 // arrow 2

                Link.WritePacket(IndicatorLink.PacketInd, args);
            }
        }
Example #20
        public static BasicNetwork setup(BasicNetwork net)
        {
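            // Sample the pond simulation 600 times to build the training pairs, then
            // train with resilient propagation until the error drops below 0.01.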
            Pond.Pond pond = new Pond.Pond(null, null);

            List <double[]> inputData  = new List <double[]>();
            List <double[]> outputData = new List <double[]>();

            for (int i = 0; i < 600; i++)
            {
                double[]    d    = new double[8];
                BasicMLData data = pond.getInputData();

                for (int j = 0; j < 8; j++)
                {
                    d[j] = data[j];
                }

                Vector2  vect = pond.getLineTarget();
                double[] e    = new double[2] {
                    (double)vect.X, (double)vect.Y
                };

                inputData.Add(d);
                outputData.Add(e);
            }

            BasicMLDataSet trainingSet = new BasicMLDataSet(inputData.ToArray(), outputData.ToArray());

            ResilientPropagation train = new ResilientPropagation(net, trainingSet);

            int epoch = 0;

            do
            {
                train.Iteration();
                Console.WriteLine("Epoch #" + epoch + " Error:" + train.Error);
                epoch++;
            } while (train.Error > 0.01);

            return(net);
        }
        /// <summary>
        /// Generate a random sequence.
        /// </summary>
        /// <returns>The randomly generated data pair.</returns>
        public IMLDataPair Generate()
        {
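            // Roulette-wheel selection per element: subtract successive probabilities
            // from a uniform draw and pick the index where it first goes negative.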
            var result = new BasicMLData(_probabilities.Length);

            for (int i = 0; i < _probabilities.Length; i++)
            {
                double rand = ThreadSafeRandom.NextDouble();

                result[i] = _probabilities[i].Length - 1;
                for (int j = 0; j < (_probabilities[i].Length - 1); j++)
                {
                    if ((rand -= _probabilities[i][j]) < 0.0)
                    {
                        result[i] = j;
                        break;
                    }
                }
            }

            return(new BasicMLDataPair(result));
        }
        public void display()
        {
            double[] present      = new double[INPUT_SIZE * 2];
            double[] actualOutput = new double[OUTPUT_SIZE];

            int index = 0;

            foreach (FinancialSample sample in actual.getSamples())
            {
                if (sample.getDate().CompareTo(PREDICT_FROM) > 0)
                {
                    StringBuilder str = new StringBuilder();
                    str.Append(sample.getDate());
                    str.Append(":Start=");
                    str.Append(sample.getAmount());

                    actual.getInputData(index - INPUT_SIZE, present);
                    actual.getOutputData(index - INPUT_SIZE, actualOutput);
                    IMLData data = new BasicMLData(present);

                    IMLData  Output  = network.Compute(data);
                    double[] predict = Output.Data;

                    str.Append(",Actual % Change=");
                    str.Append(actualOutput[0].ToString("N2"));
                    str.Append(",Predicted % Change= ");
                    str.Append(predict[0].ToString("N2"));

                    str.Append(":Difference=");

                    ErrorCalculation error = new ErrorCalculation();
                    error.UpdateError(Output.Data, actualOutput, 1);
                    str.Append(error.CalculateRMS().ToString("N2"));
                    //
                    Console.WriteLine(str.ToString());
                }

                index++;
            }
        }
Example #23
        private void button2_Click(object sender, EventArgs e)
        {
            double[][]     x = { new double[] { 0.1, 0.2 },
                                 new double[]     { 0.4, 0.3 } };
            double[][]     y = { new double[] { 0.3 },
                                 new double[]     { 0.7 } };
            BasicMLDataSet dataset = new BasicMLDataSet(x, y);

            BasicNetwork rede = new BasicNetwork();

            rede.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 2));
            rede.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
            rede.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 1));
            rede.Structure.FinalizeStructure();
            rede.Reset();

            Backpropagation train = new Backpropagation(rede, dataset, 0.5, 0.1);

            int epoch = 0;

            do
            {
                train.Iteration();
                if (epoch % 100 == 0)
                {
                    richTextBox2.AppendText("Época " + epoch.ToString() + " Erro " + train.Error + Environment.NewLine);
                }
                epoch++;
            } while (epoch < 3000);

            for (double t = 0.0; t <= 5; t += 0.05)
            {
                double[] d      = new double[] { t, t };
                IMLData  input  = new BasicMLData(d);
                IMLData  output = rede.Compute(input);
                double[] result = new double[output.Count];
                output.CopyTo(result, 0, output.Count);
                richTextBox2.AppendText(" " + t + "+" + t + "=" + result[0] + Environment.NewLine);
            }
        }
Example #24
        public static void Go()
        {
            var encog = new EncogPersistedCollection("market-network.dat", FileMode.Open);

            Console.WriteLine(@"Loading network");
            var network = (BasicNetwork)encog.Find("market-network");

            Console.WriteLine(@"Reading current data from db");
            var market = new StockMarket();

            market.Init(false);
            var data = market.GetCurrentData();

            Console.WriteLine(@"Running network on data");

            var madness = new ModelMadness();

            foreach (StockMarket.WorkableStockInfo info in data)
            {
                var input       = InputOutputMadness.CreateInputs(info);
                var neuralInput = new BasicMLData(input);
                var output      = network.Compute(neuralInput);

                Console.WriteLine(@"Stock " + info.ViewToday.stock + @" will change " + output[0] + @"% in the next 20 trading days");

                var future = new prediction
                {
                    day            = DateTime.Now.Date,
                    C20_Days_Close = 100 * (decimal)output[0],
                    stock          = info.ViewToday.stock
                };

                madness.AddTopredictions(future);
            }

            madness.SaveChanges();

            Console.WriteLine(@"Done - begin making $millions");
        }
Example #25
        public void Predict()
        {
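            // Walk the normalized series, predict each point from the preceding
            // WindowSize values, and accumulate the squared relative error; the
            // final error is the RMS of the relative errors.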
            double error = 0;
            int    c     = 0;

            var d     = new DenseMatrix(2, _normalizedPredictionData.Length - WindowSize);
            int count = 0;

            for (int i = WindowSize; i < _normalizedPredictionData.Length; i++)
            {
                // calculate based on actual data
                IMLData input = new BasicMLData(WindowSize);
                for (int j = 0; j < input.Count; j++)
                {
                    input.Data[j] = _normalizedPredictionData[(i - WindowSize) + j];
                }

                IMLData output     = network.Compute(input);
                double  prediction = output.Data[0];


                error += Math.Pow((normalizeArray.Stats.DeNormalize(prediction) - predictionData[i]) / predictionData[i],
                                  2);
                c++;
                d[0, count] = predictionData[i];
                d[1, count] = normalizeArray.Stats.DeNormalize(prediction);
                count++;
            }

            error /= c;
            error  = Math.Pow(error, .5);
            Console.WriteLine(error);

            OutputData = d.Row(1).ToArray();

            string[] symbols = { "actual", "predicted" };
            Visualize.GeneratePredictionGraph(symbols, d, new DateTime(), new TimeSpan(24, 0, 0),
                                              QSConstants.DEFAULT_DATA_FILEPATH + write + ".html");
        }
Example #26
        public SOMColors()
        {
            InitializeComponent();

            this.network  = CreateNetwork();
            this.gaussian = new NeighborhoodRBF(RBFEnum.Gaussian, SOMColors.WIDTH, SOMColors.HEIGHT);
            this.train    = new BasicTrainSOM(this.network, 0.01, null, gaussian);

            train.ForceWinner = false;

            samples = new List <IMLData>();
            for (int i = 0; i < 15; i++)
            {
                IMLData data = new BasicMLData(3);
                data.Data[0] = RangeRandomizer.Randomize(-1, 1);
                data.Data[1] = RangeRandomizer.Randomize(-1, 1);
                data.Data[2] = RangeRandomizer.Randomize(-1, 1);
                samples.Add(data);
            }

            this.train.SetAutoDecay(100, 0.8, 0.003, 30, 5);
        }
Example #27
        /// <summary>
        /// Trains the neural network with the passed-in training set.
        /// Receives a list of tuples, where each tuple represents
        /// (State, ExpectedValueForBuy, ExpectedValueForSell, ExpectedValueForWait).
        /// </summary>
        /// <param name="trainingSet">The training set.</param>
        public void Train(IList <Tuple <State, double[]> > trainingSet)
        {
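            // Flatten each (State, expected action values) tuple into an
            // input/ideal pair for the training set.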
            var trainingData = new List <IMLDataPair>();

            foreach (var sample in trainingSet)
            {
                var flattenState = sample.Item1.ToArray();
                var actuals      = new BasicMLData(flattenState);
                var ideals       = new BasicMLData(sample.Item2);

                trainingData.Add(new BasicMLDataPair(actuals, ideals));
            }

            IMLDataSet dataSet = new BasicMLDataSet(trainingData);
            //IMLTrain train = new Backpropagation(NeuralNetwork, dataSet, Parameters.LearningRate, Parameters.LearningMomemtum);
            IMLTrain train = new ResilientPropagation(NeuralNetwork, dataSet);

            int epoch = 1;

            do
            {
                train.Iteration();
                epoch++;

                OnTrainingEpochComplete?.Invoke(this, new OnTrainingEpochCompleteArgs()
                {
                    Epoch = epoch,
                    Error = train.Error
                });
            } while (train.Error > Parameters.TrainingError && epoch < Parameters.MaxIterationPerTrainging);


            //foreach (var item in dataSet)
            //{
            //    var output = NeuralNetwork.Compute(item.Input);
            //    Console.WriteLine("output: {0} - {1} - {2} | ideal: {3} - {4} - {5}", output[0], output[1], output[2], item.Ideal[0], item.Ideal[1], item.Ideal[2]);
            //}
        }
Example #28
        public void Predict()
        {
            double error = 0;
            int    c     = 0;

            var d     = new DenseMatrix(2, _normalizedPredictionData.Length);
            int count = 0;

            for (int i = 0; i < _normalizedPredictionData.Length; i++)
            {
                // calculate based on actual data
                IMLData input = new BasicMLData(inputs);
                for (int j = 0; j < input.Count; j++)
                {
                    input.Data[j] = _normalizedTrainingData[j, i];
                }

                IMLData output     = network.Compute(input);
                double  prediction = output.Data[0];


                error +=
                    Math.Pow(
                        (normalizeArrayOutput.Stats.DeNormalize(prediction) - predictionData[i]) / predictionData[i], 2);
                c++;
                d[0, count] = predictionData[i];
                d[1, count] = normalizeArrayOutput.Stats.DeNormalize(prediction);
                count++;
            }

            error /= c;
            error  = Math.Pow(error, .5);
            Console.WriteLine(error);

            string[] symbols = { "actual", "predicted" };
            Visualize.GeneratePredictionGraph(symbols, d, new DateTime(), new TimeSpan(24, 0, 0),
                                              "C:\\Sangar\\resultfinal.html");
        }
        public void Run()
        {
            // Create the neural network and its layers
            var network = new BasicNetwork();

            network.AddLayer(new BasicLayer(null, true, 2));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 6));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 2));
            network.Structure.FinalizeStructure();
            network.Reset();

            // Create the training set
            IMLDataSet conjuntoEntrenamiento = new BasicMLDataSet(entradas, salidas);

            // Train the network
            IMLTrain train = new ResilientPropagation(network, conjuntoEntrenamiento);

            int epoch = 1;

            do
            {
                train.Iteration();
                Console.WriteLine("Epoca #" + epoch + " Error:" + train.Error);
                epoch++;
            } while (train.Error > 0.001);

            // Test the neural network
            Console.WriteLine("Resultados:");
            foreach (IMLDataPair pair in conjuntoEntrenamiento)
            {
                IMLData output = network.Compute(pair.Input);
                Console.WriteLine(pair.Input[0] + @"," + pair.Input[1]
                                  + @", actual=" + output[0] + "," + output[1] + @",ideal=" + pair.Ideal[0] + "," + pair.Ideal[1]);
            }

            IMLData dataprueba = new BasicMLData(new double[] { 2.4, 2.5 });
            var     prueba     = network.Compute(dataprueba);
        }
        /// <summary>
        /// Used to map neurons to actual letters.
        /// </summary>
        /// <returns>The current mapping between neurons and letters as an array.</returns>
        public char[] MapNeurons()
        {
            char[] map = new char[this.letters.Items.Count];

            for (int i = 0; i < map.Length; i++)
            {
                map[i] = '?';
            }
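            // Present each letter's downsampled bitmap to the network and record
            // which output neuron wins for that letter.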
            for (int i = 0; i < this.letters.Items.Count; i++)
            {
                BasicMLData input = new BasicMLData(OCRForm.DOWNSAMPLE_HEIGHT * OCRForm.DOWNSAMPLE_WIDTH);
                char        ch    = ((string)(this.letters.Items[i]))[0];
                bool[]      data  = this.letterData[ch];
                for (int j = 0; j < input.Count; j++)
                {
                    input[j] = data[j] ? 0.5 : -0.5;
                }

                int best = this.network.Winner(input);
                map[best] = ch;
            }
            return(map);
        }