Example #1
        private double CalculateClassificationError(BasicNeuralDataSet trainingSet)
        {
            int errorCount = 0;

            foreach (var trainData in trainingSet)
            {
                IMLData output = _network.Compute(trainData.Input);
                IMLData ideal  = trainData.Ideal;

                double maxValue = Double.MinValue;
                int    maxIndex = 0;

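                // find the index of the output neuron with the highest activation (argmax)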
                for (int i = 0; i < output.Count; ++i)
                {
                    if (maxValue < output[i])
                    {
                        maxValue = output[i];
                        maxIndex = i;
                    }
                }

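                // count a misclassification when the ideal value at the winning index is not 1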
                if (Math.Abs(ideal[maxIndex] - 1) > 0.0001)
                {
                    errorCount++;
                }
            }

            return((double)errorCount / trainingSet.Count);
        }
Example #2
        public void Predict(BasicNetwork network)
        {
            Console.WriteLine(@"Year    Actual    Predict     Closed Loop     Predict    Denormalized Value   Real Value");

            for (var year = EvaluateStart; year < EvaluateEnd; year++)
            {
                // calculate based on actual data
                var input = new BasicMLData(WindowSize);
                for (var i = 0; i < input.Count; i++)
                {
                    input[i] = _normalizedForexPair[(year - WindowSize) + i];
                }
                IMLData output     = network.Compute(input);
                var     prediction = output[0];
                _closedLoopForexPair[year] = prediction;

                // calculate "closed loop", based on predicted data
                for (var i = 0; i < input.Count; i++)
                {
                    input[i] = _closedLoopForexPair[(year - WindowSize) + i];
                }
                output = network.Compute(input);
                var closedLoopPrediction = output[0];

                // display
                Console.WriteLine("{0}  {1}  {2}  {3} Accuracy:{4} Denormalized:{5} Real value:{6}",
                                  (StartingYear + year),
                                  Format.FormatDouble(_normalizedForexPair[year], 5),
                                  Format.FormatDouble(prediction, 5),
                                  Format.FormatDouble(closedLoopPrediction, 5),
                                  Format.FormatDouble(_normalizedForexPair[year] - prediction, 5),
                                  array.Stats.DeNormalize(prediction),
                                  ForexPair[year]);
            }
        }
Example #3
        private static void ZrobKlasyfikacje()
        {
            DaneKlasyfikacja doNauki = new DaneKlasyfikacja();

            doNauki.Wczytaj(sciezkaKlasyfikacjaTreningowe);

            BasicNetwork siec        = UtworzSiecDoKlasyfikacji();
            IMLDataSet   dataTrening = UczSiec(siec, doNauki);

            // test the network on the training data
            for (int i = 0; i < dataTrening.Count; i++)
            {
                IMLData wynik = siec.Compute(dataTrening[i].Input);
                doNauki.klasyWy.Add(wynik[0]);
            }

            doNauki.EksportujDoPliku(sciezkaKlasyfikacjaTreningoweWyniki);

            // test the network on the test data
            DaneKlasyfikacja doTestow = new DaneKlasyfikacja();

            doTestow.Wczytaj(sciezkaKlasyfikacjaTestowe);
            IMLDataSet dataTest = new BasicMLDataSet(doTestow.punkty.ToArray(), doTestow.klasyWej.ToArray());

            for (int i = 0; i < dataTest.Count; i++)
            {
                IMLData wynik = siec.Compute(dataTest[i].Input);
                doTestow.klasyWy.Add(wynik[0]);
            }
            doTestow.EksportujDoPliku(sciezkaKlasyfikacjaTestoweWyniki);
        }
Example #4
        private static void ZrobRegresje()
        {
            DaneRegresja doNauki = new DaneRegresja();

            doNauki.Wczytaj(sciezkaRegresjaTreningowe);

            BasicNetwork siec        = UtworzSiecDoRegresji();
            IMLDataSet   dataTrening = UczSiec(siec, doNauki);

            // test the network on the training data
            for (int i = 0; i < dataTrening.Count; i++)
            {
                IMLData wynik = siec.Compute(dataTrening[i].Input);
                doNauki.otrzymaneY.Add(wynik[0]);
            }

            doNauki.EksportujDoPliku(sciezkaRegresjaTreningoweWyniki);

            // test the network on the test data
            DaneRegresja doTestow = new DaneRegresja();

            doTestow.Wczytaj(sciezkaRegresjaTestowe);
            IMLDataSet dataTest = new BasicMLDataSet(doTestow.wejscioweX.ToArray(), doTestow.oczekiwaneY.ToArray());

            for (int i = 0; i < dataTest.Count; i++)
            {
                IMLData wynik = siec.Compute(dataTest[i].Input);
                doTestow.otrzymaneY.Add(wynik[0]);
            }
            doTestow.EksportujDoPliku(sciezkaRegresjaTestoweWyniki);
        }
Example #5
        public void Predict(BasicNetwork network)
        {
            Console.WriteLine(@"Year    Actual  Predict Closed Loop Predict");

            for (int year = EvaluateStart; year < EvaluateEnd; year++)
            {
                // calculate based on actual data
                var input = new BasicMLData(WindowSize);
                for (var i = 0; i < input.Count; i++)
                {
                    input[i] = _normalizedSunspots[(year - WindowSize) + i];
                }
                IMLData output     = network.Compute(input);
                double  prediction = output[0];
                _closedLoopSunspots[year] = prediction;

                // calculate "closed loop", based on predicted data
                for (var i = 0; i < input.Count; i++)
                {
                    input[i] = _closedLoopSunspots[(year - WindowSize) + i];
                }
                output = network.Compute(input);
                double closedLoopPrediction = output[0];

                // display
                Console.WriteLine((StartingYear + year)
                                  + @"  " + Format.FormatDouble(_normalizedSunspots[year], 2)
                                  + @"  " + Format.FormatDouble(prediction, 2)
                                  + @"  " + Format.FormatDouble(closedLoopPrediction, 2));
            }
        }
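Example #6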
        public void Predict(BasicNetwork network)
        {
            Console.WriteLine(@"Year    Actual    Predict     Closed Loop     Predict    Denormalized Value   Real Value");

            for (int year = EvaluateStart; year < EvaluateEnd; year++)
            {
                // calculate based on actual data
                IMLData input = new BasicMLData(WindowSize);
                for (var i = 0; i < input.Count; i++)
                {
                    input.Data[i] = _normalizedSunspots[(year - WindowSize) + i];
                }
                IMLData output     = network.Compute(input);
                double  prediction = output.Data[0];
                _closedLoopSunspots[year] = prediction;

                // calculate "closed loop", based on predicted data
                for (var i = 0; i < input.Count; i++)
                {
                    input.Data[i] = _closedLoopSunspots[(year - WindowSize) + i];
                }
                output = network.Compute(input);
                double closedLoopPrediction = output.Data[0];

                // display
                Console.WriteLine((StartingYear + year)
                                  + @"  " + Format.FormatDouble(_normalizedSunspots[year], 5)
                                  + @"  " + Format.FormatDouble(prediction, 5)
                                  + @"  " + Format.FormatDouble(closedLoopPrediction, 5)
                                  + @" Accuracy:" +
                                  Format.FormatDouble(_normalizedSunspots[year] - prediction, 5)
                                  + " Denormalized:" + array.Stats.DeNormalize(prediction)
                                  + " Real value:" + Sunspots[year]);
            }
        }
Example #7
    /// <summary>
    /// Returns the prediction of the network for a given input.
    /// </summary>
    /// <param name="dataSource"></param>
    /// <returns></returns>
    public double[] getNetworkPrediction(NNDataSource dataSource)
    {
      double[] networkOutputs = new double[OUTPUT_NEURONS];
      double[] networkInputs = new double[INPUT_NEURONS];
      dataSource.returnInput(ref networkInputs);

      network.Compute(networkInputs, networkOutputs);

      if (networkOutputs.Length != OUTPUT_NEURONS)
        throw new Exception("Unexpected number of network outputs.");

      return networkOutputs;
    }
Example #8
        /// <summary>
        /// Program entry point.
        /// </summary>
        /// <param name="app">Holds arguments and other info.</param>
        public void Execute(IExampleInterface app)
        {
            // create a neural network, without using a factory
            var network = new BasicNetwork();

            network.AddLayer(new BasicLayer(null, true, 2));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
            network.Structure.FinalizeStructure();
            network.Reset();

            // create training data
            IMLDataSet trainingSet = new BasicMLDataSet(XORInput, XORIdeal);

            // train the neural network
            IMLTrain train = new ResilientPropagation(network, trainingSet);

            int epoch = 1;

            do
            {
                train.Iteration();
                Console.WriteLine(@"Epoch #" + epoch + @" Error:" + train.Error);
                epoch++;
            } while (train.Error > 0.01);

            // test the neural network
            Console.WriteLine(@"Neural Network Results:");
            foreach (IMLDataPair pair in trainingSet)
            {
                IMLData output = network.Compute(pair.Input);
                Console.WriteLine(pair.Input[0] + @"," + pair.Input[1]
                                  + @", actual=" + output[0] + @",ideal=" + pair.Ideal[0]);
            }
        }
Example #9
        /// <summary>
        ///     Process one training set element.
        /// </summary>
        /// <param name="errorCalc">The error calculation to use.</param>
        /// <param name="input">The network input.</param>
        /// <param name="ideal">The ideal values.</param>
        public void Process(IErrorCalculation errorCalc, double[] input, double[] ideal)
        {
            _network.Compute(input, _actual);

            errorCalc.UpdateError(_actual, ideal, 1.0);

            // Calculate error for the output layer.
            var outputLayerIndex = _network.Layers.Count - 1;
            var outputActivation = _network.Layers[outputLayerIndex].Activation;

            errorFunction.CalculateError(
                outputActivation, _layerSums, _layerOutput,
                ideal, _actual, _layerDelta, 0, 1.0);

            // Apply regularization, if requested.
            if (_owner.L1 > AIFH.DefaultPrecision ||
                _owner.L2 > AIFH.DefaultPrecision)
            {
                var lp = new double[2];
                CalculateRegularizationPenalty(lp);
                for (var i = 0; i < _actual.Length; i++)
                {
                    var p = lp[0] * _owner.L1 + lp[1] * _owner.L2;
                    _layerDelta[i] += p;
                }
            }

            // Propagate backwards (chain rule from calculus).
            for (var i = _network.Layers.Count - 1; i > 0; i--)
            {
                var layer = _network.Layers[i];
                layer.ComputeGradient(this);
            }
        }
Example #10
        static void Main(string[] args)
        {
            INeuralDataSet trainingSet = new BasicNeuralDataSet(AndInput, AndIdeal);
            var            network     = new BasicNetwork();

            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 2));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 2));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 1));
            network.Structure.FinalizeStructure();
            network.Reset();

            ITrain train = new ResilientPropagation(network, trainingSet);

            int epoch = 1;

            do
            {
                train.Iteration();
                Console.WriteLine($"Epoch no {epoch}. Error: {train.Error}");
                epoch++;
            } while ((epoch < MaxEpoch) && (train.Error > AcceptableError));

            Console.WriteLine("\nAnd function Results:");
            foreach (IMLDataPair pair in trainingSet)
            {
                IMLData output = network.Compute(pair.Input);
                Console.WriteLine($"{pair.Input[0]} AND {pair.Input[1]} should be: {pair.Ideal[0]} actual value is: {output[0]}");
            }


            Console.ReadKey();
        }
Example #11
        static void Main(string[] args)
        {
            var network = new BasicNetwork();

            network.AddLayer(new BasicLayer(null, true, 4));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 4));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 5));
            network.Structure.FinalizeStructure();
            network.Reset();
            IMLDataSet trainingSet = new BasicMLDataSet(SensoriInput, AttuatoriOutput);
            IMLTrain   train       = new ResilientPropagation(network, trainingSet);
            int        epoch       = 1;

            do
            {
                /* Start the weight update */
                train.Iteration();
                Console.WriteLine(@"Epoch #" + epoch + @" Error:" + train.Error);
                epoch++;
            } while (train.Error > 0.001); /* Iterate until a tolerable error is reached */

            /* Test the MLP */
            Console.WriteLine("\r\n+------------------------------------+");
            Console.WriteLine("|Neural Network Results:             |");
            Console.WriteLine("+------------------------------------+");
            foreach (IMLDataPair pair in trainingSet)
            {
                IMLData output = network.Compute(pair.Input);
                Console.WriteLine("Input:" + pair.Input[0] + " - " + pair.Input[1] + " - " + pair.Input[2] + " - " + pair.Input[3]
                                  + "\tactual=" + Math.Round(output[0], 2) + " - " + Math.Round(output[1], 2) + " - " + Math.Round(output[2], 2) + " - " + Math.Round(output[3], 2) + " - " + Math.Round(output[4], 2)
                                  + "\tideal=" + pair.Ideal[0] + " - " + pair.Ideal[1] + " - " + pair.Ideal[2] + " - " + pair.Ideal[3] + " - " + pair.Ideal[4]);
            }
            Console.Read();
        }
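Example #12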
        public LasPoint.ClassificationType[] Classify(LasFile file)
        {
            Stopwatch           sw     = Stopwatch.StartNew();
            LasPointDataRecords points = file.LasPointDataRecords;

            LasPoint.ClassificationType[] output = new LasPoint.ClassificationType[points.Count];
            Statistics stats = new Statistics();

            stats.Count = points.Count;
            for (int i = 0; i < points.Count; i++)
            {
                LasPoint3Short point   = (LasPoint3Short)points[i];
                double         green   = point.Green - (point.Red + point.Blue) / 2;
                IMLData        classed = Network.Compute(
                    new BasicMLData(new double[] { file.LasHeader.ScaleZ(point.Z), point.Intensity, green }));
                output[i] = Utills.QuickClassess[classed.IndexOfMax()];
                if (output[i] != points[i].Classification)
                {
                    stats.ClassErrors[(int)points[i].Classification]++;
                }
                stats.PredictionMatrix[(int)points[i].Classification, (int)output[i]]++;
                stats.ClassCount[(int)output[i]]++;
                stats.ClassRealCount[(int)points[i].Classification]++;
                if (i % 1000 == 0)
                {
                    Console.WriteLine(i);
                }
            }
            Console.Write(stats.ToString());
            sw.Stop();
            Console.WriteLine("Czas trwania [" + sw.Elapsed.TotalSeconds.ToString() + "s]");
            stats.SaveMatrixAsCSV();
            return(output);
        }
Example #13
        public void Run()
        {
            // Create the neural network with its layers
            var network = new BasicNetwork();

            network.AddLayer(new BasicLayer(null, true, 2));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
            network.Structure.FinalizeStructure();
            network.Reset();

            // Create the training set
            IMLDataSet conjuntoEntrenamiento = new BasicMLDataSet(entradas, salidas);

            // Train the network
            IMLTrain train = new ResilientPropagation(network, conjuntoEntrenamiento);

            int epoch = 1;

            do
            {
                train.Iteration();
                Console.WriteLine("Epoca #" + epoch + " Error:" + train.Error);
                epoch++;
            } while (train.Error > 0.01);

            // test the neural network
            Console.WriteLine("Resultados:");
            foreach (IMLDataPair pair in conjuntoEntrenamiento)
            {
                IMLData output = network.Compute(pair.Input);
                Console.WriteLine(pair.Input[0] + @"," + pair.Input[1]
                                  + @", actual=" + output[0] + @",ideal=" + pair.Ideal[0]);
            }
        }
Example #14
        static void Main(string[] args)
        {
            Console.Write("Enter network ID: ");
            int networkID = int.Parse(Console.ReadLine());

            FileInfo trainFile   = new FileInfo("dataset.egb");
            FileInfo networkFile = new FileInfo($"network{networkID}.nn");

            IMLDataSet   trainingSet = LoadDataSet(trainFile);
            BasicNetwork network     = LoadNetwork(networkFile);

            using (var p = Process.GetCurrentProcess())
                Console.WriteLine($"RAM usage: {p.WorkingSet64 / 1024 / 1024} MB.");

            foreach (var sample in trainingSet)
            {
                Console.WriteLine("------------");
                for (int i = 0; i < sample.Ideal.Count; i++)
                {
                    Console.Write(fmt, sample.Ideal[i]);
                }
                Console.WriteLine();
                //Console.WriteLine(sample.Ideal.ToString());
                IMLData res = network.Compute(sample.Input);
                for (int i = 0; i < sample.Ideal.Count; i++)
                {
                    Console.Write(fmt, res[i]);
                }
                Console.WriteLine();
                //Console.WriteLine(res.ToString());
                Console.ReadKey();
            }
        }
Example #15
        protected override TimeSeries _BuildOutput(TimeSeries simulatedData, object userState = null)
        {
            if (mModel == null)
            {
                mModel = BuildModel(simulatedData, out mScaleFactor);
            }

            TimeSeries preds = new TimeSeries();

            for (int i = 0; i < simulatedData.Count; ++i)
            {
                double[] input = new double[mWindowSize];
                for (int j = 0; j < mWindowSize; ++j)
                {
                    int index = i - (mWindowSize - j);
                    if (index >= 0)
                    {
                        input[j] = simulatedData[index] / mScaleFactor;
                    }
                }

                double[] output = new double[1];
                mModel.Compute(input, output);
                preds.Add(simulatedData.TimeStamp(i), output[0] * mScaleFactor, false);
            }

            return(preds);
        }
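Example #16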
        public TimeSeries Predict(BasicNetwork network, NormalizeArray norm, TimeSeries simulatedData)
        {
            double[] data = GenerateData(simulatedData);

            int        data_count = simulatedData.Count;
            TimeSeries ts         = new TimeSeries();
            double     input_val  = 0;

            for (int idx = 0; idx < data_count; ++idx)
            {
                var input = new BasicMLData(WindowSize);
                for (var i = 0; i < WindowSize; i++)
                {
                    int idx2 = (idx - WindowSize) + i;
                    if (idx2 < 0)
                    {
                        input_val = 0;
                    }
                    else
                    {
                        input_val = norm.Stats.Normalize(data[idx2]);
                    }
                    input[i] = input_val;
                }
                IMLData output     = network.Compute(input);
                double  prediction = norm.Stats.DeNormalize(output[0]);
                ts.Add(simulatedData.TimeStamp(idx), prediction, false);
            }

            return(ts);
        }
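Example #17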
            double Evaluate(double[] input)
            {
                var output = new double[1];

                _network.Compute(input, output);
                return(output[0]);
            }
Example #18
        public static void Main()
        {
            BasicNetwork network = new BasicNetwork();
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 2));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 6));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 1));
            network.Structure.FinalizeStructure();
            network.Reset();

            INeuralDataSet trainingSet = new BasicNeuralDataSet(XorInput, XorIdeal);

            ITrain train = new ResilientPropagation(network, trainingSet);

            int epoch = 1;
            var timer = Stopwatch.StartNew();
            do
            {
                train.Iteration();
                epoch++;
            } while ((epoch < 50000) && (train.Error > 0.0001));

            timer.Stop();

            Console.WriteLine("Neural Network Results:");
            foreach (var pair in trainingSet)
            {
                var output = network.Compute(pair.Input);
                Console.WriteLine(pair.Input[0] + "," + pair.Input[1]
                        + ", actual=" + output[0] + ", ideal=" + pair.Ideal[0]);
            }
            Console.WriteLine($"Completed {epoch} epochs in {timer.Elapsed} ({(float)timer.ElapsedMilliseconds / epoch} ms per epoch)");
            Console.ReadLine();
        }
Example #19
        public static void Main(string[] args)
        {
            var data    = LoadData();
            var label   = LoadLabel();
            var network = new BasicNetwork();

            network.AddLayer(new BasicLayer(null, true, 4));
            network.AddLayer(new BasicLayer(new ActivationReLU(), true, 8));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 3));
            network.Structure.FinalizeStructure();
            network.Reset();

            var trainingSet = new BasicMLDataSet(data, label);

            network.Flat.Weights = LoadNetwork();
            //Train(network, trainingSet);
            //SaveNetwork(network);

            var sum = 0;

            foreach (var pair in trainingSet)
            {
                var output = network.Compute(pair.Input);
                Console.WriteLine($"Input: {pair.Input} Actual: {output.ToOneHot()} Ideal: {pair.Ideal}");
                if (CompareMLData(output.ToOneHot(), pair.Ideal))
                {
                    sum++;
                }
            }

            Console.WriteLine($"Result = {((float)sum / trainingSet.Count) * 100}");
        }
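Example #20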
        public void Run()
        {
            var network = new BasicNetwork();

            network.AddLayer(new BasicLayer(null, true, 2));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
            network.Structure.FinalizeStructure();
            network.Reset();
            IMLDataSet trainingSet = new BasicMLDataSet(XORInput, XORIdeal);
            IMLTrain   train       = new ResilientPropagation(network, trainingSet);

            int epoch = 1;

            do
            {
                train.Iteration();
                Console.WriteLine(@"Epoch # " + epoch + @" Error: " + train.Error);
                epoch++;
            } while (train.Error > 0.01);

            train.FinishTraining();

            Console.WriteLine(@"Neural Network Results: ");
            foreach (IMLDataPair pair in trainingSet)
            {
                IMLData output = network.Compute(pair.Input);
                Console.WriteLine(pair.Input[0] + @" , " + pair.Input[1] + @" , actual = " + output[0] + @" , ideal = " + pair.Ideal[0]);
            }

            EncogFramework.Instance.Shutdown();
        }
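Example #21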
        public int IdentifyMagic(double[] raw_input)
        {
            List <double> input;
            double        max = 0;

            double[] op    = new double[4];
            int      index = 0;

            for (int i = 0; i < magics.Count; i++)
            {
                input = raw_input.ToList();
                input.AddRange(magics[i].baseline);
                IMLData inputData = new BasicMLData(input.ToArray());
                IMLData output;
                output = network.Compute(inputData);
                op[i]  = output[0];
                if (max < output[0])
                {
                    max   = output[0];
                    index = i;
                }
            }
            foreach (double o in op)
            {
                Debug.Log(o);
            }
            return((max > tolerance) ? index : -1);
        }
Example #22
        public static double EvaluateNetworks(BasicNetwork network, BasicMLDataSet set)
        {
            int count   = 0;
            int correct = 0;

            foreach (IMLDataPair pair in set)
            {
                IMLData input       = pair.Input;
                IMLData actualData  = pair.Ideal;
                IMLData predictData = network.Compute(input);

                double actual  = actualData[0];
                double predict = predictData[0];
                double diff    = Math.Abs(predict - actual);

                Direction actualDirection  = DetermineDirection(actual);
                Direction predictDirection = DetermineDirection(predict);

                if (actualDirection == predictDirection)
                {
                    correct++;
                }
                count++;
                Console.WriteLine(@"Number " + count + @": actual=" + Format.FormatDouble(actual, 4) + @"(" + actualDirection + @")"
                                  + @",predict=" + Format.FormatDouble(predict, 4) + @"(" + predictDirection + @")" + @",diff=" + diff);
            }
            double percent = correct / (double)count;

            Console.WriteLine(@"Direction correct:" + correct + @"/" + count);
            Console.WriteLine(@"Directional Accuracy:"
                              + Format.FormatPercent(percent));

            return(percent);
        }
Example #23
        public List <PredictionResults> Predict(DateTime predictFrom, DateTime predictTo)
        {
            List <PredictionResults> results = new List <PredictionResults>();

            double[] present      = new double[InputTuples * IndexesToConsider];
            double[] actualOutput = new double[OutputSize];
            int      index        = 0;

            foreach (var sample in _manager.Samples)
            {
                if (sample.Date.CompareTo(predictFrom) > 0 && sample.Date.CompareTo(predictTo) < 0)
                {
                    var result = new PredictionResults();
                    _manager.GetInputData(index - InputTuples, present);
                    _manager.GetOutputData(index - InputTuples, actualOutput);
                    var data    = new BasicNeuralData(present);
                    var predict = _network.Compute(data);
                    result.ActualLotos    = actualOutput[0] * (_manager.MaxLotos - _manager.MinLotos) + _manager.MinLotos;
                    result.PredictedLotos = predict[0] * (_manager.MaxLotos - _manager.MinLotos) + _manager.MinLotos;
                    result.ActualPir      = actualOutput[1] * (_manager.MaxPrimeRate - _manager.MinPrimeRate) + _manager.MinPrimeRate;
                    result.PredictedPir   = predict[1] * (_manager.MaxPrimeRate - _manager.MinPrimeRate) + _manager.MinPrimeRate;
                    result.ActualOrlen    = actualOutput[2] * (_manager.MaxOrlen - _manager.MinOrlen) + _manager.MinOrlen;
                    result.PredictedOrlen = predict[2] * (_manager.MaxOrlen - _manager.MinOrlen) + _manager.MinOrlen;
                    result.Date           = sample.Date;
                    var error = new ErrorCalculation();
                    error.UpdateError(actualOutput, predict.Data);
                    result.Error = error.CalculateRMS();
                    results.Add(result);
                }
                index++;
            }
            return(results);
        }
Example #24
    public void GetResults()
    {
        foreach (Texture2D texture in OpenFile.Open())
        {
            IMLDataPair pair = new BasicMLDataPair(new BasicMLData(Texture2List(texture).ToArray()));

            IMLData output = basicNetwork.Compute(pair.Input);

            string result = "";
            float  ideal  = 0;

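            // map the single network output to one of three classes using fixed thresholds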
            if (output[0] < 0.25f)
            {
                result = "Normal";
            }
            else if (output[0] > 0.25f && output[0] < 0.75f)
            {
                result = "Esquemico";
                ideal  = 0.5f;
            }
            else
            {
                result = "Hemorragico";
                ideal  = 1.0f;
            }

            Debug.Log("Isso parece ser: " + result + "\nEquivalência{ Imagem Testada: " + output[0] + " Imagem Original: " + ideal + " }");
            debugText.text = ("Isso parece ser: " + result + "\nEquivalência{ Imagem Testada: " + output[0] + " Imagem Original: " + ideal + " }");
        }
    }
Example #25
 private static void CalculateProfit(IMLDataSet trainingSet, BasicNetwork network, Results res)
 {
     foreach (var article in trainingSet)
     {
         bool tradeMade = false;
         var  computed  = network.Compute(article.Input);
         for (int i = 0; i < trainingSet.IdealSize; i++)
         {
             if (computed[i] > 0.2)
             {
                 tradeMade = true;
                 double reward = article.Ideal[i] * moneyPerTrade;
                 if (reward > transactionFee)
                 {
                     res.win[i]++;
                 }
                 else
                 {
                     res.loss[i]++;
                 }
                 if (reward > 0)
                 {
                     res.goodDir[i]++;
                 }
                 else
                 {
                     res.badDir[i]++;
                 }
                 res.profit[i] += reward - transactionFee;
             }
             if (computed[i] < -0.2)
             {
                 tradeMade = true;
                 double reward = -article.Ideal[i] * moneyPerTrade;
                 if (reward > transactionFee)
                 {
                     res.win[i]++;
                 }
                 else
                 {
                     res.loss[i]++;
                 }
                 if (reward > 0)
                 {
                     res.goodDir[i]++;
                 }
                 else
                 {
                     res.badDir[i]++;
                 }
                 res.profit[i] += reward - transactionFee;
             }
         }
         if (tradeMade)
         {
             res.articlesTraded++;
         }
     }
 }
Example #26
        public static double[] Compute(this BasicNetwork network, double[] input)
        {
            double[] retVal = new double[network.OutputCount];

            network.Compute(input, retVal);

            return(retVal);
        }
Example #27
        /// <summary>
        /// Takes a state of the board and returns probability of
        /// first player win.
        /// </summary>
        /// <param name="vector"></param>
        /// <returns></returns>
        public double EvaluateState(Board board)
        {
            //IMLData input = ANNAdapter.Adapt(board);
            IMLData input  = ANNAdapter.Adapt192(board);
            IMLData output = network.Compute(input);

            return(output[0]);
        }
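Example #28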
        //public double Travel(ref int timeout, out int movesCnt)
        public double Travel(int timerTimeout, out int movesCnt)
        {
            MazeGame game = new MazeGame();

            game.InitGame(maze);
            game.traveler            = this;
            game.traveler.location.X = maze.StartingPosition.X;
            game.traveler.location.Y = maze.StartingPosition.Y;

            var recentOutcome = new MazeCycleOutcome();

            tmr.Interval = timerTimeout;
            tmr.Start();
            timeout  = 0;
            movesCnt = 0;

            while (!recentOutcome.GameOver && timeout == 0)
            {
                movesCnt++;
                var input = new BasicMLData(2);
                input[0] = xInput.Normalize(Convert.ToDouble(game.traveler.location.X));
                input[1] = yInput.Normalize(Convert.ToDouble(game.traveler.location.Y));

                IMLData output = network.Compute(input);

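                // choose the direction whose output neuron fired strongest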
                double maxVal    = double.MinValue;
                int    direction = 0;
                for (int i = 0; i < output.Count; i++)
                {
                    if (output[i] > maxVal)
                    {
                        direction = i;
                        maxVal    = output[i];
                    }
                }
                recentOutcome = game.CycleMaze(direction);
                MazeCycleComplete?.Invoke(game.traveler.location.X, game.traveler.location.Y, recentOutcome.BumpedIntoWall);
            }

            tmr.Stop();

            var score = game.CalculateFinalScore(movesCnt);

            return(score);
        }
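Example #29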
        private void button2_Click(object sender, EventArgs e)
        {
            FeatureExtract Inputs = new FeatureExtract();

            Inputs.dataMagnitude = dataMagnitude;
            Inputs.Feature();
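            // scale each extracted feature to [-1, 1] using fixed min/max bounds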
            feature    = new double[7];
            feature[0] = (Inputs.MAV - 18.231) * 2 / (131.22 - 18.231) - 1;
            feature[1] = (Inputs.RMS - 24.78176668) * 2 / (180.9976057 - 24.78176668) - 1;
            feature[2] = (Inputs.VAR - 575.07346) * 2 / (32723.06952 - 575.07346) - 1;
            feature[3] = (Inputs.SD - 23.98068931) * 2 / (180.8951893 - 23.98068931) - 1;
            feature[4] = (Inputs.WL - 411.76) * 2 / (1664.26 - 411.76) - 1;
            feature[5] = (Inputs.ZC - 1) * 2 / (12 - 1) - 1;
            feature[6] = (Inputs.SSC - 3) * 2 / (15 - 3) - 1;

            IMLData q = new BasicMLData(feature);


            BasicNetwork JST = new BasicNetwork();

            openFileDialog1.Title    = "Open Network File...";
            openFileDialog1.FileName = "";
            openFileDialog1.Filter   = "EG (Encog Network)|*.eg|All Files|*.*";
            if (openFileDialog1.ShowDialog() == DialogResult.Cancel)
            {
                MessageBox.Show("Choice Cancelled");
            }
            else
            {
                JST = (BasicNetwork)EncogDirectoryPersistence.LoadObject(new FileInfo(openFileDialog1.FileName));

                IMLData output = JST.Compute(q);


                if (output[0] > output[1] && output[0] > output[2] && output[0] > output[3])
                {
                    label2.Text = "Atas";
                }
                else if (output[1] > output[0] && output[1] > output[2] && output[1] > output[3])
                {
                    label2.Text = "Bawah";
                }
                else if (output[2] > output[1] && output[2] > output[0] && output[2] > output[3])
                {
                    label2.Text = "Kanan";
                }
                else if (output[3] > output[1] && output[3] > output[2] && output[3] > output[0])
                {
                    label2.Text = "Kiri";
                }
                else
                {
                    label2.Text = "Tidak Terdefinisikan";
                }
                label3.Text = "output" + "= " + output[0] + " " + output[1] + " " + output[2] + " " + output[3];
            }
        }
Example #30
        public double TestSingle(double[] input)
        {
            double[]     outPut  = new double[1];
            BasicNetwork network = (BasicNetwork)SerializeObject.Load(networkFile);

            network.Compute(input, outPut);
            //Console.Write("The chance of winning is: " + Math.Round(output[0]*100,2) + "% | ");
            return(outPut[0]);
        }