BasicNetwork inheritance: Encog.ML.BasicML, IMLMethod, IContainsFlat, IMLContext, IMLRegression, IMLInputOutput, IMLInput, IMLOutput, IMLEncodable, IMLResettable, IMLClassification, IMLError
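BasicNetwork implements every interface listed above, so a single trained instance can be driven as a regressor, a classifier, an error measure, and a resettable model. A minimal sketch of those views, assuming a trained network (network) and a data set (trainingSet) that are not part of the examples below:

        // Sketch: using one trained BasicNetwork through several of its interfaces.
        IMLData input = new BasicMLData(new double[] { 0.0, 1.0 });

        IMLData output = network.Compute(input);             // IMLRegression: raw output vector
        int winner = network.Classify(input);                // IMLClassification: index of the winning output neuron
        double error = network.CalculateError(trainingSet);  // IMLError: mean error over a data set
        network.Reset();                                      // IMLResettable: re-randomize the weights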
        /// <inheritdoc />
        public override void Init(BasicNetwork theNetwork, IMLDataSet theTraining)
        {
            base.Init(theNetwork, theTraining);
            int weightCount = theNetwork.Structure.Flat.Weights.Length;

            _training = theTraining;
            _network = theNetwork;

            _hessianMatrix = new Matrix(weightCount, weightCount);
            _hessian = _hessianMatrix.Data;

            // create worker(s)
            var determine = new DetermineWorkload(
                ThreadCount, _training.Count);

            _workers = new ChainRuleWorker[determine.ThreadCount];

            int index = 0;

            // handle CPU
            foreach (IntRange r in determine.CalculateWorkers())
            {
                _workers[index++] = new ChainRuleWorker((FlatNetwork) _flat.Clone(),
                    _training.OpenAdditional(), r.Low,
                    r.High);
            }
        }
Example #2
        public static double EvaluateNetworks(BasicNetwork network, BasicMLDataSet set)
        {
            int count = 0;
            int correct = 0;
            foreach (IMLDataPair pair in set)
            {
                IMLData input = pair.Input;
                IMLData actualData = pair.Ideal;
                IMLData predictData = network.Compute(input);

                double actual = actualData[0];
                double predict = predictData[0];
                double diff = Math.Abs(predict - actual);

                Direction actualDirection = DetermineDirection(actual);
                Direction predictDirection = DetermineDirection(predict);

                if (actualDirection == predictDirection)
                    correct++;
                count++;
                Console.WriteLine(@"Number " + count + @": actual=" + Format.FormatDouble(actual, 4) + @"(" + actualDirection + @")"
                                  + @",predict=" + Format.FormatDouble(predict, 4) + @"(" + predictDirection + @")" + @",diff=" + diff);
               
            }
            double percent = correct / (double)count;
            Console.WriteLine(@"Direction correct:" + correct + @"/" + count);
            Console.WriteLine(@"Directional Accuracy:"
                              + Format.FormatPercent(percent));

            return percent;
        }
        /// <summary>
        ///   Measure the performance of the network.
        /// </summary>
        /// <param name = "network">Network to analyze.</param>
        /// <param name = "dataset">Dataset with input and ideal data.</param>
        /// <returns>The fraction of output bits the network got correct.</returns>
        public static double MeasurePerformance(BasicNetwork network, BasicNeuralDataSet dataset)
        {
            int correctBits = 0;
            float threshold = 0.0f;
            IActivationFunction activationFunction = network.GetActivation(network.LayerCount - 1); //get the activation function of the output layer
            if (activationFunction is ActivationSigmoid)
            {
                threshold = 0.5f; /* > 0.5, range of sigmoid [0..1]*/
            }
            else if (activationFunction is ActivationTANH)
            {
                threshold = 0.0f; /*> 0, range of bipolar sigmoid is [-1..1]*/
            }
            else
                throw new ArgumentException("Bad activation function");
            int n = (int) dataset.Count;

            Parallel.For(0, n, (i) =>
                               {
                                   IMLData actualOutputs = network.Compute(dataset.Data[i].Input);
                                   lock (LockObject)
                                   {
                                       for (int j = 0, k = actualOutputs.Count; j < k; j++)
                                           if ((actualOutputs[j] > threshold && dataset.Data[i].Ideal[j] > threshold)
                                               || (actualOutputs[j] < threshold && dataset.Data[i].Ideal[j] < threshold))
                                               correctBits++;
                                   }
                               });

            long totalOutputBitsCount = dataset.Count*dataset.Data[0].Ideal.Count;

            return (double) correctBits/totalOutputBitsCount;
        }
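A hedged usage sketch of the two helpers above; trainedNetwork, marketSet (a BasicMLDataSet), and binarySet (a BasicNeuralDataSet) are assumed, illustrative names:

            // Sketch: both helpers return a fraction in the range [0, 1].
            double directionalAccuracy = EvaluateNetworks(trainedNetwork, marketSet);  // share of correctly predicted directions
            double bitAccuracy = MeasurePerformance(trainedNetwork, binarySet);        // share of correctly thresholded output bits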
Example #4
        public override void Run()
        {
            testNetwork = new BasicNetwork();

            testNetwork.AddLayer(new BasicLayer(null, true, 2));
            testNetwork.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 4));
            testNetwork.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
            testNetwork.Structure.FinalizeStructure();
            testNetwork.Reset();

            // create training data
            IMLDataSet trainingSet = new BasicMLDataSet(XORInput, XORIdeal);

            // train the neural network
            IMLTrain train = new Backpropagation(testNetwork, trainingSet);
            //IMLTrain train = new ResilientPropagation(testNetwork, trainingSet); //Encog manual says it is the best general one

            int epoch = 1;

            do
            {
                train.Iteration();
                Console.WriteLine(@"Epoch #" + epoch + @" Error:" + train.Error);
                epoch++;
            } while (train.Error > 0.0001);

            // test the neural network
            Console.WriteLine(@"Neural Network Results:");
            foreach (IMLDataPair pair in trainingSet)
            {
                IMLData output = testNetwork.Compute(pair.Input);
                Console.WriteLine(pair.Input[0] + @"," + pair.Input[1]
                                  + @", actual=" + output[0] + @",ideal=" + pair.Ideal[0]);
            }
        }
Example #5
 public TrainAdaline(BasicNetwork network, IMLDataSet training, double learningRate)
     : base(TrainingImplementationType.Iterative)
 {
     // An ADALINE network has only an input layer and an output layer.
     if (network.LayerCount > 2)
     {
         throw new NeuralNetworkError("An ADALINE network only has two layers.");
     }

     this._x87a7fc6a72741c2e = network;      // the network being trained
     this._x823a2b9c8bf459c5 = training;     // the training set
     this._x9b481c22b6706459 = learningRate; // the learning rate
 }
Example #6
        static void Main(string[] args)
        {
            var network = new BasicNetwork();
            network.AddLayer(new BasicLayer(null, true, 2));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
            network.Structure.FinalizeStructure();
            network.Reset();

            var trainingSet = new BasicMLDataSet(XORInput, XORIdeal);
            var train = new ResilientPropagation(network, trainingSet);
            var epoch = 1;
            do
            {
                train.Iteration();
                Console.WriteLine(@"Epoch #" + epoch + @" Error:" + train.Error);
                epoch++;
            } while (train.Error > 0.01);

            train.FinishTraining();

            foreach (var pair in trainingSet)
            {
                var output = network.Compute(pair.Input);
                Console.WriteLine(pair.Input[0] + @", " + pair.Input[1] + @" , actual=" + output[0] + @", ideal=" + pair.Ideal[0]);
            }

            EncogFramework.Instance.Shutdown();
            Console.ReadLine();
        }
        public static void evaluateNetwork(BasicNetwork network, IMLDataSet training)
        {
            double total = 0;
            int seed = 0;
            int completed = 0;

            Stopwatch sw = new Stopwatch();

            sw.Start();
            while (completed < SAMPLE_SIZE)
            {
                new ConsistentRandomizer(-1, 1, seed).Randomize(network);
                int iter = Evaluate(network, training);
                if (iter == -1)
                {
                    seed++;
                }
                else
                {
                    total += iter;
                    seed++;
                    completed++;
                }
            }

            sw.Stop();


            Console.WriteLine(network.GetActivation(1).GetType().Name + ": time="
                    + Format.FormatInteger((int)sw.ElapsedMilliseconds)
                    + "ms, Avg Iterations: "
                    + Format.FormatInteger((int)(total / SAMPLE_SIZE)));

        }
Example #8
        static void Main(string[] args)
        {
            // create a neural network without using a factory
            var network = new BasicNetwork();
            network.AddLayer(new BasicLayer(null, true, 2));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 2));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));

            network.Structure.FinalizeStructure();
            network.Reset();

            IMLDataSet trainingSet = new BasicMLDataSet(XORInput, XORIdeal);
            IMLTrain train = new ResilientPropagation(network, trainingSet);

            int epoch = 1;
            do
            {
                train.Iteration();
                Console.WriteLine($"Epoch #{epoch} Error: {train.Error}");
                epoch++;
            } while (train.Error > 0.01);
            train.FinishTraining();

            Console.WriteLine("Neural Network Results:");
            foreach (IMLDataPair iPair in trainingSet)
            {
                IMLData output = network.Compute(iPair.Input);
                Console.WriteLine($"{iPair.Input[0]}, {iPair.Input[1]}, actual={output[0]}, ideal={iPair.Ideal[0]}");
            }

            EncogFramework.Instance.Shutdown();

            Console.ReadKey();
        }
Example #9
        public void TestSingleOutput()
        {

            BasicNetwork network = new BasicNetwork();
            network.AddLayer(new BasicLayer(null, true, 2));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 2));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
            network.Structure.FinalizeStructure();

            (new ConsistentRandomizer(-1, 1)).Randomize(network);

            IMLDataSet trainingData = new BasicMLDataSet(XOR.XORInput, XOR.XORIdeal);

            HessianFD testFD = new HessianFD();
            testFD.Init(network, trainingData);
            testFD.Compute();

            HessianCR testCR = new HessianCR();
            testCR.Init(network, trainingData);
            testCR.Compute();

            //dump(testFD, "FD");
            //dump(testCR, "CR");
            Assert.IsTrue(testCR.HessianMatrix.equals(testFD.HessianMatrix, 4));
        }
Example #10
        private void Preprocessing_Completed(object sender, RunWorkerCompletedEventArgs e)
        {
            worker.ReportProgress(0, "Creating Network...");
            BasicNetwork Network = new BasicNetwork();
            Network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, DataContainer.NeuralNetwork.Data.InputSize));
            Network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 50));
            Network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, DataContainer.NeuralNetwork.Data.IdealSize));
            Network.Structure.FinalizeStructure();
            Network.Reset();
            DataContainer.NeuralNetwork.Network = Network;

            ResilientPropagation training = new ResilientPropagation(DataContainer.NeuralNetwork.Network, DataContainer.NeuralNetwork.Data);
            worker.ReportProgress(0, "Running Training: Epoch 0");
            for(int i = 0; i < 200; i++)
            {
                training.Iteration();
                worker.ReportProgress(0, "Running Training: Epoch " + (i+1).ToString() + "     Current Training Error : " + training.Error.ToString());
                if(worker.CancellationPending == true)
                {
                    completed = true;
                    return;
                }

            }
            completed = true;
        }
        public static long BenchmarkEncog(double[][] input, double[][] output)
        {
            var network = new BasicNetwork();
            network.AddLayer(new BasicLayer(null, true,
                                            input[0].Length));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true,
                                            HIDDEN_COUNT));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), false,
                                            output[0].Length));
            network.Structure.FinalizeStructure();
            network.Reset(23); // constant seed for repeatable testing

            IMLDataSet trainingSet = new BasicMLDataSet(input, output);

            // train the neural network
            IMLTrain train = new Backpropagation(network, trainingSet, 0.7, 0.7);

            var sw = new Stopwatch();
            sw.Start();
            // run epoch of learning procedure
            for (int i = 0; i < ITERATIONS; i++)
            {
                train.Iteration();
            }
            sw.Stop();

            return sw.ElapsedMilliseconds;
        }
        /// <summary>
        /// Program entry point.
        /// </summary>
        /// <param name="app">Holds arguments and other info.</param>
        public void Execute(IExampleInterface app)
        {
            // create a neural network, without using a factory
            var network = new BasicNetwork();
            network.AddLayer(new BasicLayer(null, true, 2));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
            network.Structure.FinalizeStructure();
            network.Reset();

            // create training data
            IMLDataSet trainingSet = new BasicMLDataSet(XORInput, XORIdeal);

            // train the neural network
            IMLTrain train = new ResilientPropagation(network, trainingSet);

            int epoch = 1;

            do
            {
                train.Iteration();
                Console.WriteLine(@"Epoch #" + epoch + @" Error:" + train.Error);
                epoch++;
            } while (train.Error > 0.01);

            // test the neural network
            Console.WriteLine(@"Neural Network Results:");
            foreach (IMLDataPair pair in trainingSet)
            {
                IMLData output = network.Compute(pair.Input);
                Console.WriteLine(pair.Input[0] + @"," + pair.Input[1]
                                  + @", actual=" + output[0] + @",ideal=" + pair.Ideal[0]);
            }
        }
Example #13
       /// <summary>
       /// Saves the network to the specified directory with the specified parameter name.
       /// </summary>
       /// <param name="directory">The directory.</param>
       /// <param name="file">The file.</param>
       /// <param name="anetwork">The network to save..</param>
       public static void SaveNetwork(string directory, string file, BasicNetwork anetwork)
       {
           FileInfo networkFile = FileUtil.CombinePath(new FileInfo(directory), file);
           EncogDirectoryPersistence.SaveObject(networkFile, anetwork);
           return;

       }
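The reverse operation, reading a saved network back from disk, follows the same pattern; a minimal sketch (LoadNetwork is a hypothetical helper name, not part of the example above):

       public static BasicNetwork LoadNetwork(string directory, string file)
       {
           FileInfo networkFile = FileUtil.CombinePath(new FileInfo(directory), file);
           // EncogDirectoryPersistence returns the persisted object; cast it back to BasicNetwork.
           return (BasicNetwork) EncogDirectoryPersistence.LoadObject(networkFile);
       }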
Example #14
 public JacobianChainRule(BasicNetwork network, IMLDataSet indexableTraining)
 {
     this._xb12276308f0fa6d9 = indexableTraining;                    // the training data
     this._x87a7fc6a72741c2e = network;                              // the network
     this._xabb126b401219ba2 = network.Structure.CalculateSize();    // number of weights (parameters)
     this._x530ae94d583e0ea1 = (int) this._xb12276308f0fa6d9.Count;  // number of training rows

     // Jacobian matrix (rows x parameters) and the per-row error vector.
     this._xbdeab667c25bbc32 = EngineArray.AllocateDouble2D(this._x530ae94d583e0ea1, this._xabb126b401219ba2);
     this._xc8a462f994253347 = new double[this._x530ae94d583e0ea1];

     // Reusable pair sized to the training set's input and ideal vectors.
     BasicMLData data = new BasicMLData(this._xb12276308f0fa6d9.InputSize);
     BasicMLData data2 = new BasicMLData(this._xb12276308f0fa6d9.IdealSize);
     this._x61830ac74d65acc3 = new BasicMLDataPair(data, data2);
 }
Example #15
        /// <summary>
        /// Randomize the connections between two layers.
        /// </summary>
        /// <param name="network">The network to randomize.</param>
        /// <param name="fromLayer">The starting layer.</param>
        private void RandomizeSynapse(BasicNetwork network, int fromLayer)
        {
            int toLayer = fromLayer + 1;
            int toCount = network.GetLayerNeuronCount(toLayer);
            int fromCount = network.GetLayerNeuronCount(fromLayer);
            int fromCountTotalCount = network.GetLayerTotalNeuronCount(fromLayer);
            IActivationFunction af = network.GetActivation(toLayer);
            double low = CalculateRange(af, Double.NegativeInfinity);
            double high = CalculateRange(af, Double.PositiveInfinity);

            double b = 0.7d * Math.Pow(toCount, (1d / fromCount)) / (high - low);

            for (int toNeuron = 0; toNeuron < toCount; toNeuron++)
            {
                if (fromCount != fromCountTotalCount)
                {
                    double w = RangeRandomizer.Randomize(-b, b);
                    network.SetWeight(fromLayer, fromCount, toNeuron, w);
                }
                for (int fromNeuron = 0; fromNeuron < fromCount; fromNeuron++)
                {
                    double w = RangeRandomizer.Randomize(0, b);
                    network.SetWeight(fromLayer, fromNeuron, toNeuron, w);
                }
            }
        }
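The factor b above is the Nguyen-Widrow scaling term, 0.7 * toCount^(1/fromCount), divided by the activation range (high - low). Application code would not normally call this private helper directly; a hedged sketch of the usual entry point, assuming Encog's NguyenWidrowRandomizer with a parameterless constructor:

            // Sketch: apply Nguyen-Widrow style weight initialization to a finalized network.
            var randomizer = new NguyenWidrowRandomizer();
            randomizer.Randomize(network);   // network is an already-finalized BasicNetwork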
Example #16
 public NeuralRobot(BasicNetwork network, bool track, Position source, Position destination)
 {
     _hStats = new NormalizedField(NormalizationAction.Normalize, "Heading", 359, 0, .9, -.9);
     _CanGoStats = new NormalizedField(NormalizationAction.Normalize, "CanGo", 1, 0, 0.9, -0.9);
     _track = track;
     _network = network;
     sim = new RobotSimulator(source, destination);
 }
Example #17
 public void Execute()
 {
     _normalizedTrainingData = NormalizeData(trainingData);
     _normalizedPredictionData = NormalizeData(predictionData);
     network = CreateNetwork();
     IMLDataSet training = GenerateTraining();
     Train(training);
     Predict();
 }
Example #18
        public MainWindow()
        {
            neuralNetIter = null;
            neuralNetPair = null;

            rep = new TrainLogRepository(AppConfigHelper.GetCollectionName, AppConfigHelper.GetDBName, AppConfigHelper.GetMongoDBConnectionString);

            InitializeComponent();
        }
 public void EvaluateNetwork(BasicNetwork trainedNetwork, BasicMLDataSet trainingData)
 {
     foreach (var trainingItem in trainingData)
     {
         var output = trainedNetwork.Compute(trainingItem.Input);
         Console.WriteLine("Input:{0}, {1}  Ideal: {2}  Actual : {3}", trainingItem.Input[0], trainingItem.Input[1], trainingItem.Ideal, output[0]);
     }
     Console.ReadKey();
 }
Example #20
        /// <summary>
        /// Create a new tester form
        /// </summary>
        /// <param name="network">Trained neural network to test</param>
        /// <param name="inputFields">List of input fields from Encog Analyst</param>
        /// <param name="outputFields">List of output fields from Encog Analyst</param>
        public frmTest(BasicNetwork network, List<AnalystField> inputFields,
            List<AnalystField> outputFields)
        {
            InitializeComponent();

            m_network = network;
            m_inputFields = inputFields;
            m_outputFields = outputFields;

            foreach(AnalystField field in inputFields)
            {
                switch(field.Name)
                {
                    case "vCoverageType":
                        foreach (ClassItem item in field.Classes)
                            cmbCoverageType.Items.Add(item.Name);
                        cmbCoverageType.SelectedIndex = 0;
                        break;
                    case "vTransaction":
                        foreach (ClassItem item in field.Classes)
                            cmbTransactionType.Items.Add(item.Name);
                        cmbTransactionType.SelectedIndex = 0;
                        break;
                    case "nLoanAmount":
                        trkLoanAmount.Minimum = (int)field.ActualLow;
                        trkLoanAmount.Maximum = (int)field.ActualHigh;
                        trkLoanAmount.TickFrequency = (int)(field.ActualHigh / 10.0);
                        txtLoanAmount.Text = String.Format("{0:C2}", trkLoanAmount.Value);
                        break;
                    case "nLiens":
                        trkLiens.Minimum = (int)field.ActualLow;
                        trkLiens.Maximum = (int)field.ActualHigh;
                        trkLiens.TickFrequency = (int)(field.ActualHigh / 10.0);
                        txtLiens.Text = trkLiens.Value.ToString();
                        break;
                    case "nActions":
                        trkActions.Minimum = (int)field.ActualLow;
                        trkActions.Maximum = (int)field.ActualHigh;
                        trkActions.TickFrequency = (int)(field.ActualHigh / 10.0);
                        txtActions.Text = trkActions.Value.ToString();
                        break;
                    case "nAuditEntriesPerDay":
                        trkAuditEntries.Minimum = (int)field.ActualLow;
                        trkAuditEntries.Maximum = (int)field.ActualHigh;
                        trkAuditEntries.TickFrequency = (int)(field.ActualHigh / 10.0);
                        txtAuditEntries.Text = trkAuditEntries.Value.ToString();
                        break;
                    case "nTotalNotesPerDay":
                        trkNotesLogged.Minimum = (int)field.ActualLow;
                        trkNotesLogged.Maximum = (int)field.ActualHigh;
                        trkNotesLogged.TickFrequency = (int)(field.ActualHigh / 10.0);
                        txtNotesLogged.Text = trkNotesLogged.Value.ToString();
                        break;
                }
            }
        }
 public BasicNetwork generateNetwork()
 {
     var network = new BasicNetwork();
     network.AddLayer(new BasicLayer(INPUT_COUNT));
     network.AddLayer(new BasicLayer(HIDDEN_COUNT));
     network.AddLayer(new BasicLayer(OUTPUT_COUNT));
     network.Structure.FinalizeStructure();
     network.Reset();
     return network;
 }
Example #22
 private static BasicNetwork CreateNetwork()
 {
     var network = new BasicNetwork();
     network.AddLayer(new BasicLayer(null, true, 2));
     network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 2));
     network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
     network.Structure.FinalizeStructure();
     network.Reset();
     return network;
 }
Example #23
 public void Create(int inputnodes,int hiddennodes)
 {
     network = new BasicNetwork();
     network.AddLayer(new BasicLayer(null, true, inputnodes));
     network.AddLayer(new BasicLayer(new ActivationTANH(), true, hiddennodes));
     network.AddLayer(new BasicLayer(new ActivationLinear(), false, 1));
     network.Structure.FinalizeStructure();
     network.Reset();
     this.hiddennodes = hiddennodes;
 }
Example #24
 public static BasicNetwork createElliott()
 {
     BasicNetwork network = new BasicNetwork();
     network.AddLayer(new BasicLayer(null, true, INPUT_OUTPUT));
     network.AddLayer(new BasicLayer(new ActivationElliottSymmetric(), true, HIDDEN));
     network.AddLayer(new BasicLayer(new ActivationElliottSymmetric(), false, INPUT_OUTPUT));
     network.Structure.FinalizeStructure();
     network.Reset();
     return network;
 }
 /// <summary>
 /// Method responsible for creating the neural network
 /// </summary>
 /// <param name="source">FileInfo with the path of the network</param>
 private static void CreateNetwork(FileInfo source)
 {
     var network = new BasicNetwork();
     network.AddLayer(new BasicLayer(new ActivationLinear(), true, 4));
     network.AddLayer(new BasicLayer(new ActivationTANH(), true, 6));
     network.AddLayer(new BasicLayer(new ActivationTANH(), false, 2));
     network.Structure.FinalizeStructure();
     network.Reset();
     EncogDirectoryPersistence.SaveObject(source, (BasicNetwork)network);
 }
 private BasicNetwork ConstructNetwork()
 {
     var network = new BasicNetwork();
     network.AddLayer(new BasicLayer(new ActivationTANH(), true, VanDerWaerdenGameRules.VanDerWaerdenNumber(this.NColors, this.ProgressionLength) - 1));
     network.AddLayer(new BasicLayer(new ActivationTANH(), true, VanDerWaerdenGameRules.VanDerWaerdenNumber(this.NColors, this.ProgressionLength)));
     network.AddLayer(new BasicLayer(new ActivationTANH(), true, 1));
     network.Structure.FinalizeStructure();
     Debug.Print("Created new Network with parameters nColors = {0} and progression length = {1}.", NColors, ProgressionLength);
     return network;
 }
 public static void CreateNetwork(FileOps fileOps)
 {
     var network = new BasicNetwork();
     network.AddLayer(new BasicLayer(new ActivationLinear(),true,4));
     network.AddLayer(new BasicLayer(new ActivationTANH(), true, 6));
     network.AddLayer(new BasicLayer(new ActivationTANH(), true, 2));
     network.Structure.FinalizeStructure();
     network.Reset();
     EncogDirectoryPersistence.SaveObject(fileOps.TrainedNeuralNetworkFile, network);
 }
 public BasicNetwork generateNetwork()
 {
     BasicNetwork network = new BasicNetwork();
     network.AddLayer(new BasicLayer(MultiThreadBenchmark.INPUT_COUNT));
     network.AddLayer(new BasicLayer(MultiThreadBenchmark.HIDDEN_COUNT));
     network.AddLayer(new BasicLayer(MultiThreadBenchmark.OUTPUT_COUNT));
     network.Structure.FinalizeStructure();
     network.Reset();
     return network;
 }
Example #29
File: XOR.cs  Project: jongh0/MTree
 public static BasicNetwork CreateThreeLayerNet()
 {
     var network = new BasicNetwork();
     network.AddLayer(new BasicLayer(2));
     network.AddLayer(new BasicLayer(3));
     network.AddLayer(new BasicLayer(1));
     network.Structure.FinalizeStructure();
     network.Reset();
     return network;
 }
Example #30
        public NeuralPilot(BasicNetwork network, bool track)
        {
            _fuelStats = new NormalizedField(NormalizationAction.Normalize, "fuel", 200, 0, -0.9, 0.9);
            _altitudeStats = new NormalizedField(NormalizationAction.Normalize, "altitude", 10000, 0, -0.9, 0.9);
            _velocityStats = new NormalizedField(NormalizationAction.Normalize, "velocity",
                                                LanderSimulator.TerminalVelocity, -LanderSimulator.TerminalVelocity,
                                                -0.9, 0.9);

            _track = track;
            _network = network;
        }
Example #31
 /// <summary>
 /// Compare the two neural networks. For them to be equal they must be of the
 /// same structure, and have the same matrix values.
 /// </summary>
 /// <param name="other">The other neural network.</param>
 /// <returns>True if the two networks are equal.</returns>
 public bool Equals(BasicNetwork other)
 {
     return(Equals(other,
                   EncogFramework.DEFAULT_PRECISION));
 }
Example #32
 /// <summary>
 /// Compare the two neural networks. For them to be equal they must be of the
 /// same structure, and have the same matrix values.
 /// </summary>
 ///
 /// <param name="other">The other neural network.</param>
 /// <returns>True if the two networks are equal.</returns>
 public bool Equals(BasicNetwork other)
 {
     return(Equals(other, EncogFramework.DefaultPrecision));
 }
Example #33
 /// <summary>
 /// Determine if this neural network is equal to another. Equal neural
 /// networks have the same weight matrix and bias values, within a specified
 /// precision.
 /// </summary>
 ///
 /// <param name="other">The other neural network.</param>
 /// <param name="precision">The number of decimal places to compare to.</param>
 /// <returns>True if the two neural networks are equal.</returns>
 public bool Equals(BasicNetwork other, int precision)
 {
     return(NetworkCODEC.Equals(this, other, precision));
 }
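A short usage sketch of the comparisons above, reusing CreateNetwork() from Example #22 and the ConsistentRandomizer seen earlier; the variable names are illustrative:

 // Sketch: two identically built and identically seeded networks should compare as equal.
 BasicNetwork a = CreateNetwork();
 BasicNetwork b = CreateNetwork();
 new ConsistentRandomizer(-1, 1, 50).Randomize(a);
 new ConsistentRandomizer(-1, 1, 50).Randomize(b);

 bool sameToFourPlaces = a.Equals(b, 4);   // weights compared to 4 decimal places
 bool sameDefault = a.Equals(b);           // uses the framework's default precision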
        /// <summary>
        /// Read an object.
        /// </summary>
        ///
        public Object Read(Stream mask0)
        {
            var result = new BasicNetwork();
            var flat   = new FlatNetwork();
            var ins0   = new EncogReadHelper(mask0);
            EncogFileSection section;

            while ((section = ins0.ReadNextSection()) != null)
            {
                if (section.SectionName.Equals("BASIC") &&
                    section.SubSectionName.Equals("PARAMS"))
                {
                    IDictionary <String, String> paras = section.ParseParams();
                    EngineArray.PutAll(paras, result.Properties);
                }
                if (section.SectionName.Equals("BASIC") &&
                    section.SubSectionName.Equals("NETWORK"))
                {
                    IDictionary <String, String> p = section.ParseParams();

                    flat.BeginTraining = EncogFileSection.ParseInt(p,
                                                                   BasicNetwork.TagBeginTraining);
                    flat.ConnectionLimit = EncogFileSection.ParseDouble(p,
                                                                        BasicNetwork.TagConnectionLimit);
                    flat.ContextTargetOffset = EncogFileSection.ParseIntArray(
                        p, BasicNetwork.TagContextTargetOffset);
                    flat.ContextTargetSize = EncogFileSection.ParseIntArray(
                        p, BasicNetwork.TagContextTargetSize);
                    flat.EndTraining = EncogFileSection.ParseInt(p,
                                                                 BasicNetwork.TagEndTraining);
                    flat.HasContext = EncogFileSection.ParseBoolean(p,
                                                                    BasicNetwork.TagHasContext);
                    flat.InputCount = EncogFileSection.ParseInt(p,
                                                                PersistConst.InputCount);
                    flat.LayerCounts = EncogFileSection.ParseIntArray(p,
                                                                      BasicNetwork.TagLayerCounts);
                    flat.LayerFeedCounts = EncogFileSection.ParseIntArray(p,
                                                                          BasicNetwork.TagLayerFeedCounts);
                    flat.LayerContextCount = EncogFileSection.ParseIntArray(
                        p, BasicNetwork.TagLayerContextCount);
                    flat.LayerIndex = EncogFileSection.ParseIntArray(p,
                                                                     BasicNetwork.TagLayerIndex);
                    flat.LayerOutput = section.ParseDoubleArray(p, PersistConst.Output);
                    flat.LayerSums   = new double[flat.LayerOutput.Length];
                    flat.OutputCount = EncogFileSection.ParseInt(p,
                                                                 PersistConst.OutputCount);
                    flat.WeightIndex = EncogFileSection.ParseIntArray(p,
                                                                      BasicNetwork.TagWeightIndex);
                    flat.Weights        = section.ParseDoubleArray(p, PersistConst.Weights);
                    flat.BiasActivation = section.ParseDoubleArray(p, BasicNetwork.TagBiasActivation);
                }
                else if (section.SectionName.Equals("BASIC") &&
                         section.SubSectionName.Equals("ACTIVATION"))
                {
                    int index = 0;

                    flat.ActivationFunctions = new IActivationFunction[flat.LayerCounts.Length];


                    foreach (String line  in  section.Lines)
                    {
                        IActivationFunction af;
                        IList <String>      cols = EncogFileSection
                                                   .SplitColumns(line);
                        String name = ReflectionUtil.AfPath
                                      + cols[0];
                        try
                        {
                            af = (IActivationFunction)ReflectionUtil.LoadObject(name);
                        }
                        catch (TypeLoadException e)
                        {
                            throw new PersistError(e);
                        }
                        catch (TargetException e)
                        {
                            throw new PersistError(e);
                        }
                        catch (MemberAccessException e)
                        {
                            throw new PersistError(e);
                        }

                        for (int i = 0; i < af.ParamNames.Length; i++)
                        {
                            af.Params[i] =
                                CSVFormat.EgFormat.Parse(cols[i + 1]);
                        }

                        flat.ActivationFunctions[index++] = af;
                    }
                }
            }

            result.Structure.Flat = flat;

            return(result);
        }
        /// <summary>
        /// Find the neural network model solution
        /// </summary>
        private void searchSolution()
        {
            // Normalize Data
            switch (this.selectedActivationFunction)
            {
            case ActivationFunctionEnumeration.SemiLinearFunction:
                this.activationFunction = new SemiLinearFunction();
                this.normalizeData(0.1, 0.9);
                break;

            case ActivationFunctionEnumeration.SigmoidFunction:
                this.activationFunction = new SigmoidFunction();
                this.normalizeData(0.1, 0.9);
                break;

            case ActivationFunctionEnumeration.BipolarSigmoidFunction:
                this.activationFunction = new BipolarSigmoidFunction();
                this.normalizeData(-0.9, 0.9);
                break;

            case ActivationFunctionEnumeration.HyperbolicTangentFunction:
                this.activationFunction = new HyperbolicTangentFunction();
                this.normalizeData(-0.9, 0.9);
                break;

            default:
                this.activationFunction = new BipolarSigmoidFunction();
                this.normalizeData(-0.9, 0.9);
                break;
            }

            //create network
            this.network = new BasicNetwork();
            this.network.AddLayer(new FeedforwardLayer(this.activationFunction, this.inputLayerNeurons));
            this.network.AddLayer(new FeedforwardLayer(this.activationFunction, this.hiddenLayerNeurons));
            this.network.AddLayer(new FeedforwardLayer(this.activationFunction, this.outputLayerNeurons));
            this.network.Reset();

            //variable for looping
            //needToStop = false;
            double mse = 0.0, error = 0.0, mae = 0.0;
            int    iteration = 1;

            // parameters
            double msle = 0.0, mspe = 0.0, generalizationLoss = 0.0, pq = 0.0;

            double[] trainingErrors = new double[this.strip];
            for (int i = 0; i < this.strip; i++)
            {
                trainingErrors[i] = double.MaxValue / strip;
            }

            double lastMSE = double.MaxValue;

            // advanced early stopping
            int n             = this.data.Length - this.network.InputLayer.NeuronCount;
            int validationSet = (int)Math.Round(this.validationSetRatio * n);
            int trainingSet   = n - validationSet;

            double[][] networkTrainingInput  = new double[trainingSet][];
            double[][] networkTrainingOutput = new double[trainingSet][];
            for (int i = 0; i < trainingSet; i++)
            {
                networkTrainingInput[i]  = new double[this.network.InputLayer.NeuronCount];
                networkTrainingOutput[i] = new double[1];
            }
            for (int i = 0; i < trainingSet; i++)
            {
                for (int j = 0; j < this.network.InputLayer.NeuronCount; j++)
                {
                    networkTrainingInput[i][j] = this.networkInput[i][j];
                }
                networkTrainingOutput[i][0] = this.networkOutput[i][0];
            }

            // validation set
            double[] solutionValidation        = new double[validationSet];
            double[] inputForValidation        = new double[this.network.InputLayer.NeuronCount];
            double[] inputForValidationNetwork = new double[this.network.InputLayer.NeuronCount];

            // array for saving neural weights and parameters
            this.bestValidationError = double.MaxValue;
            this.bestWeightMatrix    = new double[this.network.Layers.Count - 1][, ];
            this.bestSolution        = new double[n];

            for (int i = 0; i < this.network.Layers.Count - 1; i++)
            {
                this.bestWeightMatrix[i] = new double[this.network.Layers[i].WeightMatrix.Rows, this.network.Layers[i].WeightMatrix.Cols];
            }

            //best network criterion
            double bestNetworkError = double.MaxValue, bestNetworkMSE = double.MaxValue, bestNetworkMAE = double.MaxValue;

            // build array for graph
            this.solutionData    = new double[n];
            this.predictedPoint  = new cPoint[n];
            this.validationPoint = new cPoint[validationSet];

            //initialize point for graph
            predictedDS.Samples     = predictedPoint;
            validationDS.Samples    = validationPoint;
            this.predictedDS.Active = true;


            // prepare training data
            INeuralDataSet dataset;

            if (this.useAdvanceEarlyStopping)
            {
                dataset = new BasicNeuralDataSet(networkTrainingInput, networkTrainingOutput);
            }
            else
            {
                dataset = new BasicNeuralDataSet(this.networkInput, this.networkOutput);
            }


            // initialize trainer
            this.learning = new Backpropagation(this.network, dataset, this.learningRate, this.momentum);


            //training
            while (!needToStop)
            {
                double sse  = 0.0;
                double sae  = 0.0;
                double ssle = 0.0;
                double sspe = 0.0;

                this.learning.Iteration();
                error = learning.Error;


                if (this.useAdvanceEarlyStopping)
                {
                    this.validationDS.Active = true;
                }
                else
                {
                    this.validationDS.Active = false;
                }

                for (int i = 0; i < n; i++)
                {
                    INeuralData neuraldata = new BasicNeuralData(this.networkInput[i]);

                    this.solutionData[i] = (this.network.Compute(neuraldata)[0]
                                            - this.minNormalizedData) / this.factor + this.minData;

                    this.predictedPoint[i].x = i + this.network.InputLayer.NeuronCount;
                    this.predictedPoint[i].y = (float)this.solutionData[i];

                    sse += Math.Pow(this.solutionData[i] - this.data[i + this.network.InputLayer.NeuronCount], 2);
                    sae += Math.Abs(this.solutionData[i] - this.data[i + this.network.InputLayer.NeuronCount]);

                    //calculate advance early stopping
                    if (this.useAdvanceEarlyStopping)
                    {
                        if (i < n - validationSet)
                        {
                            ssle += Math.Pow(this.solutionData[i] - this.data[i + this.network.InputLayer.NeuronCount], 2);
                        }
                        else
                        {
                            // initialize the first validation set input
                            if (i == n - validationSet)
                            {
                                for (int j = 0; j < this.network.InputLayer.NeuronCount; j++)
                                {
                                    inputForValidation[this.network.InputLayer.NeuronCount - 1 - j] = this.data[this.data.Length - (n - i) - 1 - j];
                                }
                            }

                            for (int j = 0; j < this.network.InputLayer.NeuronCount; j++)
                            {
                                inputForValidationNetwork[j] = (inputForValidation[j] - this.minData) * this.factor + this.minNormalizedData;
                            }

                            INeuralData neuraldataval = new BasicNeuralData(inputForValidationNetwork);
                            solutionValidation[i - n + validationSet] = (this.network.Compute(neuraldataval)[0] - this.minNormalizedData) / this.factor + this.minData;

                            this.validationPoint[i - n + validationSet].x = i + this.network.InputLayer.NeuronCount;
                            this.validationPoint[i - n + validationSet].y = (float)solutionValidation[i - n + validationSet];

                            sspe += Math.Pow(this.data[i + this.network.InputLayer.NeuronCount] - solutionValidation[i - n + validationSet], 2);

                            // initialize the next validation set input from the current validation set input
                            for (int j = 0; j < this.network.InputLayer.NeuronCount - 1; j++)
                            {
                                inputForValidation[j] = inputForValidation[j + 1];
                            }

                            inputForValidation[this.network.InputLayer.NeuronCount - 1] = solutionValidation[i - n + validationSet];
                        }
                    }
                }

                mse = sse / this.solutionData.Length;
                mae = sae / this.solutionData.Length;

                //Console.WriteLine(error.ToString());

                //Display it
                this.iterationBox.Text = iteration.ToString();
                this.maeBox.Text       = mae.ToString("F5");
                this.mseBox.Text       = mse.ToString("F5");
                this.errorBox.Text     = error.ToString("F5");


                seriesGraph.Refresh();

                if (this.useAdvanceEarlyStopping)
                {
                    //calculate advance early stopping 2
                    mspe = sspe / validationSet;
                    msle = ssle / (this.solutionData.Length - validationSet);

                    //save best weight
                    if (this.bestValidationError > mspe)
                    {
                        this.bestValidationError = mspe;
                        this.bestSolution        = this.solutionData;

                        // weight matrix
                        for (int i = 0; i < this.network.Layers.Count - 1; i++)
                        {
                            for (int j = 0; j < this.network.Layers[i].WeightMatrix.Rows; j++)
                            {
                                for (int k = 0; k < this.network.Layers[i].WeightMatrix.Cols; k++)
                                {
                                    this.bestWeightMatrix[i][j, k] = this.network.Layers[i].WeightMatrix[j, k];
                                }
                            }
                        }

                        bestNetworkError = error;
                        bestNetworkMAE   = mae;
                        bestNetworkMSE   = mse;
                    }
                    //calculate generalization loss &pq
                    generalizationLoss = 100 * (mspe / this.bestValidationError - 1);

                    trainingErrors[(iteration - 1) % this.strip] = msle;
                    double minStripTrainingError = double.MaxValue, sumStripTrainingError = 0.0;
                    for (int i = 0; i < this.strip; i++)
                    {
                        sumStripTrainingError += trainingErrors[i];
                        if (trainingErrors[i] < minStripTrainingError)
                        {
                            minStripTrainingError = trainingErrors[i];
                        }
                    }
                    double trainingProgress = 1000 * ((sumStripTrainingError / (this.strip * minStripTrainingError)) - 1);
                    pq = generalizationLoss / trainingProgress;



                    //display advance early stopping
                    this.learningErrorBox.Text      = msle.ToString("F5");
                    this.validationErrorBox.Text    = mspe.ToString("F5");
                    this.generalizationLossBox.Text = generalizationLoss.ToString("F5");
                    this.pqBox.Text = pq.ToString("F5");
                    this.seriesGraph.Refresh();

                    //stopping
                    switch (this.advanceStoppingMethod)
                    {
                    case AdvanceStoppingMethodEnumeration.GeneralizationLoss:
                        if (generalizationLoss > this.generalizationLossTreshold)
                        {
                            needToStop = true;
                        }
                        break;

                    case AdvanceStoppingMethodEnumeration.ProgressQuotient:
                        if (pq > this.pqTreshold)
                        {
                            needToStop = true;
                        }
                        break;
                    }
                }

                if (this.withCheckingCycle && iteration % this.checkingCycle == 0)
                {
                    switch (this.checkingMethod)
                    {
                    case CheckingMethodEnumeration.byMSEValue:
                        if (mse <= this.byMSEValueStopping)
                        {
                            needToStop = true;
                        }
                        break;

                    case CheckingMethodEnumeration.byMSEChange:
                        if (lastMSE - mse <= this.byMSEChangeStopping)
                        {
                            needToStop = true;
                        }
                        break;
                    }
                    lastMSE = mse;
                }
                if (iteration >= this.maxIteration)
                {
                    needToStop = true;
                }

                iteration++;
            }

            //restore weight
            if (this.useAdvanceEarlyStopping)
            {
                this.solutionData = this.bestSolution;

                // weight matrix

                for (int i = 0; i < this.network.Layers.Count - 1; i++)
                {
                    for (int j = 0; j < this.network.Layers[i].WeightMatrix.Rows; j++)
                    {
                        for (int k = 0; k < this.network.Layers[i].WeightMatrix.Cols; k++)
                        {
                            this.network.Layers[i].WeightMatrix[j, k] = this.bestWeightMatrix[i][j, k];
                        }
                    }
                }

                //best network criterion
                this.error = bestNetworkError;
                this.mse   = bestNetworkMSE;
                this.mae   = bestNetworkMAE;
            }
            else
            {
                this.error = error;
                this.mse   = mse;
                this.mae   = mae;
            }

            this.enableControls(true);
        }