Example #1
        /// <summary>
        /// Calculates the weights for the Hopfield net to learn the specified training set.
        /// </summary>
        /// <param name="trainingSet">training set to learn</param>
        public override void Learn(TrainingSet trainingSet)
        {
            int M = trainingSet.Count;
            int N = this.NeuralNetwork.GetLayerAt(0).NeuronsCount;
            Layer hopfieldLayer = this.NeuralNetwork.GetLayerAt(0);

            for (int i = 0; i < N; i++)
            {
                for (int j = 0; j < N; j++)
                {
                    if (j == i)
                        continue;
                    Neuron ni = hopfieldLayer.GetNeuronAt(i);
                    Neuron nj = hopfieldLayer.GetNeuronAt(j);
                    Connection cij = nj.GetConnectionFrom(ni);
                    Connection cji = ni.GetConnectionFrom(nj);
                    double w = 0;
                    for (int k = 0; k < M; k++)
                    {
                        TrainingElement trainingElement = trainingSet.ElementAt(k);
                        double pki = trainingElement.Input[i];
                        double pkj = trainingElement.Input[j];
                        w = w + pki * pkj;
                    } // k
                    cij.ConnectionWeight.Value = w;
                    cji.ConnectionWeight.Value = w;
                } // j
            } // i
        }
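
The loop above is plain Hebbian learning: each symmetric weight is w_ij = sum over all training patterns of p_ki * p_kj, with the diagonal left at zero. Below is a minimal standalone sketch of the same calculation, independent of the NeuralNetwork/Layer classes; the bipolar patterns are invented for illustration.

            // Hebbian weight calculation for a Hopfield net (illustrative patterns, not project data)
            double[][] patterns =
            {
                new double[] {  1, -1,  1 },
                new double[] { -1,  1,  1 }
            };
            int n = patterns[0].Length;
            double[,] w = new double[n, n];
            for (int i = 0; i < n; i++)
                for (int j = 0; j < n; j++)
                {
                    if (i == j) continue;              // keep the diagonal at zero (no self-connections)
                    foreach (double[] p in patterns)
                        w[i, j] += p[i] * p[j];        // w_ij = sum_k p_ki * p_kj
                }
            // For these two patterns, w[0,1] == -2 and w[0,2] == 0.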
        public void Run()
        {
            // create training set (logical XOR function)
            TrainingSet trainingSet = new TrainingSet(2, 1);
            trainingSet.Add(new SupervisedTrainingElement(new double[] { 0, 0 }, new double[] { 0 }));
            trainingSet.Add(new SupervisedTrainingElement(new double[] { 0, 1 }, new double[] { 1 }));
            trainingSet.Add(new SupervisedTrainingElement(new double[] { 1, 0 }, new double[] { 1 }));
            trainingSet.Add(new SupervisedTrainingElement(new double[] { 1, 1 }, new double[] { 0 }));

            // create multi layer perceptron
            MultiLayerPerceptron myMlPerceptron = new MultiLayerPerceptron(TransferFunctionType.TANH, 2, 3, 1);
            // learn the training set
            Console.WriteLine("Training neural network...");
            myMlPerceptron.LearnInSameThread(trainingSet);

            // test perceptron
            Console.WriteLine("Testing trained neural network");
            TestNeuralNetwork(myMlPerceptron, trainingSet);

            // save trained neural network
            myMlPerceptron.Save("myMlPerceptron.nnet");

            // load saved neural network
            NeuralNetwork loadedMlPerceptron = NeuralNetwork.Load("myMlPerceptron.nnet");

            // test loaded neural network
            //Console.WriteLine("Testing loaded neural network");
            //testNeuralNetwork(loadedMlPerceptron, trainingSet);
        }
Example #3
 /// <summary>
 /// This method does one learning epoch for the unsupervised learning rules.
 /// It iterates through the training set and trains network weights for each
 /// element 
 /// </summary>
 /// <param name="trainingSet">training set for training network</param>
 public override void DoLearningEpoch(TrainingSet trainingSet)
 {
     IEnumerator<TrainingElement> iterator = trainingSet.GetEnumerator();
     while (iterator.MoveNext() && !IsStopped)
     {
         TrainingElement trainingElement = iterator.Current;
         LearnPattern(trainingElement);
     }
 }
Example #4
 /// <summary>
 /// Calculates the error for a given network, training sets, and weights.
 /// </summary>
 /// <param name="network">The network to calculate the error on</param>
 /// <param name="sets">The training set to use</param>
 /// <param name="weights">The weights to use</param>
 /// <returns>1/2 of the sum of the squares of the errors for each training set</returns>
 public static double Error(this Network network, TrainingSet[] sets, double[][][] weights)
 {
     var error = 0.0;
     for (var i = 0; i < sets.Length; i++)
     {
         error += network.Error(sets[i], weights);
     }
     return error;
 }
Example #5
 static TrainingSet[] GenerateTrainingSets(int numberOfSets)
 {
     var trainingSets = new TrainingSet[numberOfSets];
     for (var i = 0; i < numberOfSets; i++)
     {
         var input = new double[] { Program.Random.Next(-20, 20), Program.Random.Next(-20, 20) };
         var output = Math.Sign(input[0]) == Math.Sign(input[1]) ? 1 : 0;
         trainingSets[i] = new TrainingSet(input, new double[] { output });
     }
     return trainingSets;
 }
Example #6
        /// <summary>
        /// Calculates the error for a given network, training set, and weights.
        /// Error = (1/2) sum (calculated - actual)^2
        /// </summary>
        /// <param name="network">The network to calculate the error on</param>
        /// <param name="set">The training set to use</param>
        /// <param name="weights">The weights to use</param>
        /// <returns>1/2 of the sum of the squares of the errors for each neuron</returns>
        public static double Error(this Network network, TrainingSet set, double[][][] weights)
        {
            var outputs = network.Pulse(set.Inputs, weights);

            double error = 0;
            for (var i = 0; i < outputs.Length; i++)
            {
                error += Math.Pow(outputs[i] - set.Outputs[i], 2);
            }
            return 0.5 * error;
        }
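
As a quick sanity check of the (1/2) sum (calculated - actual)^2 formula in the summary, here is the same computation with invented numbers rather than real network outputs:

            // calculated = {0.8, 0.2}, actual = {1.0, 0.0}
            // error = 0.5 * ((0.8 - 1.0)^2 + (0.2 - 0.0)^2) = 0.5 * 0.08 = 0.04
            double[] calculated = { 0.8, 0.2 };
            double[] actual = { 1.0, 0.0 };
            double error = 0.0;
            for (var i = 0; i < calculated.Length; i++)
            {
                error += Math.Pow(calculated[i] - actual[i], 2);
            }
            error *= 0.5;   // 0.04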
        /// <summary>
        /// Prints the network output for each element of the specified training set.
        /// </summary>
        /// <param name="neuralNet">neural network</param>
        /// <param name="trainingSet">training set</param>
        public static void TestNeuralNetwork(NeuralNetwork neuralNet, TrainingSet trainingSet)
        {
            foreach (TrainingElement trainingElement in trainingSet.TrainingElements)
            {
                neuralNet.SetInput(trainingElement.Input);
                neuralNet.Calculate();
                double[] networkOutput = neuralNet.Output;

                Console.Write("Input: " + trainingElement.Input.ArrayString());
                Console.WriteLine(" Output: " + networkOutput.ArrayString());

            }
        }
Example #8
 private void calculatePxValues(TrainingSet nonAnomaliesSet, TrainingSet anomaliesSet, PxFormula formula)
 {
     nonAnomaliesPxValues = new double[nonAnomaliesSet.getCountOfRecords()];
     for (int i = 0; i < nonAnomaliesSet.getCountOfRecords(); i++)
     {
         nonAnomaliesPxValues[i] = formula.calculate(nonAnomaliesSet.getAllRecords()[i]);
     }
     anomaliesPxValues = new double[anomaliesSet.getCountOfRecords()];
     for (int i = 0; i < anomaliesSet.getCountOfRecords(); i++)
     {
         anomaliesPxValues[i] = formula.calculate(anomaliesSet.getAllRecords()[i]);
     }
 }
Example #9
        private void CalculatePxValues(TrainingSet nonAnomaliesSet, TrainingSet anomaliesSet, PxFormula formula)
        {
            this.nonAnomaliesPxValues = new double[nonAnomaliesSet.GetCountOfRecords()];

            for (var i = 0; i < nonAnomaliesSet.GetCountOfRecords(); i++)
            {
                this.nonAnomaliesPxValues[i] = formula.Calculate(nonAnomaliesSet.GetAllRecords()[i]);
            }

            this.anomaliesPxValues = new double[anomaliesSet.GetCountOfRecords()];

            for (var i = 0; i < anomaliesSet.GetCountOfRecords(); i++)
            {
                this.anomaliesPxValues[i] = formula.Calculate(anomaliesSet.GetAllRecords()[i]);
            }
        }
Example #10
        public void CalcAccuracy(TrainingSet nonAnomaliesSet, TrainingSet anomaliesSet, PxFormula formula)
        {
            this.CalculatePxValues(nonAnomaliesSet, anomaliesSet, formula);
            
            var maxPxValue = formula.Calculate(formula.GetNu());
            var list = new List<KeyValuePair<int, double>>();
            var h = maxPxValue / 50;

            for (var i = 0; i < 50; i++)
            {
                var e = 0 + h * i;
                var count = this.anomaliesPxValues.Count(value => value < e);

                list.Add(new KeyValuePair<int, double>(count, e));
            }

            formula.SetE(list.First(x => x.Key == list.Max(y => y.Key)).Value);
        }
Example #11
        public static TrainingSet toTrainingsset(List <UnpreparedTrainingsset> sets)
        {
            double[][] inputs  = new double[sets.Count][];
            double[][] outputs = new double[sets.Count][];
            int        i       = 0;

            foreach (UnpreparedTrainingsset set in sets)
            {
                inputs[i]  = boardToNeuronInput(set.inputGame, set.turn);
                outputs[i] = moveToNeuronOutput(set.outputMove);
                i++;
            }
            TrainingSet t = new TrainingSet();

            t.inputs  = inputs;
            t.results = outputs;
            return(t);
        }
        public IActionResult TrainNetwork(Guid neuralNetworkId)
        {
            if (!_mlpRepository.NeuralNetworkExists(neuralNetworkId))
            {
                return(NotFound());
            }
            var NeuralNetworkFromRepo          = _mlpRepository.GetFullNeuralNetwork(neuralNetworkId);
            List <TrainingDataDto> trainingSet = TrainingSet.GetTrainingSet(NeuralNetworkFromRepo.TrainingConfig);

            MultiLayerPerceptron.TrainNetwork(ref NeuralNetworkFromRepo, trainingSet);

            _mlpRepository.UpdateNeuralNetwork(NeuralNetworkFromRepo);
            if (!_mlpRepository.Save())
            {
                throw new Exception($"Updating neural network {neuralNetworkId} failed on save.");
            }

            return(Ok());
        }
Example #13
        public int[,] CalculateWinnerFreqency(TrainingSet tas, out List <KohonenMapClassification> list)
        {
            int[,] result = new int[size.Width, size.Height];
            List <string> listNames = new List <string>();

            //  if (list == null)
            list = new List <KohonenMapClassification>();
            int i = 0;

            foreach (TrainingSample ts in tas.TrainingSamples)
            {
                PositionNeuron p = FindWinner(ts.InputVector);
                result[p.Coordinate.X, p.Coordinate.Y]++;
                string clsname = "X" + p.Coordinate.X.ToString() + "Y" + p.Coordinate.Y.ToString();
                if (listNames.Contains(clsname))
                {
                    var k = from c in list where c.ClassName == clsname select c;
                    k.First().ClassifiedInputPatternIndex.Add(i);
                }
                else
                {
                    listNames.Add(clsname);
                    KohonenMapClassification mapcls = new KohonenMapClassification(clsname)
                    {
                        X = p.Coordinate.X,
                        Y = p.Coordinate.Y
                    };
                    mapcls.ClassifiedInputPatternIndex.Add(i);
                    list.Add(mapcls);
                }
                i++;
            }
            ColorMap cmap = new ColorMap(Color.Blue, Color.Red);

            Color[] colors = cmap.GenerateUniqueColors(list.Count);
            i = 0;
            foreach (KohonenMapClassification cls in list)
            {
                cls.ColorIndicator = colors[i];
                i++;
            }
            return(result);
        }
Example #14
        public void getTrainingsset()
        {
            String      trainingDataFile = @"trainng_italienisch_1.csv";
            TrainingSet trainset         = new TrainingSet();
            int         inputLength      = 35;
            int         outputLength     = 1;

            double[] input          = new double[inputLength];
            double[] outputErwartet = new double[outputLength];

            if (File.Exists(trainingDataFile))
            {
                string[] lines = File.ReadAllLines(trainingDataFile);
                trainset.inputs  = new double[lines.Length][];
                trainset.results = new double[lines.Length][];

                string[][] parts = new string[lines.Length][];
                for (int i = 0; i < lines.Length; i++)
                {
                    parts[i] = lines[i].Split(';');

                    input = getUTFD100ByteArray(parts[i][0], inputLength);
                    //output = getNetOutput(net, parts[i][0]);



                    outputErwartet = new double[outputLength];
                    Array.Clear(outputErwartet, 0, outputLength);
                    for (int k = 1; k <= outputLength; k++)
                    {
                        outputErwartet[k - 1] = double.Parse(parts[i][k]);
                        //Console.WriteLine("Erwarteter Wert für: output[" + (k - 1) + "]: " + double.Parse(parts[i][k]));
                    }
                    trainset.inputs[i]  = input;
                    trainset.results[i] = outputErwartet;
                }
            }
            else
            {
                throw new Exception("");
            }
            myNetwork.trainingsset = trainset;
        }
Example #15
    void CreateTrainingSet()
    {
        if (trainingSetInputs == null || trainingSetInputs.Count == 0)
        {
            Debug.Log("You need to add training cases first!");
            return;
        }

        trainingSet = new TrainingSet(neurons, outputNum);
        List <double[]> tempInputs = trainingSetInputs;
        List <double>   tempOutput = trainingSetOutputs;

        for (int i = 0; i < tempInputs.Count; i++)
        {
            if (tempOutput[i] == 0)
            {
                trainingSet.Add(new TrainingSample(tempInputs[i], new double[outputNum] {
                    1, -1, -1, -1
                }));
            }
            else if (tempOutput[i] == 1)
            {
                trainingSet.Add(new TrainingSample(tempInputs[i], new double[outputNum] {
                    -1, 1, -1, -1
                }));
            }
            else if (tempOutput[i] == 2)
            {
                trainingSet.Add(new TrainingSample(tempInputs[i], new double[outputNum] {
                    -1, -1, 1, -1
                }));
            }
            else if (tempOutput[i] == 3)
            {
                trainingSet.Add(new TrainingSample(tempInputs[i], new double[outputNum] {
                    -1, -1, -1, 1
                }));
            }
        }

        neuralNetwork.Learn(this.trainingSet, epochs);
    }
        private void LoadTelemetryOnChart(TrainingSet video)
        {
            //if (video == null) return;
            //this.DataContext = video;

            //if (video.IsTelemetryLoaded)
            //{
            //    SmartDispatcher.BeginInvoke(() => profileChart.LoadTelemetry(video.Telemetry));
            //}
            //else
            //{
            //    video.TelemetryLoaded += (_sender, _e) =>
            //    {
            //        SmartDispatcher.BeginInvoke(() =>
            //        {
            //            profileChart.LoadTelemetry(video.Telemetry);
            //        });
            //    };
            //}
        }
Example #17
 /// <summary>
 /// Calculates the "delta value" for a specified neuron.
 /// For output neurons, delta = (calculated - actual)*(calculated - calculated^2)
 /// For hidden neurons in level l, delta = (calculated - calculated^2)* (sum n in neurons in l+1 [ delta((l+1)[n]) * weight(l[n] -> (l+1)[n]) ])
 /// </summary>
 /// <param name="network">The network to calculate the delta on</param>
 /// <param name="set">The training set to calculate the delta on</param>
 /// <param name="innerLayer">The inner layer index to calculate the training set on</param>
 /// <param name="neuron">The neuron index to calculate the training set on</param>
 /// <param name="deltas">The delta values for the L+1 layer</param>
 /// <returns>The delta value for the specified neuron</returns>
 public static double Delta(Network network, TrainingSet set, int innerLayer, int neuron, double[] deltas = null)
 {
     var isOutputLayer = innerLayer == network.Layers.Length - 1;
     if (isOutputLayer)
     {
         var output = network.Pulse(set.Inputs)[neuron];
         return (output - set.Outputs[neuron]) * (output - Math.Pow(output, 2));
     }
     else
     {
         var outputs = network.PulseDetailed(set.Inputs, false);
         var actualOutput = outputs[innerLayer][neuron];
         var summation = 0.0;
         for (var n = 0; n < network.Weights[innerLayer + 1].Length; n++)
         {
             summation += deltas[n] * network.Weights[innerLayer + 1][n][neuron];
         }
         return (actualOutput - Math.Pow(actualOutput, 2)) * summation;
     }
 }
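
For an output neuron the expression reduces to (calculated - actual) * calculated * (1 - calculated), i.e. the error scaled by the derivative of the logistic activation. A small illustrative calculation with made-up values:

     double output = 0.7;   // hypothetical network output for this neuron
     double target = 1.0;   // desired output from the training set
     // (output - target) * (output - output^2) = (-0.3) * (0.7 - 0.49) = -0.063
     double delta = (output - target) * (output - Math.Pow(output, 2));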
Example #18
        static void MnistTest()
        {
            Console.WriteLine("Mnist challenge accepted!");
            Console.WriteLine();

            var trainSet = new TrainingSet();

            PopulationParameters populationParameters = new PopulationParameters(5, 100, 1, 2);
            MutationParameters   mutationParameters   = new MutationParameters();
            NetworkParameters    networkParameters    = new NetworkParameters(MnistDataset.ImageSize, MnistDataset.ClassCount);
            MnistModel           mnistModel           = new MnistModel(populationParameters, mutationParameters, networkParameters, trainSet);

            mnistModel.Search(2, 0.1, 100);

            Console.WriteLine();
            Console.WriteLine("Best individual");
            Console.WriteLine(mnistModel.BestIndividual);

            Console.WriteLine();
        }
Example #19
        //Generate the classification tree and start training it recursively
        public void RunNetwork()
        {
            var validationSet = TrainingSet.FromDirectory(path + "/trainingdata");
            var trainingSet   = TrainingSet.FromDirectory(path + "/generateddata");

            root = trainingSet.Item2;
            Invalidate();
            //Invoke(new MethodInvoker(Invalidate));

            var t = new Trainer();

            var nc = new NetworkCreator();
            var nl = nc.CreateNetworks(root);

            t.Train(nl, trainingSet.Item1, validationSet.Item1);


            new Thread(() =>
            {
                var thisImagePath = path + "/trainingdata/documents/990/tax990.jpg";
                results           = t.Run(nl, trainingSet.Item2, thisImagePath);
                Invalidate();
                var iv     = new ImageView(Image.FromFile(thisImagePath));
                iv.Visible = false;
                iv.ShowDialog();

                thisImagePath = path + "/trainingdata/documents/1040/2012/tax1040_10.gif";
                results       = t.Run(nl, trainingSet.Item2, thisImagePath);
                Invalidate();
                iv         = new ImageView(Image.FromFile(thisImagePath));
                iv.Visible = false;
                iv.ShowDialog();

                thisImagePath = path + "/trainingdata/documents/1040/2010/tax1040_4.jpg";
                results       = t.Run(nl, trainingSet.Item2, thisImagePath);
                Invalidate();
                iv         = new ImageView(Image.FromFile(thisImagePath));
                iv.Visible = false;
                iv.ShowDialog();
            }).Start();
        }
Example #20
        private static void Main()
        {
            var inputLabels = new List <string> {
                "x", "y"
            };
            var hiddenLayerCount = new List <int> {
                3, 4, 3
            };
            var outputLabels = new List <string> {
                "Above the line"
            };
            var trainingSet = new TrainingSet(10, outputLabels);
            var network     = new Network(inputLabels, outputLabels, hiddenLayerCount, ActivationFunctions.Sigmoid.Function);

            ConsoleKeyInfo consoleKeyInfo;

            do
            {
                Console.WriteLine("t) train \nb) set backpropagation \nv) set verbose");
                consoleKeyInfo = Console.ReadKey(false);
                Console.WriteLine();
                switch (consoleKeyInfo.KeyChar.ToString())
                {
                case "t":
                    network.Train(trainingSet);
                    break;

                case "b":
                    network.Backpropagate = !network.Backpropagate;
                    Console.WriteLine($"Backpropagate set to {network.Backpropagate}");
                    break;

                case "v":
                    network.Verbose = !network.Verbose;
                    Console.WriteLine($"Verbose mode set to {network.Verbose}");
                    break;
                }
            } while (consoleKeyInfo.Key != ConsoleKey.Escape);

            Console.ReadKey();
        }
Example #21
        /// <summary>
        /// Calculates the "delta value" for a specified neuron.
        /// For output neurons, delta = (calculated - actual)*(calculated - calculated^2)
        /// For hidden neurons in level l, delta = (calculated - calculated^2)* (sum n in neurons in l+1 [ delta((l+1)[n]) * weight(l[n] -> (l+1)[n]) ])
        /// </summary>
        /// <param name="network">The network to calculate the delta on</param>
        /// <param name="set">The training set to calculate the delta on</param>
        /// <param name="innerLayer">The inner layer index to calculate the training set on</param>
        /// <param name="neuron">The neuron index to calculate the training set on</param>
        /// <param name="deltas">The delta values for the L+1 layer</param>
        /// <returns>The delta value for the specified neuron</returns>
        public static double Delta(Network network, TrainingSet set, int innerLayer, int neuron, double[] deltas = null)
        {
            var isOutputLayer = innerLayer == network.Layers.Length - 1;

            if (isOutputLayer)
            {
                var output = network.Pulse(set.Inputs)[neuron];
                return((output - set.Outputs[neuron]) * (output - Math.Pow(output, 2)));
            }
            else
            {
                var outputs      = network.PulseDetailed(set.Inputs, false);
                var actualOutput = outputs[innerLayer][neuron];
                var summation    = 0.0;
                for (var n = 0; n < network.Weights[innerLayer + 1].Length; n++)
                {
                    summation += deltas[n] * network.Weights[innerLayer + 1][n][neuron];
                }
                return((actualOutput - Math.Pow(actualOutput, 2)) * summation);
            }
        }
Example #22
        protected virtual TrainingSequence <T> GetTrainSamples()
        {
            // We can get TrainingSetOnDataSetReset during this call
            TrainingSequence <T> result;

            result = TrainingSet.GetNextSamples(Options.SequenceLength);

            if (result == null)
            {
                // A new epoch has come, try to get the sequence again
                result = TrainingSet.GetNextSamples(Options.SequenceLength);

                // If we couldn't get a sequence at the start of the epoch, this is a bug.
                if (result == null)
                {
                    throw new InvalidOperationException("Couldn't get a training sequence at the start of an epoch!");
                }
            }

            return(result);
        }
Example #23
        /// <summary>
        /// Calculate the error of the network.
        /// </summary>
        /// <param name="trainingSet">The training set.</param>
        /// <returns>
        /// The error of the network.
        /// </returns>
        public double CalculateError(TrainingSet trainingSet)
        {
            // Calculate the network error with respect to all training patterns (the whole training set).
            double trainingSetError = 0.0;

            foreach (TrainingPattern trainingPattern in trainingSet.TrainingPatterns)
            {
                double[] outputVector        = Evaluate(trainingPattern.InputVector);
                double[] desiredOutputVector = trainingPattern.OutputVector;

                // Calculate the network error with respect to one training pattern.
                double trainingPatternError = 0;
                for (int i = 0; i < outputVector.Length; i++)
                {
                    trainingPatternError += Math.Pow((outputVector[i] - desiredOutputVector[i]), 2);
                }

                trainingSetError += 0.5 * trainingPatternError;
            }
            return(trainingSetError);
        }
Example #24
 public void calcAccuracy(TrainingSet nonAnomaliesSet, TrainingSet anomaliesSet, PxFormula formula)
 {
     calculatePxValues(nonAnomaliesSet, anomaliesSet, formula);
     double max_p_x_value = formula.calculate(formula.getNu());
     List<KeyValuePair<int, double>> list = new List<KeyValuePair<int, double>>();
     double h = max_p_x_value / 50;
     for (int i = 0; i < 50; i++)
     {
         double e = 0 + h*i;
         int count = 0;
         foreach (double value in anomaliesPxValues)
         {
             if (value < e)
             {
                 count++;
             }
         }
         list.Add(new KeyValuePair<int, double>(count, e));
     }
     formula.setE(list.First(x => x.Key == list.Max(y => y.Key)).Value);
 }
Example #25
        public void calcAccuracy(TrainingSet nonAnomaliesSet, TrainingSet anomaliesSet, PxFormula formula)
        {
            calculatePxValues(nonAnomaliesSet, anomaliesSet, formula);
            double max_p_x_value = formula.calculate(formula.getNu());
            List <KeyValuePair <int, double> > list = new List <KeyValuePair <int, double> >();
            double h = max_p_x_value / 50;

            for (int i = 0; i < 50; i++)
            {
                double e     = 0 + h * i;
                int    count = 0;
                foreach (double value in anomaliesPxValues)
                {
                    if (value < e)
                    {
                        count++;
                    }
                }
                list.Add(new KeyValuePair <int, double>(count, e));
            }
            formula.setE(list.First(x => x.Key == list.Max(y => y.Key)).Value);
        }
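
Both variants sweep 50 candidate thresholds e between 0 and the density peak and keep the smallest e that flags the largest number of known anomalies. The same search written against plain arrays (the p(x) values and the peak are invented for illustration; Count here is the System.Linq extension):

            double[] anomaliesPx = { 0.001, 0.004, 0.020, 0.300 }; // hypothetical p(x) values of known anomalies
            double maxPx = 0.5;                                    // hypothetical density peak, i.e. formula.calculate(formula.getNu())
            double h = maxPx / 50;

            double bestE = 0;
            int bestCount = -1;
            for (int i = 0; i < 50; i++)
            {
                double e = h * i;
                int count = anomaliesPx.Count(v => v < e);         // anomalies flagged at this threshold
                if (count > bestCount)
                {
                    bestCount = count;
                    bestE = e;                                     // first (smallest) e reaching the max, like list.First(...) above
                }
            }
            // bestE plays the role of the value passed to formula.setE(...)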
Example #26
        //public Network Brain = new Network(18, new int[2] { 10, 10}, 9, new Function[] { new tanh(), new tanh(), new tanh() });
        public void createOrLoadTesData(string path = @"trainingTICTACTOE.json")
        {
            path = @"trainingTICTACTOE_new2.json";

            if (!File.Exists(path))
            {
                Console.WriteLine("Creating Testdata for Neuralbot");

                Console.WriteLine("converting to Trainingsset Object ...");
                this.Brain.trainingsset = CreateTrainingsSet();
                Console.Write("Created. ");
                Console.WriteLine(" Writing to File " + path);

                this.Brain.trainingsset.toFile(path);
            }
            else
            {
                Console.WriteLine("Reading Testdata from file " + path);
                this.Brain.trainingsset = TrainingSet.getFromFile(path);
            }
            Console.WriteLine("finished");
        }
        public void TestSpectralImagesAreMappedCorrectlyToBinaryCodes()
        {
            var firstTrack  = new[] { new double[] { 1, 1 }, new double[] { 2, 2 } };
            var secondTrack = new[] { new double[] { 3, 3 }, new double[] { 4, 4 } };

            binaryOutputHelper.Setup(helper => helper.GetBinaryCodes(1)).Returns(new[] { new byte[] { 0 }, new byte[] { 1 } });

            TrainingSet set = trainingDataProvider.MapSpectralImagesToBinaryOutputs(new List <double[][]> {
                firstTrack, secondTrack
            }, 1);

            Assert.AreEqual(4, set.Inputs.Length);
            Assert.AreEqual(4, set.Outputs.Length);
            AssertArraysAreEqual(new double[] { 1, 1 }, set.Inputs[0]);
            AssertArraysAreEqual(new double[] { 2, 2 }, set.Inputs[1]);
            AssertArraysAreEqual(new double[] { 3, 3 }, set.Inputs[2]);
            AssertArraysAreEqual(new double[] { 4, 4 }, set.Inputs[3]);
            AssertArraysAreEqual(new double[] { 0 }, set.Outputs[0]);
            AssertArraysAreEqual(new double[] { 0 }, set.Outputs[1]);
            AssertArraysAreEqual(new double[] { 1 }, set.Outputs[2]);
            AssertArraysAreEqual(new double[] { 1 }, set.Outputs[3]);
        }
Example #28
        public void TrainingSetAdd()
        {
            var set = new TrainingSet(3);

            set.AddRecord(new[] { 3.0, 4.0, 5.0 });
            set.AddRecord(new[] { 3.0, 4.0, 5.0 });
            set.AddRecord(new[] { 3.0, 4.0, 5.0 });

            Assert.AreEqual(set.GetCountOfRecords(), 3);

            try
            {
                set.AddRecord(new[] { 3.0, 4.0, 5.0, 6.0 });
            }
            catch (Exception)
            {
                Assert.Pass();
                return;
            }

            Assert.Fail();
        }
Example #29
        public void button4_Click(object sender, EventArgs e)
        {
            var openWin = new OpenFileDialog();

            openWin.DefaultExt = "txt";
            openWin.ShowDialog();
            string path = openWin.FileName;

            int nInput = Convert.ToInt32(textBox3.Text);
            int nOut   = Convert.ToInt32(textBox5.Text);

            TrainingSet train = new TrainingSet(nInput, nOut);

            string[] lines      = System.IO.File.ReadAllLines(path);
            string[] trainData  = new string[nInput + nOut];
            double[] trainInput = new double[nInput];
            double[] trainOut   = new double[nOut];

            foreach (string line in lines)
            {
                trainData = line.Split(' ');

                for (int i = 0; i < nInput; i++)
                {
                    trainInput[i] = Convert.ToDouble(trainData[i]);
                }

                for (int i = nInput; i < nOut; i++)
                {
                    trainOut[i - nInput] = Convert.ToDouble(trainData[i]);
                }


                train.Add(new TrainingSample(trainInput, trainOut));
            }

            network.Learn(train, Convert.ToInt32(textBox6.Text));
            MessageBox.Show("Training OK");
        }
Example #30
        /// <summary>
        /// parse from data file path
        /// </summary>
        public void parseDataFile(string dataFile)
        {
            FileParsers parser = new FileParsers(dataFile, DATAFILE);

            int numSample = parser.DataLines.Count;

            String[] rawSample;
            for (int i = 0; i < numSample - 1; i++)
            {
                rawSample = parser.extractDataSample(i);

                int[] dataSample = new int[Attributes.Count];

                try
                {
                    // 2.a. Deal with all the attributes.
                    for (int j = 0; j < rawSample.Length; j++)
                    {
                        // There should be a 1-to-1 ordering between
                        // the internal attributes vector and the
                        // raw sample vector.
                        Attribute currAtt = (Attribute)Attributes.ElementAt(j);

                        int attPos = currAtt.getAttributeValuePosition((String)rawSample.ElementAt(j));
                        dataSample[j] = attPos;

                        if (j == 0)
                        {
                            TargetSums[attPos]++;
                        }
                    }
                }
                catch (Exception e)
                {
                }
                TrainingSet.Add(dataSample);
            }
        }
Example #31
    // Handling for state transitions starts here

    // The Served state begins.
    void StartServe()
    {
        Debug.Log("GameController:StartServe() ");
        state = State.Served;

        // Record the start time
        startTime = Time.time;

        // Create the output data file
        string filename = directoryName + "/" + "data" + System.DateTime.Now.ToString("yyyyMMdd-HHmmss") + ".txt";

        dataLog = new StreamWriter(filename);
        RecordData("110 Ball Start");

        // Get the training set
        currentTraining = GetNextTraining();

        // Move the player to the position given by the training set
        SetPlayerPosition(currentTraining.initialPlayerPosition);

        // Initialize the racket
        racketController.RestartRacket();

        // Hide the hit-marker racket and ball (move them down out of view)
        GameObject hitPointBall = GameObject.Find("BallHitInCourt");
        Transform  tr           = hitPointBall.GetComponent <Transform>();

        tr.position = new Vector3(0 - 100, 0);

        GameObject hitPointRacket = GameObject.Find("RacketHitInCourt");

        tr          = hitPointRacket.GetComponent <Transform>();
        tr.position = new Vector3(0 - 100, 0);

        scoreBoardController.enterPlay();
        // Start playing the video
        PlayVideo();
    }
Example #32
    public void SendInput(double i1, double i2, double o)
    {
        double result = CalcOutput(i1, i2);

        if (result == 0)
        {
            npc.GetComponent <Animator>().SetTrigger("Crouch");
            npc.GetComponent <Rigidbody>().isKinematic = false;
        }
        else
        {
            npc.GetComponent <Rigidbody>().isKinematic = true;
        }

        TrainingSet set = new TrainingSet();

        set.input = new double[2] {
            i1, i2
        };
        set.output = o;
        ts.Add(set);
        Train();
    }
Example #33
    public override void OnInspectorGUI()
    {
        TrainingSet ts = target as TrainingSet;

        if (ts)
        {
            if (GUILayout.Button("Generate random patterns"))
            {
                ts.RandomPatterns();
            }
            if (GUILayout.Button("Shuffle patterns"))
            {
                ts.Shuffle();
            }
            if (GUILayout.Button("Reset"))
            {
                ts.Reset();
            }
            GUILayout.Label("Size of training set: " + ts.Patterns.Count);
        }

        GUILayout.Space(40);
        DrawDefaultInspector();
    }
Example #34
        // train button
        private void btnTrain_Click(object sender, EventArgs e)
        {
            TrainingSet trainingSet = new TrainingSet(35, 5);

            trainingSet.Add(new TrainingSample(Dataset.Letters.A, new double[5] {
                1, 0, 0, 0, 0
            }));
            trainingSet.Add(new TrainingSample(Dataset.Letters.B, new double[5] {
                0, 1, 0, 0, 0
            }));
            trainingSet.Add(new TrainingSample(Dataset.Letters.C, new double[5] {
                0, 0, 1, 0, 0
            }));
            trainingSet.Add(new TrainingSample(Dataset.Letters.D, new double[5] {
                0, 0, 0, 1, 0
            }));
            trainingSet.Add(new TrainingSample(Dataset.Letters.E, new double[5] {
                0, 0, 0, 0, 1
            }));
            neuralNetwork.SetLearningRate(Convert.ToDouble(0.3));
            neuralNetwork.Learn(trainingSet, Convert.ToInt32(5000));
            btnTrain.Enabled      = false;
            btnGetResults.Enabled = true;
        }
Example #35
        private void btnSave_Click(object sender, EventArgs e)
        {
            TrainingSet trainingSet = new TrainingSet()
            {
                Informations   = new InformationCollection(lstInformations.Items.Cast <Information>().ToList()),
                Sentences      = new SentenceCollection(lstSimilarSentences.Items.Cast <Sentence>()),
                WrongSentences = lstWrongSentences.Items.Cast <string>().ToList()
            };

            SaveFileDialog saveFileDialog = new SaveFileDialog()
            {
                AddExtension    = true,
                DefaultExt      = ".trainingset",
                OverwritePrompt = true
            };

            if (saveFileDialog.ShowDialog() != DialogResult.OK)
            {
                return;
            }

            using (var textWriter = new StreamWriter(saveFileDialog.FileName))
                new XmlSerializer(typeof(TrainingSet)).Serialize(textWriter, trainingSet);
        }
 public static global::System.Xml.Schema.XmlSchemaComplexType GetTypedDataSetSchema(global::System.Xml.Schema.XmlSchemaSet xs) {
     TrainingSet ds = new TrainingSet();
     global::System.Xml.Schema.XmlSchemaComplexType type = new global::System.Xml.Schema.XmlSchemaComplexType();
     global::System.Xml.Schema.XmlSchemaSequence sequence = new global::System.Xml.Schema.XmlSchemaSequence();
     global::System.Xml.Schema.XmlSchemaAny any = new global::System.Xml.Schema.XmlSchemaAny();
     any.Namespace = ds.Namespace;
     sequence.Items.Add(any);
     type.Particle = sequence;
     global::System.Xml.Schema.XmlSchema dsSchema = ds.GetSchemaSerializable();
     if (xs.Contains(dsSchema.TargetNamespace)) {
         global::System.IO.MemoryStream s1 = new global::System.IO.MemoryStream();
         global::System.IO.MemoryStream s2 = new global::System.IO.MemoryStream();
         try {
             global::System.Xml.Schema.XmlSchema schema = null;
             dsSchema.Write(s1);
             for (global::System.Collections.IEnumerator schemas = xs.Schemas(dsSchema.TargetNamespace).GetEnumerator(); schemas.MoveNext(); ) {
                 schema = ((global::System.Xml.Schema.XmlSchema)(schemas.Current));
                 s2.SetLength(0);
                 schema.Write(s2);
                 if ((s1.Length == s2.Length)) {
                     s1.Position = 0;
                     s2.Position = 0;
                     for (; ((s1.Position != s1.Length) 
                                 && (s1.ReadByte() == s2.ReadByte())); ) {
                         ;
                     }
                     if ((s1.Position == s1.Length)) {
                         return type;
                     }
                 }
             }
         }
         finally {
             if ((s1 != null)) {
                 s1.Close();
             }
             if ((s2 != null)) {
                 s2.Close();
             }
         }
     }
     xs.Add(dsSchema);
     return type;
 }
Example #37
        private void Start(object sender, EventArgs e)
        {
            CleanseGraph();
            EnableControls(false);
            curve.Color = enabledColor;

            if (!int.TryParse(txtCycles.Text, out cycles))
            {
                cycles = 10000;
            }
            if (!double.TryParse(txtLearningRate.Text, out learningRate))
            {
                learningRate = 0.25d;
            }
            if (!int.TryParse(txtNeuronCount.Text, out neuronCount))
            {
                neuronCount = 10;
            }

            if (cycles <= 0)
            {
                cycles = 10000;
            }
            if (learningRate < 0 || learningRate > 1)
            {
                learningRate = 0.25d;
            }
            if (neuronCount <= 0)
            {
                neuronCount = 10;
            }

            txtCycles.Text       = cycles.ToString();
            txtLearningRate.Text = learningRate.ToString();
            txtNeuronCount.Text  = neuronCount.ToString();

            LinearLayer  inputLayer  = new LinearLayer(1);
            SigmoidLayer hiddenLayer = new SigmoidLayer(neuronCount);
            SigmoidLayer outputLayer = new SigmoidLayer(1);

            new BackpropagationConnector(inputLayer, hiddenLayer).Initializer  = new RandomFunction(0d, 0.3d);
            new BackpropagationConnector(hiddenLayer, outputLayer).Initializer = new RandomFunction(0d, 0.3d);
            network = new BackpropagationNetwork(inputLayer, outputLayer);
            network.SetLearningRate(learningRate);

            TrainingSet trainingSet = new TrainingSet(1, 1);

            for (int i = 0; i < curve.Points.Count; i++)
            {
                double xVal = curve.Points[i].X;
                for (double input = xVal - 0.05; input < xVal + 0.06; input += 0.01)
                {
                    trainingSet.Add(new TrainingSample(new double[] { input }, new double[] { curve.Points[i].Y }));
                }
            }

            network.EndEpochEvent += new TrainingEpochEventHandler(
                delegate(object senderNetwork, TrainingEpochEventArgs args)
            {
                trainingProgressBar.Value = (int)(args.TrainingIteration * 100d / cycles);
                Application.DoEvents();
            });
            network.Learn(trainingSet, cycles);
            StopLearning(this, EventArgs.Empty);
        }
Example #38
 /// <summary>
 /// Trains the network for the specified training set and number of iterations.
 /// </summary>
 /// <param name="trainingSet">training set to learn</param>
 /// <param name="maxIterations">maximum number of iterations to learn</param>
 public void Learn(TrainingSet trainingSet, int maxIterations)
 {
     this.MaxIterations = maxIterations;
     this.Learn(trainingSet);
 }
Example #39
        /// <summary>
        /// Optimizes the weights for a network
        /// </summary>
        /// <param name="network">The network to optimize</param>
        /// <param name="sets">The training sets to use</param>
        /// <param name="trainingFactor">The training factor to use (directly related to the size of the weight changes)</param>
        /// <param name="rounds">The number of rounds to optimize for</param>
        /// <param name="ensureBetter">Ensure that the error has reduced before updating the weights</param>
        /// <returns>The optimized weights</returns>
        public static double[][][] Optimize(Network network, TrainingSet[] sets, double trainingFactor = 0.1, int rounds = 1, bool ensureBetter = false)
        {
            for (var r = 0; r < rounds; r++)
            {
                foreach (var set in sets)
                {
                    double[][][] preWeights = null;
                    var preError = 0.0;
                    if (ensureBetter)
                    {
                        preWeights = network.Weights.Select(layer => layer.Select(neuron => neuron.ToArray()).ToArray()).ToArray();
                        preError = network.Error(sets, network.Weights);
                    }

                    network.Weights = Optimize(network, set, trainingFactor);

                    if (ensureBetter)
                    {
                        var postError = network.Error(sets, network.Weights);
                        if (postError > preError)
                        {
                            network.Weights = preWeights;
                        }
                    }
                }
            }
            return network.Weights;
        }
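
Taken together with Examples 4 and 5, a typical call site might look like the sketch below. It assumes the code lives in the same static class that defines Optimize and that network is an already-constructed Network; neither is shown in the snippets above.

            // Hedged usage sketch; `network` construction is assumed, not shown in these examples
            TrainingSet[] sets = GenerateTrainingSets(100);        // Example 5

            double before = network.Error(sets, network.Weights);  // Example 4
            network.Weights = Optimize(network, sets, trainingFactor: 0.05, rounds: 10, ensureBetter: true);
            double after = network.Error(sets, network.Weights);

            Console.WriteLine($"error before: {before:F4}, after: {after:F4}");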
Example #40
    /// <summary>Train the neural network with a set of hardcoded training data.</summary>
    private void TrainNN()
    {
        // Set up a sufficient set of training data for the perceptron
        // Train the system to output a 1 for the state the system is in and 0 for every other state
        float[] NONE   = new float[] { 1, 0, 0, 0, 0, 0 };
        float[] FLEE   = new float[] { 0, 1, 0, 0, 0, 0 };
        float[] FIGHT  = new float[] { 0, 0, 1, 0, 0, 0 };
        float[] HEAL   = new float[] { 0, 0, 0, 1, 0, 0 };
        float[] PATROL = new float[] { 0, 0, 0, 0, 1, 0 };
        float[] FIND   = new float[] { 0, 0, 0, 0, 0, 1 };
        TrainingSet[] trainingData = new TrainingSet[] {
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, NONE   ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, FLEE   ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, FIGHT  ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, HEAL   ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, PATROL ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, FIND   ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, NONE   ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, FLEE   ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, FIGHT  ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, HEAL   ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, PATROL ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, FIND   ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, NONE   ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, FLEE   ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, FIGHT  ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, HEAL   ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, PATROL ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, FIND   ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, NONE   ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, FLEE   ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, FIGHT  ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, HEAL   ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, PATROL ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, FIND   ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, NONE   ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, FLEE   ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, FIGHT  ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, HEAL   ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, PATROL ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, FIND   ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, NONE   ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, FLEE   ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, FIGHT  ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, HEAL   ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, PATROL ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, FIND   ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, NONE   ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, FLEE   ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, FIGHT  ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, HEAL   ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, PATROL ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, FIND   ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, NONE   ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, FLEE   ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, FIGHT  ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, HEAL   ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, PATROL ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, FIND   ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, NONE   ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, FLEE   ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, FIGHT  ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, HEAL   ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, PATROL ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, FIND   ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, NONE   ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, FLEE   ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, FIGHT  ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, HEAL   ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, PATROL ),
            new TrainingSet( new float[] {	  0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0,    0	}, FIND   )
        };

        // Train the system with the above data
        // Note: Training will fail right now so it is disabled.
        //neuralNetwork.Train( trainingData, 1f, 1f, -0.2f, 0.2f );
    }
Example #41
 /// <summary>
 /// Runs one learning iteration for the specified training set and notifies observers.
 /// This method performs doLearningEpoch() and, in addition, notifies observers when the iteration is done.
 /// </summary>
 /// <param name="trainingSet">training set to learn</param>
 public void DoOneLearningIteration(TrainingSet trainingSet)
 {
     this.DoLearningEpoch(trainingSet);
     this.NotifyChange(); // notify observers
 }
Example #42
 /// <summary>
 /// Invokes BeginEpochEvent
 /// </summary>
 /// <param name="currentIteration">
 /// Current training iteration
 /// </param>
 /// <param name="trainingSet">
 /// Training set which is about to be trained
 /// </param>
 protected override void OnBeginEpoch(int currentIteration, TrainingSet trainingSet)
 {
     meanSquaredError = 0d;
     isValidMSE = false;
     base.OnBeginEpoch(currentIteration, trainingSet);
 }
Example #43
        public void TrainingSetGetDimmensionCount()
        {
            var set = new TrainingSet(3);

            Assert.AreEqual(set.GetDimensionsCount(), 3);
        }
Example #44
        /// <summary>
        /// Optimizes weights for a given training set
        /// </summary>
        /// <param name="network">The network to optimize</param>
        /// <param name="set">The set to optimize for</param>
        /// <param name="trainingFactor">The training factor (how large the changes should be)</param>
        /// <returns>The optimized weights</returns>
        public static double[][][] Optimize(Network network, TrainingSet set, double trainingFactor = 0.1)
        {
            var outputs = network.PulseDetailed(set.Inputs, true);
            var deltas = new double[network.Weights.Length][];
            for (var layer = network.Weights.Length - 1; layer >= 0; layer--)
            {
                deltas[layer] = new double[network.Weights[layer].Length];
                for (var neuron = 0; neuron < network.Weights[layer].Length; neuron++)
                {
                    if (layer == network.Weights.Length - 1)
                    {
                        deltas[layer][neuron] = Delta(network, set, layer, neuron);
                    }
                    else
                    {
                        deltas[layer][neuron] = Delta(network, set, layer, neuron, deltas[layer + 1]);
                    }

                    for (var input = 0; input < network.Weights[layer][neuron].Length; input++)
                    {
                        var delta = deltas[layer][neuron];

                        var errorPrime = 0.0;
                        if (input < outputs[layer].Length)
                        {
                            //No need for layer-1 since the addition of the inputs pushes all the layers +1
                            errorPrime = delta * outputs[layer/* - 1*/][input]; //Error prime = (d Error) / (d weight)
                        }
                        else
                        {
                            //Assume it's a bias neuron of value 1
                            errorPrime = delta * 1;
                        }

                        var deltaWeight = (-1.0) * trainingFactor * errorPrime;

                        var preError = network.Error(set, network.Weights);
                        var preErrorWeight = network.Weights[layer][neuron][input];

                        network.Weights[layer][neuron][input] += deltaWeight;

                        var postError = network.Error(set, network.Weights);
                        if (postError > preError)
                        {
                            network.Weights[layer][neuron][input] -= deltaWeight;
                        }
                    }
                }
            }
            return network.Weights;
        }
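Optimize() above applies each weight change and immediately reverts it if the overall network error went up. The following stand-alone sketch shows that greedy safeguard on a toy one-dimensional error surface; every name here is illustrative and none of it is the library's API.

using System;

// Hypothetical illustration of the "apply the delta, keep it only if the error went down" step.
static class GreedyStepDemo
{
    static double Error(double w) => (w - 3.0) * (w - 3.0); // toy error surface, minimum at w = 3

    static void Main()
    {
        double weight = 0.0;
        const double trainingFactor = 0.1;
        for (int i = 0; i < 50; i++)
        {
            double gradient = 2.0 * (weight - 3.0);          // dError/dWeight for the toy surface
            double deltaWeight = -trainingFactor * gradient; // same sign convention as Optimize()
            double before = Error(weight);
            weight += deltaWeight;
            if (Error(weight) > before) weight -= deltaWeight; // revert if the error got worse
        }
        Console.WriteLine($"weight after training: {weight:F3}"); // approaches 3
    }
}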
Ejemplo n.º 45
0
        /// <summary>
        /// Trains the neural network for the given training set (Batch Training)
        /// </summary>
        /// <param name="trainingSet">
        /// The training set to use
        /// </param>
        /// <param name="trainingEpochs">
        /// Number of training epochs. (All samples are trained in some random order, in every
        /// training epoch)
        /// </param>
        /// <exception cref="ArgumentNullException">
        /// if <c>trainingSet</c> is <c>null</c>
        /// </exception>
        /// <exception cref="ArgumentException">
        /// if <c>trainingEpochs</c> is zero or negative
        /// </exception>
        public override void Learn(TrainingSet trainingSet, int trainingEpochs)
        {
            this.trainingSet = trainingSet;
            this.trainingEpochs = trainingEpochs;

            // Validate
            Helper.ValidateNotNull(trainingSet, "trainingSet");
            Helper.ValidatePositive(trainingEpochs, "trainingEpochs");
            if ((trainingSet.InputVectorLength != inputLayer.NeuronCount)
                || (trainingMethod == TrainingMethod.Supervised && trainingSet.OutputVectorLength != outputLayer.NeuronCount)
                || (trainingMethod == TrainingMethod.Unsupervised && trainingSet.OutputVectorLength != 0))
            {
                throw new ArgumentException("Invalid training set");
            }

            // Reset isStopping
            isStopping = false;

            // Re-Initialize the network
            Initialize();

            SPSO_2007.Algorithm pso = new SPSO_2007.Algorithm(PsoProblem, PsoParameters);
            pso.StartRun();

            for (currentIteration = 0; currentIteration < trainingEpochs;)
            {
                //int[] randomOrder = Helper.GetRandomOrder(trainingSet.TrainingSampleCount);
                // Beginning a new training epoch
                OnBeginEpoch(currentIteration, trainingSet);

                // Check for Jitter Epoch
                /*
                if (jitterEpoch > 0 && currentIteration % jitterEpoch == 0)
                {
                    for (int i = 0; i < connectors.Count; i++)
                    {
                        connectors[i].Jitter(jitterNoiseLimit);
                    }
                }
                */

                currentIteration = pso.NextIteration();

                meanSquaredError = pso.BestFitness * trainingSet.TrainingSampleCount;

                // Training Epoch successfully complete
                OnEndEpoch(currentIteration, trainingSet);

                // Check if we need to stop
                if (isStopping) {
                    pso.EndRun();
                    isStopping = false;
                    return;
                }
            }
        }
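The epoch loop above stops cooperatively: it checks isStopping at the end of each epoch rather than aborting mid-iteration. A minimal, self-contained sketch of that pattern follows, with invented names and a Sleep standing in for one PSO iteration over the training set.

using System;
using System.Threading;

// Hypothetical sketch of a cooperative stop flag checked at epoch boundaries.
class StopFlagDemo
{
    static volatile bool isStopping;

    static void Main()
    {
        var trainer = new Thread(() =>
        {
            for (int epoch = 0; epoch < 1000 && !isStopping; epoch++)
            {
                Thread.Sleep(10); // stands in for one training epoch
            }
            Console.WriteLine("Training stopped.");
        });
        trainer.Start();
        Thread.Sleep(100);
        isStopping = true;   // request a stop; honoured at the next epoch boundary
        trainer.Join();
    }
}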
Ejemplo n.º 46
0
 /// <summary>
 /// Override this method to implement specific learning procedures 
 /// </summary>
 /// <param name="trainingSet">training set</param>
 public abstract void Learn(TrainingSet trainingSet);
Ejemplo n.º 47
0
 /// <summary>
 /// This method does one learning epoch for the unsupervised learning rules.
 /// It iterates through the training set and trains network weights for each
 /// element. Stops learning after one epoch. 
 /// </summary>
 /// <param name="trainingSet">training set for training network</param>
 public override void DoLearningEpoch(TrainingSet trainingSet)
 {
     base.DoLearningEpoch(trainingSet);
     StopLearning(); // stop learning after one learning epoch
 }
Ejemplo n.º 48
0
        /// <summary>
        /// Generates the training data for the sunspot training years.
        /// </summary>
        /// <returns>The training data</returns>
        public TrainingSet GenerateTraining()
        {
            TrainingSet result = new TrainingSet(WINDOW_SIZE, 1);

            for (int year = TRAIN_START; year < TRAIN_END; year++)
            {
                double[] input = new double[WINDOW_SIZE];
                double[] ideal = new double[1];

                int index = 0;
                for (int i = year - WINDOW_SIZE; i < year; i++)
                {
                    input[index++] = this.normalizedSunspots[i];
                }

                ideal[0] = this.normalizedSunspots[year];

                result.Add(new SupervisedTrainingElement(input, ideal));
            }
            return result;
        }
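GenerateTraining() builds a sliding window over the normalized series: each sample's input is the previous WINDOW_SIZE values and its ideal output is the value that follows them. Here is a small, library-independent sketch of that construction on a plain array; the values and names are illustrative only.

using System;
using System.Collections.Generic;

// Hypothetical sketch of the sliding-window sample construction.
class WindowDemo
{
    const int WINDOW_SIZE = 3;

    static void Main()
    {
        double[] series = { 0.1, 0.4, 0.3, 0.7, 0.6, 0.9 };
        var samples = new List<(double[] Input, double Ideal)>();

        for (int t = WINDOW_SIZE; t < series.Length; t++)
        {
            double[] input = new double[WINDOW_SIZE];
            Array.Copy(series, t - WINDOW_SIZE, input, 0, WINDOW_SIZE); // previous WINDOW_SIZE values
            samples.Add((input, series[t]));                            // next value is the ideal output
        }

        foreach (var (input, ideal) in samples)
            Console.WriteLine($"[{string.Join(", ", input)}] -> {ideal}");
    }
}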
Ejemplo n.º 49
0
        static void Main(string[] args)
        {
            Console.WriteLine("{0:.10}, {1}", "Hello", "World");

            // Step 1 : Alternative A : Building a training set manually
            // ---------------------------------------------------------

            int inputVectorLength  = 2;
            int outputVectorLength = 1;

            TrainingSet trainingSet = new TrainingSet(inputVectorLength, outputVectorLength);

            TrainingPattern trainingPattern = new TrainingPattern(new double[2] {
                0.0, 0.0
            }, new double[1] {
                0.0
            });

            trainingSet.Add(trainingPattern);
            trainingPattern = new TrainingPattern(new double[2] {
                0.0, 1.0
            }, new double[1] {
                1.0
            });
            trainingSet.Add(trainingPattern);
            trainingPattern = new TrainingPattern(new double[2] {
                1.0, 0.0
            }, new double[1] {
                1.0
            });
            trainingSet.Add(trainingPattern);
            trainingPattern = new TrainingPattern(new double[2] {
                1.0, 1.0
            }, new double[1] {
                0.0
            });
            trainingSet.Add(trainingPattern);

            // Step 2 : Building a blueprint of a network
            // ------------------------------------------

            LayerBlueprint inputLayerBlueprint = new LayerBlueprint(inputVectorLength);

            ActivationLayerBlueprint[] hiddenLayerBlueprints = new ActivationLayerBlueprint[1];
            hiddenLayerBlueprints[0] = new ActivationLayerBlueprint(2, new LogisticActivationFunction());
            ActivationLayerBlueprint outputLayerBlueprint = new ActivationLayerBlueprint(outputVectorLength, new LogisticActivationFunction());

            NetworkBlueprint networkBlueprint = new NetworkBlueprint(inputLayerBlueprint, hiddenLayerBlueprints, outputLayerBlueprint);

            // Step 3 : Building a network
            // ---------------------------

            Network network = new Network(networkBlueprint);

            Console.WriteLine(network.ToString());

            // Step 4 : Building a teacher
            // ---------------------------

            ITeacher teacher = new AntColonyOptimizationTeacher(trainingSet, null, null);

            // Step 5 : Training the network
            // -----------------------------

            int         maxIterationCount        = 10000;
            double      maxTolerableNetworkError = 1e-3;
            TrainingLog trainingLog = teacher.Train(network, maxIterationCount, maxTolerableNetworkError);

            Console.WriteLine("Number of runs used : " + trainingLog.RunCount);
            Console.WriteLine("Number of iterations used : " + trainingLog.IterationCount);
            Console.WriteLine("Minimum network error achieved : " + trainingLog.NetworkError);

            // Step 6 : Using the trained network
            // ----------------------------------

            foreach (TrainingPattern tp in trainingSet.TrainingPatterns)
            {
                double[] inputVector  = tp.InputVector;
                double[] outputVector = network.Evaluate(inputVector);
                Console.WriteLine(tp.ToString() + " -> " + TrainingPattern.VectorToString(outputVector));
            }
        }
Ejemplo n.º 50
0
 /// <summary>
 /// Invokes EndEpochEvent
 /// </summary>
 /// <param name="currentIteration">
 /// Current training iteration
 /// </param>
 /// <param name="trainingSet">
 /// Training set which got trained successfully this epoch
 /// </param>
 protected override void OnEndEpoch(int currentIteration, TrainingSet trainingSet)
 {
     meanSquaredError /= trainingSet.TrainingSampleCount;
     isValidMSE = true;
     base.OnEndEpoch(currentIteration, trainingSet);
 }
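Taken together, OnBeginEpoch and OnEndEpoch accumulate squared output error over an epoch and divide by the training sample count at the end. A hypothetical stand-alone helper showing the same bookkeeping (not the library's code) looks like this:

using System;

// Hypothetical helper: sum of squared errors over all samples, divided by the sample count.
static class MseDemo
{
    static double MeanSquaredError(double[][] expected, double[][] actual)
    {
        double sum = 0d;
        for (int i = 0; i < expected.Length; i++)
            for (int j = 0; j < expected[i].Length; j++)
            {
                double d = expected[i][j] - actual[i][j];
                sum += d * d;
            }
        return sum / expected.Length;
    }

    static void Main()
    {
        var expected = new[] { new[] { 0.0 }, new[] { 1.0 } };
        var actual   = new[] { new[] { 0.2 }, new[] { 0.7 } };
        Console.WriteLine(MeanSquaredError(expected, actual)); // (0.04 + 0.09) / 2 = 0.065
    }
}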
Ejemplo n.º 51
0
 public static global::System.Xml.Schema.XmlSchemaComplexType GetTypedTableSchema(global::System.Xml.Schema.XmlSchemaSet xs) {
     global::System.Xml.Schema.XmlSchemaComplexType type = new global::System.Xml.Schema.XmlSchemaComplexType();
     global::System.Xml.Schema.XmlSchemaSequence sequence = new global::System.Xml.Schema.XmlSchemaSequence();
     TrainingSet ds = new TrainingSet();
     global::System.Xml.Schema.XmlSchemaAny any1 = new global::System.Xml.Schema.XmlSchemaAny();
     any1.Namespace = "http://www.w3.org/2001/XMLSchema";
     any1.MinOccurs = new decimal(0);
     any1.MaxOccurs = decimal.MaxValue;
     any1.ProcessContents = global::System.Xml.Schema.XmlSchemaContentProcessing.Lax;
     sequence.Items.Add(any1);
     global::System.Xml.Schema.XmlSchemaAny any2 = new global::System.Xml.Schema.XmlSchemaAny();
     any2.Namespace = "urn:schemas-microsoft-com:xml-diffgram-v1";
     any2.MinOccurs = new decimal(1);
     any2.ProcessContents = global::System.Xml.Schema.XmlSchemaContentProcessing.Lax;
     sequence.Items.Add(any2);
     global::System.Xml.Schema.XmlSchemaAttribute attribute1 = new global::System.Xml.Schema.XmlSchemaAttribute();
     attribute1.Name = "namespace";
     attribute1.FixedValue = ds.Namespace;
     type.Attributes.Add(attribute1);
     global::System.Xml.Schema.XmlSchemaAttribute attribute2 = new global::System.Xml.Schema.XmlSchemaAttribute();
     attribute2.Name = "tableTypeName";
     attribute2.FixedValue = "TrainingDataTable";
     type.Attributes.Add(attribute2);
     type.Particle = sequence;
     global::System.Xml.Schema.XmlSchema dsSchema = ds.GetSchemaSerializable();
     if (xs.Contains(dsSchema.TargetNamespace)) {
         global::System.IO.MemoryStream s1 = new global::System.IO.MemoryStream();
         global::System.IO.MemoryStream s2 = new global::System.IO.MemoryStream();
         try {
             global::System.Xml.Schema.XmlSchema schema = null;
             dsSchema.Write(s1);
             for (global::System.Collections.IEnumerator schemas = xs.Schemas(dsSchema.TargetNamespace).GetEnumerator(); schemas.MoveNext(); ) {
                 schema = ((global::System.Xml.Schema.XmlSchema)(schemas.Current));
                 s2.SetLength(0);
                 schema.Write(s2);
                 if ((s1.Length == s2.Length)) {
                     s1.Position = 0;
                     s2.Position = 0;
                     for (; ((s1.Position != s1.Length) 
                                 && (s1.ReadByte() == s2.ReadByte())); ) {
                         ;
                     }
                     if ((s1.Position == s1.Length)) {
                         return type;
                     }
                 }
             }
         }
         finally {
             if ((s1 != null)) {
                 s1.Close();
             }
             if ((s2 != null)) {
                 s2.Close();
             }
         }
     }
     xs.Add(dsSchema);
     return type;
 }
Ejemplo n.º 52
0
 public virtual int Fill(TrainingSet.TrainingDataTable dataTable) {
     Adapter.SelectCommand = CommandCollection[0];
     if ((ClearBeforeFill == true)) {
         dataTable.Clear();
     }
     int returnValue = Adapter.Fill(dataTable);
     return returnValue;
 }
Ejemplo n.º 53
0
 public void setTrainingSet(TrainingSet trainingSet)
 {
     this._trainingSet = trainingSet;
 }
Ejemplo n.º 54
0
        public void Run()
        {
            var trainingSet = new TrainingSet();

            Console.WriteLine(trainingSet.GetType() + " fdasfsdf");
        }
Ejemplo n.º 55
0
 /// <summary>
 /// Override this method to implement specific learning epoch - one learning iteration, one pass through whole training set 
 /// </summary>
 /// <param name="trainingSet">training set</param>
 public abstract void DoLearningEpoch(TrainingSet trainingSet);
Ejemplo n.º 56
0
        static void IrisTest2()
        {
            var trainingSet  = new TrainingSet(new double[] { 1, 0 }, 1);
            var trainingSet2 = new TrainingSet(new double[] { 1, 0, 1, 1 },
                                               new double[][]
            {
                new double[] { 1 },
                new double[] { 1, 0 },
                new double[] { 1, 0, 1 }
            });

            var irisTrainingSetSample = new TrainingSet(new double[] { 5.1, 3.5, 1.4, 0.2 }, new double[] { 0, 0, 1 });

            //4,5,3
            var network = new NeuralNetwork();

            //network.CreateInputLayer(4);
            network.CreateInputLayer(0);
            var inputLayer = network.InputLayer;// new InputLayer(0, new ReLu(), new WeightedSum());
            var i1         = new InputNeuron(new ReLu(), new WeightedSum(), 0.2);
            var i2         = new InputNeuron(new ReLu(), new WeightedSum(), -0.5);
            var i3         = new InputNeuron(new ReLu(), new WeightedSum(), 0.5);
            var i4         = new InputNeuron(new ReLu(), new WeightedSum(), 0.4675);

            inputLayer.Neurons.Add(i1);
            inputLayer.Neurons.Add(i2);
            inputLayer.Neurons.Add(i3);
            inputLayer.Neurons.Add(i4);
            network.CreateHiddenLayer(5, new ReLu(), new WeightedSum());
            network.CreateHiddenLayer(3, new ReLu(), new WeightedSum());

            for (int i = 0; i < network.Layers.Count; i++)
            {
                for (int j = 0; j < network.Layers[i].Neurons.Count; j++)
                {
                    for (int k = 0; k < network.Layers[i].Neurons[j].Inputs.Count; k++)
                    {
                        network.Layers[i].Neurons[j].Inputs[k].Name = $"L{i}:N{j}:IS{k}";
                    }
                    for (int k = 0; k < network.Layers[i].Neurons[j].Outputs.Count; k++)
                    {
                        network.Layers[i].Neurons[j].Outputs[k].Name = $"L{i}:N{j}:OS{k}";
                    }
                }
            }

            network.Train(irisTrainingSetSample, 20, 0.01);

            Console.WriteLine("=====Outputs from final layer=====");
            var result = network.GetOutput();

            for (int i = 0; i < result.Count; i++)
            {
                Console.WriteLine($"Neuron_{i}:{result[i]}");
            }
            Console.WriteLine("=====Softmax from final layer=====");
            var resultSoftmax = network.GetOutputSoftMax();

            for (int i = 0; i < result.Count; i++)
            {
                Console.WriteLine($"Neuron_{i}:{resultSoftmax[i]}");
            }
            Console.Read();
        }
Ejemplo n.º 57
0
        public override void Learn(TrainingSet trainingSet)
        {
            this.Reset();

            while (!IsStopped)
            {
                DoLearningEpoch(trainingSet);
                this.currentIteration++;
                if (iterationsLimited && (currentIteration == maxIterations))
                {
                    StopLearning();
                }
                else if (!iterationsLimited && (currentIteration == int.MaxValue))
                {
                    // restart iteration counter since it has reached max value and iteration number is not limited
                    this.currentIteration = 1;
                }

                this.NotifyChange(); // notify observers

                // Thread-safe pause: block until pausedLearning is cleared again
                if (this.pausedLearning)
                    lock (this)
                    {
                        while (this.pausedLearning)
                        {
                            try
                            {
                                // wait on the monitor with a timeout so the pause flag is
                                // re-checked periodically instead of busy-spinning
                                System.Threading.Monitor.Wait(this, 50);
                            }
                            catch (Exception) { }
                        }
                    }

            }
        }
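The pause handling above depends on a matching resume call elsewhere in the class, which is not shown in this excerpt. As a hedge, here is a self-contained sketch of one common way to pair the two with Monitor.Wait and Monitor.PulseAll; the class and method names are invented and do not describe the library's actual implementation.

using System;
using System.Threading;

// Hypothetical sketch of a pause/resume pair built on a monitor gate.
class PauseDemo
{
    readonly object gate = new object();
    bool paused;

    public void PauseLearning() { lock (gate) { paused = true; } }

    public void ResumeLearning()
    {
        lock (gate)
        {
            paused = false;
            Monitor.PulseAll(gate); // wake the training thread
        }
    }

    public void WaitIfPaused()
    {
        lock (gate)
        {
            while (paused) Monitor.Wait(gate); // releases the lock while waiting
        }
    }

    static void Main()
    {
        var demo = new PauseDemo();
        demo.PauseLearning();
        var worker = new Thread(() => { demo.WaitIfPaused(); Console.WriteLine("resumed"); });
        worker.Start();
        Thread.Sleep(100);
        demo.ResumeLearning();
        worker.Join();
    }
}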
Ejemplo n.º 58
0
        /// <summary>
        /// Builds, trains, and evaluates a forecasting network for one session configuration and logs the results.
        /// </summary>
        /// <param name="forecastingSession">The forecasting session to read the configuration (test set size, lags, leaps, hidden neuron count) from</param>
        /// <param name="forecastingLog">The forecasting log to write the training and evaluation statistics to</param>
        private static void Forecast(ForecastingSession forecastingSession, ForecastingLog forecastingLog)
        {
            // Step 0 : Read from the Forecasting Session
            // ------------------------------------------

            string[] words = forecastingSession.Read();

            // The size of the test set.
            string testSetSizeString = words[0].Trim();
            int    testSetSize       = Int32.Parse(testSetSizeString);

            // The lags.
            string lagsString = words[1].Trim();

            string[] lagStrings = lagsString.Split(',');
            int[]    lags       = new int[lagStrings.Length];
            for (int i = 0; i < lags.Length; i++)
            {
                lags[i] = Int32.Parse(lagStrings[i]);
            }

            // The leaps.
            string leapsString = words[2].Trim();

            string[] leapStrings = leapsString.Split(',');
            int[]    leaps       = new int[leapStrings.Length];
            for (int i = 0; i < leaps.Length; i++)
            {
                leaps[i] = Int32.Parse(leapStrings[i]);
            }

            // The number of hidden neurons.
            string hiddenNeuronCountString = words[3].Trim();
            int    hiddenNeuronCount       = Int32.Parse(hiddenNeuronCountString);

            // DEBUG : "Lags; Number of hidden neurons"
            Console.WriteLine(lagsString + "; " + hiddenNeuronCountString);

            // Step 1 : Alternative A : Building a training set (and a testing set) manually
            // -----------------------------------------------------------------------------

            // The training set.
            TrainingSet trainingSet = timeSeries.BuildTrainingSet(lags, leaps);

            // The testing set.
            TrainingSet testSet = trainingSet.SeparateTestSet(trainingSet.Size - testSetSize, testSetSize);

            // Step 2 : Building a blueprint of a network
            // ------------------------------------------

            // The input layer blueprint.
            LayerBlueprint inputLayerBlueprint = new LayerBlueprint(lags.Length);

            // The hidden layer blueprint.
            ActivationLayerBlueprint hiddenlayerBlueprint = new ActivationLayerBlueprint(hiddenNeuronCount);

            // The output layer blueprint.
            ActivationLayerBlueprint outputLayerBlueprint = new ActivationLayerBlueprint(leaps.Length, new LinearActivationFunction());

            // The network blueprint.
            NetworkBlueprint networkBlueprint = new NetworkBlueprint(inputLayerBlueprint, hiddenlayerBlueprint, outputLayerBlueprint);

            // Step 3 : Building a network
            // ---------------------------

            // The network.
            Network network = new Network(networkBlueprint);

            // Step 4 : Building a teacher
            // ---------------------------

            BackpropagationTeacher teacher = new BackpropagationTeacher(trainingSet, null, testSet);

            // Step 5 : Training the network
            // -----------------------------

            int         maxRunCount              = 10;
            int         maxIterationCount        = 10000;
            double      maxTolerableNetworkError = 0.0;
            TrainingLog tl = teacher.Train(network, maxRunCount, maxIterationCount, maxTolerableNetworkError);

            // Step 6 : Write into the Forecasting Log
            // ---------------------------------------

            words = new string[10] {
                lagsString,
                trainingSet.Size.ToString(),
                hiddenNeuronCountString,
                network.SynapseCount.ToString(),
                tl.RSS_TrainingSet.ToString(),
                tl.RSD_TrainingSet.ToString(),
                tl.AIC.ToString(),
                tl.BIC.ToString(),
                tl.RSS_TestSet.ToString(),
                tl.RSD_TestSet.ToString()
            };
            forecastingLog.Write(words);

            // DEBUG : "RSS (within-sample); RSD (within-sample); AIC; BIC; RSS (out-of-sample); RSD (out-of-sample)"
            Console.WriteLine(tl.RSS_TrainingSet.ToString() + "; " + tl.RSD_TrainingSet.ToString() + "; " + tl.AIC.ToString() + "; " + tl.BIC.ToString() + "; " + tl.RSS_TestSet.ToString() + "; " + tl.RSD_TestSet.ToString());
        }
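The log columns above include AIC and BIC alongside the residual sums of squares. Assuming the usual regression forms AIC = n·ln(RSS/n) + 2k and BIC = n·ln(RSS/n) + k·ln(n), with n training samples and k free parameters (here the network's synapse count), a small stand-alone sketch looks like the following; whether the library uses exactly these definitions is not shown in the excerpt.

using System;

// Hypothetical illustration of the common AIC/BIC regression formulas.
static class InformationCriteriaDemo
{
    static double Aic(double rss, int n, int k) => n * Math.Log(rss / n) + 2 * k;
    static double Bic(double rss, int n, int k) => n * Math.Log(rss / n) + k * Math.Log(n);

    static void Main()
    {
        double rss = 1.8; int n = 120; int k = 25; // illustrative values
        Console.WriteLine($"AIC = {Aic(rss, n, k):F2}, BIC = {Bic(rss, n, k):F2}");
    }
}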
Ejemplo n.º 59
0
        public override void DoLearningEpoch(TrainingSet trainingSet)
        {
            base.DoLearningEpoch(trainingSet);

            if (currentIteration > 0)
            {
                if (UseDynamicLearningRate) AdjustLearningRate();
                if (UseDynamicMomentum) AdjustMomentum();
            }
        }
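The excerpt does not show what AdjustLearningRate() and AdjustMomentum() actually do. One common dynamic-rate rule, sketched here purely as an assumption and not as this library's behaviour, grows the rate slightly while the epoch error keeps falling and backs off sharply when it rises:

using System;

// Hypothetical dynamic-learning-rate rule; the library's actual rule is not shown in the excerpt.
static class DynamicRateDemo
{
    // Grow the rate slightly while the error keeps falling; back off sharply otherwise.
    public static double Adjust(double learningRate, double previousError, double currentError)
        => currentError < previousError ? Math.Min(learningRate * 1.05, 1.0) : learningRate * 0.5;

    static void Main()
    {
        double rate = 0.1;
        rate = Adjust(rate, previousError: 0.30, currentError: 0.25); // error fell, rate grows
        rate = Adjust(rate, previousError: 0.25, currentError: 0.40); // error rose, rate halves
        Console.WriteLine(rate);
    }
}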
Ejemplo n.º 60
0
 public override void Learn(TrainingSet trainingSet)
 {
     for (int phase = 0; phase < 2; phase++)
     {
         for (int k = 0; k < iterations[phase]; k++)
         {
             IEnumerator<TrainingElement> e = trainingSet.GetEnumerator();
             while (e.MoveNext() && !IsStopped)
             {
                 TrainingElement tE = e.Current;
                 LearnPattern(tE, nR[phase]);
             } // while
             currentIteration = k;
             this.NotifyChange();
             if (IsStopped) return;
         } // for k
         LearningRate = LearningRate * 0.5;
     } // for phase
 }
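This Learn() runs two phases with per-phase iteration counts and neighbourhood radii (nR[phase]) and halves the learning rate after each phase, the usual coarse-ordering then fine-tuning schedule for self-organizing maps. A minimal, self-contained sketch of such a schedule with purely illustrative values (none of these names are the library's):

using System;

// Hypothetical two-phase training schedule: coarse ordering, then fine tuning.
class TwoPhaseScheduleDemo
{
    static void Main()
    {
        int[] iterations    = { 100, 400 };   // iterations per phase (illustrative)
        int[] neighbourhood = { 5, 1 };       // plays the role of nR[phase]
        double learningRate = 0.8;

        for (int phase = 0; phase < 2; phase++)
        {
            for (int k = 0; k < iterations[phase]; k++)
            {
                // one pass over the training set with neighbourhood[phase] would go here
            }
            Console.WriteLine($"phase {phase}: radius {neighbourhood[phase]}, rate {learningRate}");
            learningRate *= 0.5; // same decay as the Learn() above
        }
    }
}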