/// <summary>
/// Generate random data.
/// </summary>
/// <param name="seed">The seed to use.</param>
/// <param name="rows">The number of rows to generate.</param>
/// <param name="cols">The number of columns to generate.</param>
/// <param name="low">The low value.</param>
/// <param name="high">The high value.</param>
/// <param name="distort">The distortion factor.</param>
/// <returns>The data set.</returns>
public DataHolder Generate(int seed, int rows, int cols, double low, double high, double distort)
{
    IGenerateRandom rnd = new MersenneTwisterGenerateRandom((uint) seed);
    var ideal = new double[rows][];
    var actual = new double[rows][];

    for (int row = 0; row < rows; row++)
    {
        // BUG FIX: allocate each row exactly once. The original allocated
        // these arrays inside the column loop, discarding every previously
        // generated column so only the last column survived.
        ideal[row] = new double[cols];
        actual[row] = new double[cols];

        for (int col = 0; col < cols; col++)
        {
            // The actual value is the ideal value plus Gaussian noise scaled by distort.
            ideal[row][col] = rnd.NextDouble(low, high);
            actual[row][col] = ideal[row][col] + (rnd.NextGaussian() * distort);
        }
    }

    var result = new DataHolder {Actual = actual, Ideal = ideal};
    return result;
}
/// <summary>
/// Verify that a vector-seeded Mersenne Twister produces the expected first double.
/// </summary>
public void TestBasic2()
{
    var seedValues = new UInt32[] {1, 2, 3};
    var generator = new MersenneTwisterGenerateRandom(seedValues);
    Assert.AreEqual(6.09861274980219, generator.NextDouble(10), AIFH.DefaultPrecision);
}
/// <summary>
/// Construct a deep belief neural network.
/// </summary>
/// <param name="inputCount">The input count.</param>
/// <param name="hidden">The counts for the hidden layers.</param>
/// <param name="outputCount">The output neuron count.</param>
public DeepBeliefNetwork(int inputCount, int[] hidden, int outputCount)
{
    _layers = new HiddenLayer[hidden.Length];
    _rbm = new RestrictedBoltzmannMachine[hidden.Length];

    for (var i = 0; i < _rbm.Length; i++)
    {
        // The first hidden layer is fed by the network input; every later
        // layer is fed by the hidden layer directly beneath it.
        int layerInputSize = (i == 0) ? inputCount : hidden[i - 1];
        _layers[i] = new HiddenLayer(this, layerInputSize, hidden[i]);
        _rbm[i] = new RestrictedBoltzmannMachine(_layers[i]);
    }

    // The output layer sits on top of the final hidden layer.
    _outputLayer = new DeepLayer(this, hidden[_layers.Length - 1], outputCount);
    Random = new MersenneTwisterGenerateRandom();
}
/// <summary>
/// Run the example: normalize the iris data set and fit an RBF network to it
/// with a genetic algorithm.
/// </summary>
public void Process()
{
    // Read the iris data from the embedded resources.
    Assembly assembly = Assembly.GetExecutingAssembly();
    Stream res = assembly.GetManifestResourceStream("AIFH_Vol2.Resources.iris.csv");

    // Did we fail to read the resource?
    if (res == null)
    {
        Console.WriteLine("Can't read iris data from embedded resources.");
        return;
    }

    // Load the data. The using statement guarantees the reader is closed
    // even if loading throws (the original closed it twice, and not at all
    // on the exception path).
    DataSet ds;
    using (var istream = new StreamReader(res))
    {
        ds = DataSet.Load(istream);
    }

    IGenerateRandom rnd = new MersenneTwisterGenerateRandom();

    // The following ranges are setup for the Iris data set. If you wish to
    // normalize other files you will need to modify the below function calls
    // for those files.
    ds.NormalizeRange(0, -1, 1);
    ds.NormalizeRange(1, -1, 1);
    ds.NormalizeRange(2, -1, 1);
    ds.NormalizeRange(3, -1, 1);
    IDictionary<string, int> species = ds.EncodeOneOfN(4);

    var codec = new RBFNetworkGenomeCODEC(4, RbfCount, 3);

    IList<BasicData> trainingData = ds.ExtractSupervised(0, codec.InputCount, 4, codec.OutputCount);

    IPopulation pop = InitPopulation(rnd, codec);
    IScoreFunction score = new ScoreRegressionData(trainingData);

    var genetic = new BasicEA(pop, score) {CODEC = codec};

    genetic.AddOperation(0.7, new Splice(codec.Size / 3));
    genetic.AddOperation(0.3, new MutatePerturb(0.1));

    PerformIterations(genetic, 100000, 0.05, true);

    var winner = (RBFNetwork) codec.Decode(genetic.BestGenome);

    QueryOneOfN(winner, trainingData, species);
}
/// <summary>
/// Verify that NextInt reproduces the known sequence for seed 1.
/// </summary>
public void TestInt()
{
    var rnd = new MersenneTwisterGenerateRandom(1);
    foreach (int aIntTest in IntTest)
    {
        int g = rnd.NextInt();
        // Integers compare exactly, so the floating-point delta overload was
        // unnecessary; MSTest also expects (expected, actual) ordering.
        Assert.AreEqual(aIntTest, g);
    }
}
/// <summary>
/// Verify that NextFloat reproduces the known sequence for seed 1.
/// </summary>
public void TestFloat()
{
    var rnd = new MersenneTwisterGenerateRandom(1);
    foreach (float aFloatTest in FloatTest)
    {
        var l = (float) rnd.NextFloat();
        // MSTest convention: expected value first, actual second, so
        // failure messages report the values in the right roles.
        Assert.AreEqual(aFloatTest, l, AIFH.DefaultPrecision);
    }
}
/// <summary>
/// Verify that NextDouble reproduces the known sequence for seed 1.
/// </summary>
public void TestDouble()
{
    var rnd = new MersenneTwisterGenerateRandom(1);
    foreach (double aDoubleTest in DoubleTest)
    {
        double g = rnd.NextDouble();
        // MSTest convention: expected value first, actual second.
        Assert.AreEqual(aDoubleTest, g, AIFH.DefaultPrecision);
    }
}
/// <summary>
/// Verify that NextBoolean reproduces the known sequence for seed 1.
/// </summary>
public void TestGenerateBoolean()
{
    var rnd = new MersenneTwisterGenerateRandom(1);
    foreach (bool aBooleanTest in BooleanTest)
    {
        bool g = rnd.NextBoolean();
        // MSTest convention: expected value first, actual second.
        Assert.AreEqual(aBooleanTest, g);
    }
}
/// <inheritdoc />
public void Randomize()
{
    IGenerateRandom rnd = new MersenneTwisterGenerateRandom();

    // Fill every element with a uniform random value in [-1, 1).
    for (int idx = 0; idx < _data.Length; idx++)
    {
        _data[idx] = (rnd.NextDouble() * 2.0) - 1.0;
    }

    SortData();
}
/// <summary>
/// Verify that NextInt(low, high) reproduces the known sequence for seed 1.
/// </summary>
public void TestIntRange()
{
    var rnd = new MersenneTwisterGenerateRandom(1);
    foreach (int aIntRangeTest in IntRangeTest)
    {
        int g = rnd.NextInt(0, 10);
        // MSTest convention: expected value first, actual second.
        Assert.AreEqual(aIntRangeTest, g);
    }
}
/// <summary>
/// Verify that NextGaussian reproduces the known sequence for seed 1.
/// </summary>
public void TestGaussianFloat()
{
    var rnd = new MersenneTwisterGenerateRandom(1);
    foreach (double aGaussianTest in GaussianTest)
    {
        double g = rnd.NextGaussian();
        // MSTest convention: expected value first, actual second.
        Assert.AreEqual(aGaussianTest, g, AIFH.DefaultPrecision);
    }
}
/// <summary>
/// Setup and solve the TSP with a genetic algorithm, stopping once the best
/// path length has failed to improve for MaxSameSolution iterations.
/// </summary>
public void Solve()
{
    IGenerateRandom rnd = new MersenneTwisterGenerateRandom();
    var status = new StringBuilder();

    InitCities(rnd);

    IPopulation pop = InitPopulation(rnd);
    IScoreFunction score = new TSPScore(_cities);

    _genetic = new BasicEA(pop, score);
    _genetic.AddOperation(0.9, new SpliceNoRepeat(Cities / 3));
    _genetic.AddOperation(0.1, new MutateShuffle());

    int sameSolutionCount = 0;
    int iteration = 1;
    double lastSolution = double.MaxValue;

    // Iterate until the solution stabilizes.
    while (sameSolutionCount < MaxSameSolution)
    {
        _genetic.Iteration();

        double thisSolution = _genetic.LastError;

        status.Length = 0;
        status.Append("Iteration: ");
        status.Append(iteration++);
        status.Append(", Best Path Length = ");
        status.Append(thisSolution);
        Console.WriteLine(status.ToString());

        // Count consecutive iterations with essentially no improvement.
        sameSolutionCount = Math.Abs(lastSolution - thisSolution) < 1.0
            ? sameSolutionCount + 1
            : 0;

        lastSolution = thisSolution;
    }

    Console.WriteLine("Good solution found:");
    var best = (IntegerArrayGenome) _genetic.BestGenome;
    DisplaySolution(best);

    _genetic.FinishTraining();
}
/// <summary>
/// Place the cities in random locations.
/// </summary>
private void InitCities()
{
    IGenerateRandom rnd = new MersenneTwisterGenerateRandom();

    _cities = new City[Cities];
    for (int cityIndex = 0; cityIndex < _cities.Length; cityIndex++)
    {
        // Pick a random (x, y) coordinate inside the map for each city.
        int x = rnd.NextInt(MapSize);
        int y = rnd.NextInt(MapSize);
        _cities[cityIndex] = new City(x, y);
    }
}
/// <summary>
/// Construct the backpropagation trainer.
/// </summary>
/// <param name="theNetwork">The network to train.</param>
/// <param name="theTraining">The training data to use.</param>
/// <param name="theLearningRate">The learning rate. Can be changed as training runs.</param>
/// <param name="theMomentum">The momentum. Can be changed as training runs.</param>
public BackPropagation(BasicNetwork theNetwork, IList <BasicData> theTraining, double theLearningRate, double theMomentum)
{
    // Capture the training target and hyperparameters.
    _network = theNetwork;
    _training = theTraining;
    LearningRate = theLearningRate;
    Momentum = theMomentum;

    // Defaults: mini-batches of 500, time-seeded shuffling, Nesterov updates.
    BatchSize = 500;
    Stochastic = new MersenneTwisterGenerateRandom();
    NesterovUpdate = true;

    // Gradient calculator uses cross-entropy error; one delta slot per weight.
    _gradients = new GradientCalc(_network, new CrossEntropyErrorFunction(), this);
    _lastDelta = new double[theNetwork.Weights.Length];
}
/// <summary>
/// Create the particle swarm: each particle gets a random location on the
/// canvas, a fixed speed, and a random heading.
/// </summary>
public void RandomizeParticles()
{
    IGenerateRandom random = new MersenneTwisterGenerateRandom();

    for (int particleIndex = 0; particleIndex < ParticleCount; particleIndex++)
    {
        var particle = new Particle(2);
        particle.Location[0] = random.NextDouble(OutputCanvas.ActualWidth);
        particle.Location[1] = random.NextDouble(OutputCanvas.ActualHeight);
        particle.Velocity[0] = 3;
        particle.Velocity[1] = random.NextDouble(2.0 * Math.PI);
        _particles.Add(particle);
    }
}
/// <summary>
/// Run the example: normalize the iris data set and fit RBF networks to it
/// with particle swarm optimization.
/// </summary>
public void Process()
{
    // Read the iris data from the embedded resources.
    Assembly assembly = Assembly.GetExecutingAssembly();
    Stream res = assembly.GetManifestResourceStream("AIFH_Vol2.Resources.iris.csv");

    // Did we fail to read the resource?
    if (res == null)
    {
        Console.WriteLine("Can't read iris data from embedded resources.");
        return;
    }

    // Load the data. The using statement guarantees the reader is closed
    // even if loading throws (the original leaked it on the exception path).
    DataSet ds;
    using (var istream = new StreamReader(res))
    {
        ds = DataSet.Load(istream);
    }

    IGenerateRandom rnd = new MersenneTwisterGenerateRandom();

    // The following ranges are setup for the Iris data set. If you wish to
    // normalize other files you will need to modify the below function calls
    // for those files.
    ds.NormalizeRange(0, -1, 1);
    ds.NormalizeRange(1, -1, 1);
    ds.NormalizeRange(2, -1, 1);
    ds.NormalizeRange(3, -1, 1);
    IDictionary<string, int> species = ds.EncodeOneOfN(4);

    // Create a randomly initialized swarm of candidate RBF networks.
    var particles = new RBFNetwork[ParticleCount];
    for (int i = 0; i < particles.Length; i++)
    {
        particles[i] = new RBFNetwork(4, 4, 3);
        particles[i].Reset(rnd);
    }

    IList<BasicData> trainingData = ds.ExtractSupervised(0, 4, 4, 3);

    IScoreFunction score = new ScoreRegressionData(trainingData);

    var train = new TrainPSO(particles, score);

    PerformIterations(train, 100000, 0.05, true);

    var winner = (RBFNetwork) train.BestParticle;

    QueryOneOfN(winner, trainingData, species);
}
/// <summary>
/// Demonstrate the crossover splice operator. Two offspring will be created by swapping
/// segments of the parents. Some genes may repeat.
/// </summary>
public static void Splice()
{
    Console.WriteLine("Crossover Splice");

    // Create a random number generator.
    IGenerateRandom rnd = new MersenneTwisterGenerateRandom();

    // Create a new population whose genomes are integer arrays of length 10.
    IPopulation pop = new BasicPopulation();
    pop.GenomeFactory = new IntegerArrayGenomeFactory(10);

    // Create a trainer with a very simple score function. We do not care
    // about the calculation of the score, as it will never be calculated.
    IEvolutionaryAlgorithm train = new BasicEA(pop, new NullScore());

    // Create a splice operator, segment length = 5. Use it 1.0 (100%) of the time.
    var opp = new Splice(5);
    train.AddOperation(1.0, opp);

    // Create two parents. The genes are set to 1,2,3,4,5,6,7,8,9,10
    // and 10,9,8,7,6,5,4,3,2,1.
    var parents = new IntegerArrayGenome[2];
    parents[0] = (IntegerArrayGenome) pop.GenomeFactory.Factor();
    parents[1] = (IntegerArrayGenome) pop.GenomeFactory.Factor();
    for (int i = 1; i <= 10; i++)
    {
        parents[0].Data[i - 1] = i;
        parents[1].Data[i - 1] = 11 - i;
    }

    // Create an array to hold the offspring.
    var offspring = new IntegerArrayGenome[2];

    // Perform the operation.
    opp.PerformOperation(rnd, parents, 0, offspring, 0);

    // Display the results.
    Console.WriteLine("Parent 1: " + string.Join(",", parents[0].Data));
    Console.WriteLine("Parent 2: " + string.Join(",", parents[1].Data));
    Console.WriteLine("Offspring 1: " + string.Join(",", offspring[0].Data));
    Console.WriteLine("Offspring 2: " + string.Join(",", offspring[1].Data));
}
/// <summary>
/// Demonstrate the mutate perturb operator. An offspring will be created by randomly
/// perturbing each gene of a single parent.
/// </summary>
public static void MutatePeterb()
{
    Console.WriteLine("Mutate Perturb");

    // Create a random number generator.
    IGenerateRandom rnd = new MersenneTwisterGenerateRandom();

    // Create a new population whose genomes are double arrays of length 5.
    IPopulation pop = new BasicPopulation();
    pop.GenomeFactory = new DoubleArrayGenomeFactory(5);

    // Create a trainer with a very simple score function. We do not care
    // about the calculation of the score, as it will never be calculated.
    IEvolutionaryAlgorithm train = new BasicEA(pop, new NullScore());

    // Create a perturb operator with a 0.1 perturbation. Use it 1.0 (100%) of the time.
    var opp = new MutatePerturb(0.1);
    train.AddOperation(1.0, opp);

    // Create a single parent with genes 1,2,3,4,5.
    var parents = new DoubleArrayGenome[1];
    parents[0] = (DoubleArrayGenome) pop.GenomeFactory.Factor();
    parents[0].Population = pop;
    for (int i = 1; i <= 5; i++)
    {
        parents[0].Data[i - 1] = i;
    }

    // Create an array to hold the offspring.
    var offspring = new DoubleArrayGenome[1];
    offspring[0] = new DoubleArrayGenome(5);

    // Perform the operation.
    opp.PerformOperation(rnd, parents, 0, offspring, 0);

    // Display the results.
    Console.WriteLine("Parent: " + string.Join(",", parents[0].Data));
    Console.WriteLine("Offspring: " + string.Join(",", offspring[0].Data));
}
/// <summary>
/// Fit a RBF model to the titanic.
/// </summary>
/// <param name="dataPath">The path that contains the data file.</param>
public void Process(string dataPath)
{
    string trainingPath = Path.Combine(dataPath, TitanicConfig.TrainingFilename);
    string testPath = Path.Combine(dataPath, TitanicConfig.TestFilename);

    IGenerateRandom rnd = new MersenneTwisterGenerateRandom();

    // Gather statistics over both the training and the test files.
    var stats = new TitanicStats();
    NormalizeTitanic.Analyze(stats, trainingPath);
    NormalizeTitanic.Analyze(stats, testPath);

    // Normalize the training data for the titanic.
    IList<BasicData> training = NormalizeTitanic.Normalize(stats, trainingPath, null,
        TitanicConfig.InputNormalizeLow,
        TitanicConfig.InputNormalizeHigh,
        TitanicConfig.PredictSurvive,
        TitanicConfig.PredictPerish);

    // Fold the data for cross validation.
    _cross = new CrossValidate(TitanicConfig.FoldCount, training, rnd);

    // Train each of the folds.
    for (int foldIndex = 0; foldIndex < _cross.Count; foldIndex++)
    {
        Console.WriteLine("Cross validation fold #" + (foldIndex + 1) + "/" + _cross.Count);
        TrainFold(foldIndex, _cross.Folds[foldIndex]);
    }

    // Show the cross validation summary.
    Console.WriteLine("Crossvalidation summary:");
    int foldNumber = 1;
    foreach (CrossValidateFold fold in _cross.Folds)
    {
        Console.WriteLine("Fold #" + foldNumber + ": " + fold.Score);
        foldNumber++;
    }

    Console.WriteLine("Final, crossvalidated score:" + _cross.Score);
}
/// <summary>
/// Rebuild the cell grid: size it to the current canvas, randomize each
/// cell's alive/dead state, and create one Rectangle per cell on the canvas.
/// </summary>
private void Reset()
{
    // Number of whole cells that fit the canvas at the current size.
    var rows = (int) (CanvasOutput.ActualHeight / CellSize);
    var columns = (int) (CanvasOutput.ActualWidth / CellSize);

    IGenerateRandom rnd = new MersenneTwisterGenerateRandom();

    // Discard any rectangles from a previous reset.
    CanvasOutput.Children.Clear();

    _grid = new Rectangle[rows][];
    _nextGrid = new bool[rows][];
    _currentGrid = new bool[rows][];

    for (int row = 0; row < rows; row++)
    {
        _grid[row] = new Rectangle[columns];
        _nextGrid[row] = new bool[columns];
        _currentGrid[row] = new bool[columns];
        for (int col = 0; col < columns; col++)
        {
            // Pixel position of this cell on the canvas.
            double x = col * CellSize;
            double y = row * CellSize;

            // Random initial state; black = alive, white = dead.
            _currentGrid[row][col] = rnd.NextBoolean();
            var rect = new Rectangle
            {
                Fill = _currentGrid[row][col] ? Brushes.Black : Brushes.White,
                Width = CellSize,
                Height = CellSize,
                Stroke = Brushes.White
            };
            rect.SetValue(Canvas.LeftProperty, x);
            rect.SetValue(Canvas.TopProperty, y);
            CanvasOutput.Children.Add(rect);
            _grid[row][col] = rect;
        }
    }
}
/// <summary>
/// The entry point for this example. If you would like to make this example
/// stand alone, then add to its own project and rename to Main.
/// Measures how tournament round count affects selection pressure by
/// averaging the scores of 100,000 selections per round count.
/// </summary>
/// <param name="args">Not used.</param>
public static void ExampleMain(string[] args)
{
    // Create a new population.
    IPopulation pop = new BasicPopulation();
    ISpecies species = pop.CreateSpecies();

    // Create 1000 genomes, assign the score to be the index number.
    for (int i = 0; i < 1000; i++)
    {
        IGenome genome = new IntegerArrayGenome(1);
        genome.Score = i;
        genome.AdjustedScore = i;
        pop.Species[0].Add(genome);
    }

    IGenerateRandom rnd = new MersenneTwisterGenerateRandom();

    // Create a trainer with a very simple score function. We do not care
    // about the calculation of the score, as they will never be calculated.
    // We only care that we are maximizing.
    IEvolutionaryAlgorithm train = new BasicEA(pop, new NullScore());

    // Perform the test for round counts between 1 and 10.
    for (int roundCount = 1; roundCount <= 10; roundCount++)
    {
        var selection = new TournamentSelection(train, roundCount);
        int sum = 0;
        int count = 0;
        // Average the adjusted score over 100,000 selections; a higher
        // average means stronger selection pressure.
        for (int i = 0; i < 100000; i++)
        {
            int genomeID = selection.PerformSelection(rnd, species);
            IGenome genome = species.Members[genomeID];
            sum += (int) genome.AdjustedScore;
            count++;
        }
        sum /= count;
        Console.WriteLine("Rounds: " + roundCount + ", Avg Score: " + sum);
    }
}
/// <summary>
/// Construct a k-fold cross-validation object using a Mersenne Twister
/// random number generator with a default (non-fixed) seed.
/// </summary>
public KFoldCrossvalidation()
{
    Rnd = new MersenneTwisterGenerateRandom();
}
/// <summary>
/// Process the specified file: fit an expression to the simple polynomial
/// data set with genetic programming.
/// </summary>
/// <param name="filename">The filename to process.</param>
public void Process(String filename)
{
    // Read the data from the resources.
    // NOTE(review): the filename parameter is never used; the data always
    // comes from the embedded simple-poly.csv resource — confirm intent.
    Assembly assembly = Assembly.GetExecutingAssembly();
    Stream res = assembly.GetManifestResourceStream("AIFH_Vol2.Resources.simple-poly.csv");

    // Did we fail to read the resource?
    if (res == null)
    {
        // Fixed copy-pasted message: this example loads the simple-poly data, not iris.
        Console.WriteLine("Can't read simple-poly data from embedded resources.");
        return;
    }

    // Load the data.
    var istream = new StreamReader(res);
    DataSet ds = DataSet.Load(istream);
    istream.Close();

    // Extract supervised training.
    IList<BasicData> training = ds.ExtractSupervised(0, 1, 1, 1);

    IGenerateRandom rnd = new MersenneTwisterGenerateRandom();
    var eval = new EvaluateExpression(rnd);
    IPopulation pop = InitPopulation(rnd, eval);
    IScoreFunction score = new ScoreSmallExpression(training, 30);

    IEvolutionaryAlgorithm genetic = new BasicEA(pop, score);
    genetic.AddOperation(0.3, new MutateTree(3));
    genetic.AddOperation(0.7, new CrossoverTree());
    genetic.ShouldIgnoreExceptions = false;

    int sameSolutionCount = 0;
    int iteration = 1;
    double lastSolution = double.MaxValue;
    var builder = new StringBuilder();

    // Iterate until the error stabilizes or we hit the iteration cap.
    while (sameSolutionCount < MaxSameSolution && iteration < 1000)
    {
        genetic.Iteration();

        double thisSolution = genetic.LastError;
        builder.Length = 0;
        builder.Append("Iteration: ");
        builder.Append(iteration++);
        builder.Append(", Current error = ");
        builder.Append(thisSolution);
        builder.Append(", Best Solution Length = ");
        builder.Append(genetic.BestGenome.Count);
        Console.WriteLine(builder.ToString());

        if (Math.Abs(lastSolution - thisSolution) < 1.0)
        {
            sameSolutionCount++;
        }
        else
        {
            sameSolutionCount = 0;
        }
        lastSolution = thisSolution;
    }

    Console.WriteLine("Good solution found:");
    var best = (TreeGenome) genetic.BestGenome;
    Console.WriteLine(eval.DisplayExpressionNormal(best.Root));
    genetic.FinishTraining();
}
/// <summary>
/// Train one fold: run particle swarm optimization until the validation
/// score stops improving, keeping the overall best network across folds.
/// </summary>
/// <param name="k">The fold id.</param>
/// <param name="fold">The fold.</param>
public void TrainFold(int k, CrossValidateFold fold)
{
    int noImprove = 0;        // consecutive iterations without validation improvement
    double localBest = 0;     // best validation score seen within this fold

    // Get the training and cross validation sets.
    IList<BasicData> training = fold.TrainingSet;
    IList<BasicData> validation = fold.ValidationSet;

    // Create random particles for the RBF.
    IGenerateRandom rnd = new MersenneTwisterGenerateRandom();
    var particles = new RBFNetwork[TitanicConfig.ParticleCount];
    for (int i = 0; i < particles.Length; i++)
    {
        particles[i] = new RBFNetwork(TitanicConfig.InputFeatureCount, TitanicConfig.RbfCount, 1);
        particles[i].Reset(rnd);
    }

    // Construct a network to hold the best network (shared across folds;
    // only created on the first fold).
    if (_bestNetwork == null)
    {
        _bestNetwork = new RBFNetwork(TitanicConfig.InputFeatureCount, TitanicConfig.RbfCount, 1);
    }

    // Setup the scoring functions: one over the training set, one over the
    // held-out validation set.
    IScoreFunction score = new ScoreTitanic(training);
    IScoreFunction scoreValidate = new ScoreTitanic(validation);

    // Setup particle swarm.
    bool done = false;
    var train = new TrainPSO(particles, score);
    int iterationNumber = 0;
    var line = new StringBuilder();

    do
    {
        iterationNumber++;
        train.Iteration();

        var best = (RBFNetwork) train.BestParticle;

        double trainingScore = train.LastError;
        double validationScore = scoreValidate.CalculateScore(best);

        // Track the best network seen across all folds (by validation score).
        if (validationScore > _bestScore)
        {
            Array.Copy(best.LongTermMemory, 0, _bestNetwork.LongTermMemory, 0, best.LongTermMemory.Length);
            _bestScore = validationScore;
        }

        // Track improvement within this fold to decide when to stop.
        if (validationScore > localBest)
        {
            noImprove = 0;
            localBest = validationScore;
        }
        else
        {
            noImprove++;
        }

        line.Length = 0;
        line.Append("Fold #");
        line.Append(k + 1);
        line.Append(", Iteration #");
        line.Append(iterationNumber);
        line.Append(": training correct: ");
        line.Append(trainingScore);
        line.Append(", validation correct: ");
        line.Append(validationScore);
        line.Append(", no improvement: ");
        line.Append(noImprove);

        if (noImprove > TitanicConfig.AllowNoImprovement)
        {
            done = true;
        }

        Console.WriteLine(line.ToString());
    } while (!done);

    fold.Score = localBest;
}
/// <summary>
/// Verify that an integer-seeded Mersenne Twister produces the expected first double.
/// </summary>
public void TestBasic()
{
    var generator = new MersenneTwisterGenerateRandom(1);
    Assert.AreEqual(4.1702200468159925, generator.NextDouble(10), AIFH.DefaultPrecision);
}