/// <summary>
/// Flips each bit of the given binary <paramref name="vector"/> independently
/// with probability <paramref name="pm"/> (some-positions bitflip mutation).
/// </summary>
/// <param name="random">The random number generator to use.</param>
/// <param name="vector">The vector that should be manipulated.</param>
/// <param name="pm">The probability a bit is flipped.</param>
public static void Apply(IRandom random, BinaryVector vector, DoubleValue pm) {
  int length = vector.Length;
  for (int position = 0; position < length; position++) {
    // one independent coin flip per position
    bool flip = random.NextDouble() < pm.Value;
    if (flip) vector[position] = !vector[position];
  }
}
/// <summary>
/// Grows a random symbolic expression tree below <paramref name="seedNode"/>, sampling child
/// symbols weighted by their initial frequency and respecting the grammar's child constraints.
/// </summary>
/// <param name="random">The random number generator used to sample arities and symbols.</param>
/// <param name="seedNode">The node below which subtrees are created; must not be a terminal.</param>
/// <param name="maxDepth">The maximum allowed tree depth.</param>
/// <exception cref="ArgumentException">Thrown when the grammar cannot produce a tree within
/// <paramref name="maxDepth"/> or when the seed node is a terminal.</exception>
public static void Create(IRandom random, ISymbolicExpressionTreeNode seedNode, int maxDepth) {
  // make sure it is possible to create a tree smaller than maxDepth
  if (seedNode.Grammar.GetMinimumExpressionDepth(seedNode.Symbol) > maxDepth)
    throw new ArgumentException("Cannot create trees of depth " + maxDepth + " or smaller because of grammar constraints.", "maxDepth");

  var arity = SampleArity(random, seedNode);
  // throw an exception if the seedNode happens to be a terminal, since in this case we cannot grow a tree
  if (arity <= 0)
    throw new ArgumentException("Cannot grow tree. Seed node shouldn't have arity zero.");

  // only symbols with positive initial frequency are candidates
  var allowedSymbols = seedNode.Grammar.AllowedSymbols.Where(s => s.InitialFrequency > 0.0).ToList();

  for (var i = 0; i < arity; i++) {
    // restrict candidates to symbols allowed as the i-th child of the seed node
    var possibleSymbols = allowedSymbols.Where(s => seedNode.Grammar.IsAllowedChildSymbol(seedNode.Symbol, s, i)).ToList();
    var weights = possibleSymbols.Select(s => s.InitialFrequency).ToList();
#pragma warning disable 612, 618
    var selectedSymbol = possibleSymbols.SelectRandom(weights, random);
#pragma warning restore 612, 618
    var tree = selectedSymbol.CreateTreeNode();
    if (tree.HasLocalParameters) tree.ResetLocalParameters(random);
    seedNode.AddSubtree(tree);
  }

  // Only iterate over the non-terminal nodes (those which have arity > 0)
  // Start from depth 2 since the first two levels are formed by the rootNode and the seedNode
  foreach (var subTree in seedNode.Subtrees)
    if (subTree.Grammar.GetMaximumSubtreeCount(subTree.Symbol) > 0)
      RecursiveCreate(random, subTree, 2, maxDepth);
}
/// <summary> /// Performs a slight variation of the order crossover of two permutations. /// </summary>
/// <exception cref="ArgumentException">Thrown when <paramref name="parent1"/> and <paramref name="parent2"/> are not of equal length.</exception>
/// <remarks>
/// Copies a randomly chosen interval from the first permutation, preserving the positions.
/// Then, starting from the beginning of the permutation, fills the remaining positions with the
/// missing values from the second permutation in the order they occur.
/// </remarks>
/// <param name="random">A random number generator.</param>
/// <param name="parent1">The first parent permutation to cross.</param>
/// <param name="parent2">The second parent permutation to cross.</param>
/// <returns>The new permutation resulting from the crossover.</returns>
public static Permutation Apply(IRandom random, Permutation parent1, Permutation parent2) {
  if (parent1.Length != parent2.Length)
    throw new ArgumentException("OrderCrossover2: The parent permutations are of unequal length.");
  int length = parent1.Length;
  int[] offspring = new int[length];
  bool[] taken = new bool[length];

  // choose the interval [cut1, cut2] inherited in place from parent1
  int cut1 = random.Next(length - 1);
  int cut2 = random.Next(cut1 + 1, length);
  for (int pos = cut1; pos <= cut2; pos++) {
    offspring[pos] = parent1[pos];
    taken[parent1[pos]] = true;
  }

  // fill the remaining positions from parent2, skipping the inherited interval
  int target = 0;
  for (int pos = 0; pos < length; pos++) {
    if (target == cut1) target = cut2 + 1; // jump over the part copied from parent1
    if (!taken[parent2[pos]]) {
      offspring[target] = parent2[pos];
      target++;
    }
  }
  return new Permutation(parent1.PermutationType, offspring);
}
/// <summary>
/// Splits up to <paramref name="n"/> randomly chosen groups (of size > 1) of the linear linkage
/// encoding into two non-empty halves, increasing the number of groups.
/// </summary>
/// <param name="random">The random number generator to use.</param>
/// <param name="lle">The linear linkage encoding to manipulate.</param>
/// <param name="n">The maximum number of split operations to perform.</param>
public static void Apply(IRandom random, LinearLinkage lle, int n) {
  var grouping = lle.GetGroups().ToList();
  // only groups with more than one element can be split;
  // key = index of the group in `grouping`, value = the group's items
  var groupsLargerOne = grouping.Select((v, i) => Tuple.Create(i, v))
    .Where(x => x.Item2.Count > 1)
    .ToDictionary(x => x.Item1, x => x.Item2);
  if (groupsLargerOne.Count == 0) return;

  var toRemove = new List<int>();
  for (var i = 0; i < n; i++) {
    var g = groupsLargerOne.Keys.SampleRandom(random);
    // split point: at least one element goes into each half
    var idx = random.Next(1, groupsLargerOne[g].Count);
    // shuffle here to avoid a potential bias of grouping smaller and larger numbers together
    var tmp = groupsLargerOne[g].Shuffle(random);
    var before = new List<int>();
    var after = new List<int>();
    foreach (var t in tmp) {
      if (idx > 0) before.Add(t);
      else after.Add(t);
      idx--;
    }
    // append both halves; halves that are still splittable are tracked under their new index
    if (before.Count > 1) groupsLargerOne[grouping.Count] = before;
    grouping.Add(before);
    if (after.Count > 1) groupsLargerOne[grouping.Count] = after;
    grouping.Add(after);
    // defer removal of the now-split source group so appended indices stay valid
    toRemove.Add(g);
    groupsLargerOne.Remove(g);
    if (groupsLargerOne.Count == 0) break;
  }
  // remove in descending index order so earlier removals do not shift later ones
  foreach (var r in toRemove.OrderByDescending(x => x)) grouping.RemoveAt(r);
  lle.SetGroups(grouping);
}
/// <summary>
/// Generates a random shift move: picks a shiftable city (for pickup-and-delivery instances
/// only cities with non-negative demand) and a target tour different from its current one.
/// Returns null when no shiftable city exists.
/// </summary>
/// <param name="individual">The encoding to generate the move for.</param>
/// <param name="problemInstance">The VRP problem instance.</param>
/// <param name="rand">The random number generator.</param>
public static PotvinPDShiftMove Apply(PotvinEncoding individual, IVRPProblemInstance problemInstance, IRandom rand) {
  var pdp = problemInstance as IPickupAndDeliveryProblemInstance;
  var candidates = new List<int>();
  for (int c = 1; c <= individual.Cities; c++) {
    if (pdp == null || pdp.GetDemand(c) >= 0)
      candidates.Add(c);
  }
  if (candidates.Count < 1) return null;

  int city = candidates[rand.Next(candidates.Count)];
  Tour sourceTour = individual.Tours.Find(t => t.Stops.Contains(city));
  int sourceIndex = individual.Tours.IndexOf(sourceTour);

  // a brand-new tour (index == Tours.Count) is only a valid target while the vehicle limit allows it
  int upperBound = individual.Tours.Count;
  if (individual.Tours.Count >= problemInstance.Vehicles.Value)
    upperBound = upperBound - 1;
  int targetIndex = rand.Next(upperBound);
  if (targetIndex >= sourceIndex) targetIndex++; // skip the source tour itself

  return new PotvinPDShiftMove(city, sourceIndex, targetIndex, individual);
}
/// <summary>
/// Generates a "sparse" or "dense" ternary polynomial containing <paramref name="NumOnes"/>
/// coefficients equal to 1, <paramref name="NumNegOnes"/> equal to -1, and the rest equal to 0.
/// </summary>
///
/// <param name="N">Number of coefficients</param>
/// <param name="NumOnes">Number of ones</param>
/// <param name="NumNegOnes">Number of negative ones</param>
/// <param name="Sparse">Create a SparseTernaryPolynomial or DenseTernaryPolynomial</param>
/// <param name="Rng">Random number generator</param>
///
/// <returns>A ternary polynomial</returns>
public static ITernaryPolynomial GenerateRandomTernary(int N, int NumOnes, int NumNegOnes, bool Sparse, IRandom Rng) {
  // dispatch to the representation requested by the caller
  if (Sparse)
    return SparseTernaryPolynomial.GenerateRandom(N, NumOnes, NumNegOnes, Rng);
  return DenseTernaryPolynomial.GenerateRandom(N, NumOnes, NumNegOnes, Rng);
}
/// <summary>
/// Performs the rounded blend alpha crossover (BLX-a) of two integer vectors.<br/>
/// It creates new offspring by sampling a new value in the range [min_i - d * alpha, max_i + d * alpha) at each position i
/// and rounding the result to the next feasible integer with respect to the bounds and step size.
/// Here min_i and max_i are the smaller and larger value of the two parents at position i and d is max_i - min_i.
/// </summary>
/// <exception cref="ArgumentException">
/// Thrown when <paramref name="parent1"/> and <paramref name="parent2"/> are of different length or<br/>
/// when <paramref name="alpha"/> is less than 0 or when <paramref name="bounds"/> is invalid.
/// </exception>
/// <param name="random">The random number generator.</param>
/// <param name="parent1">The first parent for the crossover operation.</param>
/// <param name="parent2">The second parent for the crossover operation.</param>
/// <param name="bounds">The bounds and step size for each dimension (will be cycled in case there are less rows than elements in the parent vectors).</param>
/// <param name="alpha">The alpha value for the crossover.</param>
/// <returns>The newly created integer vector resulting from the crossover operation.</returns>
public static IntegerVector Apply(IRandom random, IntegerVector parent1, IntegerVector parent2, IntMatrix bounds, DoubleValue alpha) {
  if (parent1.Length != parent2.Length) throw new ArgumentException("RoundedBlendAlphaCrossover: The parents' vectors are of different length.", "parent1");
  if (alpha.Value < 0) throw new ArgumentException("RoundedBlendAlphaCrossover: Paramter alpha must be greater or equal than 0.", "alpha");
  if (bounds == null || bounds.Rows < 1 || bounds.Columns < 2) throw new ArgumentException("RoundedBlendAlphaCrossover: Invalid bounds specified.", "bounds");

  int length = parent1.Length;
  var result = new IntegerVector(length);
  double max = 0, min = 0, d = 0, resMin = 0, resMax = 0;
  int minBound, maxBound, step = 1;
  for (int i = 0; i < length; i++) {
    // bounds rows are cycled; optional third column holds the step size
    minBound = bounds[i % bounds.Rows, 0];
    maxBound = bounds[i % bounds.Rows, 1];
    if (bounds.Columns > 2) step = bounds[i % bounds.Rows, 2];
    // upper bound is exclusive: clamp to the largest feasible value below it
    maxBound = FloorFeasible(minBound, maxBound, step, maxBound - 1);

    max = Math.Max(parent1[i], parent2[i]);
    min = Math.Min(parent1[i], parent2[i]);
    d = Math.Abs(max - min);
    // extend the parents' interval by d * alpha on both sides, clamped to feasibility
    resMin = FloorFeasible(minBound, maxBound, step, min - d * alpha.Value);
    resMax = CeilingFeasible(minBound, maxBound, step, max + d * alpha.Value);

    // sample uniformly in [resMin, resMax) and round to a feasible value
    result[i] = RoundFeasible(minBound, maxBound, step, resMin + random.NextDouble() * Math.Abs(resMax - resMin));
  }
  return result;
}
/// <summary>
/// Performs a crossover of <paramref name="parent1"/> and <paramref name="parent2"/>
/// based on randomly chosen positions which define which position to take from which parent.
/// </summary>
/// <exception cref="ArgumentException">Thrown when <paramref name="parent1"/> and <paramref name="parent2"/> are not of equal length.</exception>
/// <param name="random">The random number generator.</param>
/// <param name="parent1">First parent</param>
/// <param name="parent2">Second Parent</param>
/// <returns>Child</returns>
public static Permutation Apply(IRandom random, Permutation parent1, Permutation parent2) {
  if (parent1.Length != parent2.Length) throw new ArgumentException("PositionBasedCrossover: The parent permutations are of unequal length.");
  int length = parent1.Length;
  int[] result = new int[length];
  bool[] randomPosition = new bool[length];
  bool[] numberCopied = new bool[length];
  int randomPosNumber = random.Next(length);

  // generate random bit mask (duplicates possible, so fewer than randomPosNumber bits may end up set)
  for (int i = 0; i < randomPosNumber; i++) {
    randomPosition[random.Next(length)] = true;
  }

  // copy numbers masked as true from second permutation, preserving their positions
  for (int i = 0; i < length; i++) {
    if (randomPosition[i]) {
      result[i] = parent2[i];
      numberCopied[parent2[i]] = true;
    }
  }

  // copy the not-yet-used numbers from the first permutation, in order,
  // into the positions that were not masked
  int index = 0;
  for (int i = 0; i < length; i++) {
    if (!numberCopied[parent1[i]]) {
      // advance past masked positions (they are already filled from parent2)
      if (randomPosition[index]) {
        while (randomPosition[index]) {
          index++;
        }
      }
      result[index] = parent1[i];
      index++;
    }
  }

  return new Permutation(parent1.PermutationType, result);
}
/// <summary>
/// Performs an adaptive normally distributed all-position manipulation on the given
/// <paramref name="vector"/>: each element is perturbed by an N(0,1) sample scaled with the
/// corresponding strategy parameter (the strategy vector is cycled if it is shorter).
/// </summary>
/// <exception cref="ArgumentException">Thrown when <paramref name="sigma"/> is null or empty.</exception>
/// <param name="random">A random number generator.</param>
/// <param name="vector">The real vector to manipulate.</param>
/// <param name="sigma">The strategy vector determining the strength of the mutation.</param>
public static void Apply(IRandom random, RealVector vector, RealVector sigma) {
  if (sigma == null || sigma.Length == 0)
    throw new ArgumentException("ERROR: Vector containing the standard deviations is not defined.", "sigma");
  var normal = new NormalDistributedRandom(random, 0.0, 1.0);
  int length = vector.Length;
  for (int i = 0; i < length; i++) {
    // cycle through sigma when it is shorter than the vector
    vector[i] += normal.NextDouble() * sigma[i % sigma.Length];
  }
}
/// <summary>
/// Moves a randomly chosen element in the specified <paramref name="permutation"/> array
/// to another randomly generated position.
/// </summary>
/// <param name="random">The random number generator.</param>
/// <param name="permutation">The permutation to manipulate.</param>
public static void Apply(IRandom random, Permutation permutation) {
  Permutation original = (Permutation)permutation.Clone();

  int cutIndex, insertIndex, number;
  cutIndex = random.Next(original.Length);     // position the element is taken from
  insertIndex = random.Next(original.Length);  // position the element is inserted at
  number = original[cutIndex];

  // merge loop: write the original sequence (minus the cut element) back into the
  // permutation while injecting the cut element at the insert position
  int i = 0;  // index in new permutation
  int j = 0;  // index in old permutation
  while (i < original.Length) {
    if (j == cutIndex) {  // skip the removed element in the original
      j++;
    }
    if (i == insertIndex) {  // place the removed element at its new position
      permutation[i] = number;
      i++;
    }
    if ((i < original.Length) && (j < original.Length)) {  // copy the next remaining element
      permutation[i] = original[j];
      i++;
      j++;
    }
  }
}
/// <summary>
/// Removes a randomly chosen branch and replaces it with a minimal tree, provided a symbol
/// exists that fits the position within the remaining length and depth budget; silently gives
/// up after MAX_TRIES unsuccessful attempts.
/// </summary>
/// <param name="random">The random number generator.</param>
/// <param name="symbolicExpressionTree">The tree to manipulate.</param>
/// <param name="maxTreeLength">The maximum allowed tree length.</param>
/// <param name="maxTreeDepth">The maximum allowed tree depth.</param>
public static void RemoveRandomBranch(IRandom random, ISymbolicExpressionTree symbolicExpressionTree, int maxTreeLength, int maxTreeDepth) {
  var allowedSymbols = new List<ISymbol>();
  ISymbolicExpressionTreeNode parent;
  int childIndex;
  int maxLength;
  int maxDepth;
  // repeat until a fitting parent and child are found (MAX_TRIES times)
  int tries = 0;

  // candidate parents: every internal node except the root itself
  var nodes = symbolicExpressionTree.Root.IterateNodesPrefix().Skip(1).Where(n => n.SubtreeCount > 0).ToList();
  do {
    parent = nodes.SampleRandom(random);

    childIndex = random.Next(parent.SubtreeCount);
    var child = parent.GetSubtree(childIndex);
    // budget left for the replacement branch once the old child is removed
    maxLength = maxTreeLength - symbolicExpressionTree.Length + child.GetLength();
    maxDepth = maxTreeDepth - symbolicExpressionTree.Root.GetBranchLevel(child);

    allowedSymbols.Clear();
    foreach (var symbol in parent.Grammar.GetAllowedChildSymbols(parent.Symbol, childIndex)) {
      // check basic properties that the new symbol must have
      if ((symbol.Name != child.Symbol.Name || symbol.MinimumArity > 0) &&
        symbol.InitialFrequency > 0 &&
        parent.Grammar.GetMinimumExpressionDepth(symbol) <= maxDepth &&
        parent.Grammar.GetMinimumExpressionLength(symbol) <= maxLength) {
        allowedSymbols.Add(symbol);
      }
    }
    tries++;
  } while (tries < MAX_TRIES && allowedSymbols.Count == 0);

  if (tries >= MAX_TRIES) return;  // no suitable replacement position found
  // NOTE(review): allowedSymbols only gates whether a replacement happens here;
  // the actual choice of the minimal tree is delegated to ReplaceWithMinimalTree - confirm.
  ReplaceWithMinimalTree(random, symbolicExpressionTree.Root, parent, childIndex);
}
/// <summary>
/// Insertion mutation: removes a random customer from its tour and reinserts it either into a
/// newly created tour (with probability 1/(2*V), while the vehicle limit allows it) or at a
/// random position of a random existing tour. Empty source tours are removed afterwards.
/// </summary>
protected override void Manipulate(IRandom random, GVREncoding individual) {
  // pick a random customer and remove it from its current tour
  int customer = random.Next(1, individual.Cities + 1);
  Tour sourceTour;
  int sourcePosition;
  individual.FindCustomer(customer, out sourceTour, out sourcePosition);
  sourceTour.Stops.RemoveAt(sourcePosition);

  //with a probability of 1/(2*V) create a new tour, else insert at another position
  int tourCount = individual.GetTours().Count;
  bool openNewTour = tourCount > 0
    && tourCount < ProblemInstance.Vehicles.Value
    && random.Next(tourCount * 2) == 0;
  if (openNewTour) {
    var freshTour = new Tour();
    freshTour.Stops.Add(customer);
    individual.Tours.Add(freshTour);
  } else {
    Tour targetTour = individual.Tours[random.Next(individual.Tours.Count)];
    int insertAt = random.Next(targetTour.Stops.Count + 1);
    targetTour.Stops.Insert(insertAt, customer);
  }

  // drop the source tour if removing the customer emptied it
  if (sourceTour.Stops.Count == 0) individual.Tours.Remove(sourceTour);
}
/// <summary>
/// First-improvement local search: repeatedly samples random city pairs (u, v) and applies the
/// first improving neighbor found, until no improvement occurs within the sample budget or the
/// iteration limit is reached.
/// </summary>
/// <param name="random">The random number generator.</param>
/// <param name="individual">The encoding to improve in place.</param>
protected override void Manipulate(IRandom random, PrinsEncoding individual) {
  // NOTE(review): `tours` is never read afterwards; GetTours() is presumably side-effect
  // free, making this a dead local - confirm before removing.
  List<Tour> tours = individual.GetTours();

  bool improvement = false;
  int iterations = 0;
  do {
    improvement = false;
    double originalQuality = GetQuality(individual);
    PrinsEncoding child = null;

    // sample random city pairs until an improving child is found or the budget is exhausted
    int samples = 0;
    while (!improvement && samples < SampleSize.Value.Value) {
      int u = random.Next(ProblemInstance.Cities.Value);
      int v = random.Next(ProblemInstance.Cities.Value);
      child = Manipulate(individual, originalQuality, u, v);
      improvement = child != null;
      samples++;
    }

    if (improvement) {
      // copy the improved child back into the individual in place
      for (int i = 0; i < child.Length; i++) {
        individual[i] = child[i];
      }
    }
    iterations++;
  } while (improvement && iterations < Iterations.Value.Value);
}
/// <summary>
/// Creates a new array of the given <paramref name="length"/> where each element is drawn
/// uniformly from [min, max), with min and max taken from the first and second column of
/// <paramref name="bounds"/> (rows are cycled when the array is longer than the matrix).
/// </summary>
/// <param name="random">The random number generator.</param>
/// <param name="length">The length of the array to create.</param>
/// <param name="bounds">Lower (column 0) and upper (column 1) bounds per dimension.</param>
/// <returns>The randomly initialized array.</returns>
private DoubleArray Randomize(IRandom random, int length, DoubleMatrix bounds) {
  var result = new DoubleArray(length);
  for (int i = 0; i < length; i++) {
    double min = bounds[i % bounds.Rows, 0];
    double max = bounds[i % bounds.Rows, 1];
    // BUGFIX: previously computed r * max - min, which does not sample from [min, max);
    // a uniform draw from [min, max) is min + r * (max - min)
    result[i] = min + random.NextDouble() * (max - min);
  }
  return result;
}
/// <summary>
/// Creates a planet generator for the given map using the supplied random source,
/// with default limits for map and planet size.
/// </summary>
/// <param name="map">The map to generate planets on.</param>
/// <param name="random">The random number generator to use.</param>
public PlanetGenerator(IMap map, IRandom random) {
  _map = map;
  _random = random;
  // default generation limits
  MaximumMapSize = new Size(10000, 10000);
  MaximumPlanetSize = 250;
}
/// <summary>
/// Crosses two linear linkage encodings. Group-end decisions and links are inherited from the
/// parents: positions that end a group in both parents stay ends, positions that end a group in
/// exactly one parent become ends with probability 0.5; remaining links are taken from the
/// parent whose link does not point to an end (or randomly when the parents agree).
/// </summary>
/// <param name="random">The random number generator.</param>
/// <param name="p1">The first parent.</param>
/// <param name="p2">The second parent.</param>
/// <returns>The child encoding.</returns>
public static LinearLinkage Apply(IRandom random, LinearLinkage p1, LinearLinkage p2) {
  var n = p1.Length;
  var offspring = new LinearLinkage(n);
  var ends = new HashSet<int>();

  // first pass: decide which items terminate their group (link to themselves)
  for (var i = 0; i < n; i++) {
    if ((p1[i] == i && p2[i] == i)
        || ((p1[i] == i || p2[i] == i) && random.NextDouble() < 0.5)) {
      offspring[i] = i;
      ends.Add(i);
    }
  }

  // second pass: inherit links, preferring the parent whose link does not hit an end node
  for (var i = 0; i < n; i++) {
    if (ends.Contains(i)) continue;
    var linksToEnd1 = ends.Contains(p1[i]);
    var linksToEnd2 = ends.Contains(p2[i]);
    if (linksToEnd1 == linksToEnd2) {
      // both or neither point to an end: choose randomly
      offspring[i] = random.NextDouble() < 0.5 ? p1[i] : p2[i];
    } else if (linksToEnd1) {
      offspring[i] = p1[i];
    } else {
      offspring[i] = p2[i];
    }
  }

  offspring.LinearizeTreeStructures();
  return offspring;
}
/// <summary>
/// Multi-parent grouping crossover: repeatedly copies one of the largest remaining groups of
/// the current parent into the child (ties broken randomly), removes the copied items from all
/// parents' groups, and advances round-robin to the next parent until no items remain.
/// </summary>
/// <param name="random">The random number generator.</param>
/// <param name="parents">The parent encodings (all of equal length).</param>
/// <returns>The child encoding.</returns>
public static LinearLinkage Apply(IRandom random, ItemArray<LinearLinkage> parents) {
  var len = parents[0].Length;
  var child = new LinearLinkage(len);
  var childGroup = new List<HashSet<int>>();
  var currentParent = random.Next(parents.Length);
  // working copies of each parent's groups, as sets so copied items can be removed quickly
  var groups = parents.Select(x => x.GetGroups().Select(y => new HashSet<int>(y)).ToList()).ToList();
  bool remaining;
  do {
    // take one of the largest groups of the current parent (ties broken randomly)
    var maxGroup = groups[currentParent].Select((v, i) => Tuple.Create(i, v))
      .MaxItems(x => x.Item2.Count)
      .SampleRandom(random).Item1;
    var group = groups[currentParent][maxGroup];
    groups[currentParent].RemoveAt(maxGroup);
    childGroup.Add(group);

    // delete the copied items from every parent's groups and check whether any items remain
    remaining = false;
    for (var p = 0; p < groups.Count; p++) {
      for (var j = 0; j < groups[p].Count; j++) {
        foreach (var elem in group) groups[p][j].Remove(elem);
        if (!remaining && groups[p][j].Count > 0) remaining = true;
      }
    }
    currentParent = (currentParent + 1) % parents.Length;
  } while (remaining);
  child.SetGroups(childGroup);
  return child;
}
/// <summary>
/// Crosses two permutations with repetition: for every gene position a fair coin decides which
/// parent contributes its next remaining value; the chosen value is consumed from both parents
/// so gene multiplicities are preserved.
/// </summary>
/// <param name="random">The random number generator.</param>
/// <param name="parent1">The first parent.</param>
/// <param name="parent2">The second parent.</param>
/// <returns>The child encoding.</returns>
public static PWREncoding Apply(IRandom random, PWREncoding parent1, PWREncoding parent2) {
  var offspring = new PWREncoding();
  var remaining1 = ((IntegerVector)(parent1.PermutationWithRepetition.Clone())).ToList();
  var remaining2 = ((IntegerVector)(parent2.PermutationWithRepetition.Clone())).ToList();
  var genes = new List<int>();

  int total = parent1.PermutationWithRepetition.Length;
  // draw all coin flips first (same random sequence as filling a mask up front)
  var takeFromFirst = new bool[total];
  for (int i = 0; i < takeFromFirst.Length; i++) {
    takeFromFirst[i] = random.Next(2) == 1;
  }

  foreach (bool first in takeFromFirst) {
    if (first) {
      int gene = remaining1[0];
      genes.Add(gene);
      remaining2.Remove(gene);   // consume the first occurrence in the other parent
      remaining1.RemoveAt(0);
    } else {
      int gene = remaining2[0];
      genes.Add(gene);
      remaining1.Remove(gene);
      remaining2.RemoveAt(0);
    }
  }

  offspring.PermutationWithRepetition = new IntegerVector(genes.ToArray());
  return offspring;
}
/// <summary>
/// Generates a random scramble move: chooses an interval of at least 3 elements and a random
/// permutation of its relative indices that is guaranteed to displace at least one element.
/// </summary>
/// <param name="permutation">The permutation to generate a move for.</param>
/// <param name="random">The random number generator.</param>
/// <returns>The generated scramble move.</returns>
public static ScrambleMove GenerateRandomMove(Permutation permutation, IRandom random) {
  int breakPoint1, breakPoint2;
  int[] scrambledIndices;

  // draw two break points at least 2 apart, so the interval spans 3 or more elements
  // NOTE(review): assumes permutation.Length >= 3; smaller permutations would loop
  // forever here - confirm callers guarantee this.
  breakPoint1 = random.Next(permutation.Length);
  do {
    breakPoint2 = random.Next(permutation.Length);
  } while (Math.Abs(breakPoint2 - breakPoint1) <= 1);
  if (breakPoint2 < breakPoint1) { int h = breakPoint1; breakPoint1 = breakPoint2; breakPoint2 = h; }

  // relative indices 0..len-1 of the chosen interval
  scrambledIndices = new int[breakPoint2 - breakPoint1 + 1];
  for (int i = 0; i < scrambledIndices.Length; i++) scrambledIndices[i] = i;
  bool[] moved = new bool[scrambledIndices.Length];
  bool changed = false;
  // Fisher-Yates style shuffle, repeated until at least one index ends up displaced
  do {
    for (int i = scrambledIndices.Length - 1; i > 0; i--) {
      int j = random.Next(i + 1);
      int t = scrambledIndices[j];
      scrambledIndices[j] = scrambledIndices[i];
      scrambledIndices[i] = t;
      // track which positions actually hold a different index now
      if (scrambledIndices[j] == j) moved[j] = false;
      else moved[j] = true;
      if (scrambledIndices[i] == i) moved[i] = false;
      else moved[i] = true;
    }
    changed = moved.Any(x => x);
  } while (!changed);
  return new ScrambleMove(breakPoint1, scrambledIndices);
}
/// <summary>
/// Generates a random intra-route inversion move on a tour with at least 4 stops;
/// returns a move with indices (-1, -1) when no such tour exists.
/// </summary>
/// <param name="individual">The encoding to generate the move for.</param>
/// <param name="cities">The number of cities; encoding values >= cities act as tour delimiters.</param>
/// <param name="rand">The random number generator.</param>
public static AlbaIntraRouteInversionMove Apply(AlbaEncoding individual, int cities, IRandom rand) {
  int index1 = -1;
  int index2 = -1;

  // only tours with at least 4 stops allow a non-trivial inversion
  List<Tour> validTours = new List<Tour>();
  foreach (Tour tour in individual.GetTours()) {
    if (tour.Stops.Count >= 4) validTours.Add(tour);
  }

  if (validTours.Count > 0) {
    Tour chosenTour = validTours[rand.Next(validTours.Count)];

    // locate the chosen tour's first stop within the flat encoding
    // (stops are stored as city numbers; encoding values are zero-based, hence the +1)
    int currentTourStart = -1;
    for (int i = 0; i < individual.Length; i++) {
      if (individual[i] + 1 == chosenTour.Stops[0]) {
        currentTourStart = i;
        break;
      }
    }

    // the tour ends at the next delimiter value (>= cities) or the end of the encoding
    int currentTourEnd = currentTourStart;
    while (currentTourEnd < individual.Length &&
           individual[currentTourEnd] < cities) {
      currentTourEnd++;
    }

    int tourLength = currentTourEnd - currentTourStart;

    // choose two inversion endpoints at least 2 positions apart inside the tour
    int a = rand.Next(tourLength - 3);
    index1 = currentTourStart + a;
    index2 = currentTourStart + rand.Next(a + 2, tourLength - 1);
  }

  return new AlbaIntraRouteInversionMove(index1, index2, individual);
}
/// <summary>
/// Creates a random job sequence matrix encoding: one random absolute permutation of the
/// <paramref name="jobs"/> per resource.
/// </summary>
/// <param name="jobs">The number of jobs.</param>
/// <param name="resources">The number of resources (machines).</param>
/// <param name="random">The random number generator.</param>
/// <returns>The randomly created encoding.</returns>
public static JSMEncoding Apply(int jobs, int resources, IRandom random) {
  var encoding = new JSMEncoding();
  for (int resource = 0; resource < resources; resource++)
    encoding.JobSequenceMatrix.Add(new Permutation(PermutationTypes.Absolute, jobs, random));
  return encoding;
}
/// <summary>
/// Creates a snail name generator with the given random source and the built-in name pool.
/// </summary>
/// <param name="random">The random number generator used to pick names.</param>
public SnailNameGenerator(IRandom random) {
  this.random = random;
  // built-in pool of snail names
  names = new List<string>() {
    "Mike", "Alan", "Hugh", "Clement", "Levi",
    "Oak", "Potato", "Ethan", "Hannah", "Kimbo",
    "Cheese-Man", "Choco", "Strawberry", "Pancake", "Monster",
    "Smurf", "Fish", "Bobrika", "Bacon", "Speedy",
    "Lightning", "Trailblazer", "Maimai", "Denden", "Rambo"
  };
}
/// <summary>
/// Builds a stone column of stacked discs at the given block position, narrowing the radius
/// after every five discs (switching to dirt at radius 1), and crowns it with a decoration.
/// </summary>
/// <param name="chunk">The chunk the decoration belongs to.</param>
/// <param name="blockX">Block x coordinate of the column's center.</param>
/// <param name="blockY">Block y coordinate of the column's center.</param>
/// <param name="blockZ">Block z coordinate the column starts at.</param>
/// <param name="random">Random number generator (currently unused).</param>
private void CreateDecorationAt(Chunk chunk, int blockX, int blockY, int blockZ, IRandom random) {
  int segments = BlockSize.Z / 5;
  int z = blockZ;
  int radius = 5;
  BlockType material = BlockType.Stone;

  // stack discs in groups of five, shrinking the radius after each group
  for (int segment = 0; segment < segments; segment++) {
    for (int disc = 0; disc < 5; disc++) {
      CreateDiskAt(blockX, blockY, z, radius, material);
      z++;
    }
    if (radius > 1) {
      radius--;
      // the narrowest part of the column is made of dirt
      if (radius == 1) {
        material = BlockType.Dirt;
      }
    }
  }

  // place the decoration object on top of the finished column
  AddGameObjectDecorationToWorld("gray diamond", chunk, new Vector3(blockX + 0.5f, blockY + 0.5f, z + 0.1f), new Vector3(0, -90, 0));
}
/// <summary>
/// Generates a random lambda interchange move: exchanges two segments of length at most
/// <paramref name="lambda"/> between two distinct tours (one segment may be empty, but not
/// both). Returns an all-zero move when the encoding has fewer than two tours.
/// </summary>
/// <param name="individual">The encoding to generate the move for.</param>
/// <param name="cities">The number of cities.</param>
/// <param name="lambda">The maximum segment length to exchange.</param>
/// <param name="rand">The random number generator.</param>
public static AlbaLambdaInterchangeMove Apply(AlbaEncoding individual, int cities, int lambda, IRandom rand) {
  List<Tour> tours = individual.GetTours();

  if (tours.Count > 1) {
    int route1Index = rand.Next(tours.Count);
    Tour route1 = tours[route1Index];

    // choose a second, distinct tour by drawing from one fewer index and skipping route1
    int route2Index = rand.Next(tours.Count - 1);
    if (route2Index >= route1Index)
      route2Index += 1;
    Tour route2 = tours[route2Index];

    // segment from the first tour may be empty (turning the exchange into a pure shift)
    int length1 = rand.Next(Math.Min(lambda + 1, route1.Stops.Count + 1));
    int index1 = rand.Next(route1.Stops.Count - length1 + 1);

    // if the first segment is empty the second must not be, otherwise the move is a no-op
    int l2Min = 0;
    if (length1 == 0)
      l2Min = 1;
    int length2 = rand.Next(l2Min, Math.Min(lambda + 1, route2.Stops.Count + 1));
    int index2 = rand.Next(route2.Stops.Count - length2 + 1);

    return new AlbaLambdaInterchangeMove(route1Index, index1, length1, route2Index, index2, length2, individual);
  } else {
    return new AlbaLambdaInterchangeMove(0, 0, 0, 0, 0, 0, individual);
  }
}
/// <summary>
/// Mixes the elements of the given <paramref name="permutation"/> randomly
/// in a randomly chosen interval.
/// </summary>
/// <param name="random">The random number generator.</param>
/// <param name="permutation">The permutation to manipulate.</param>
public static void Apply(IRandom random, Permutation permutation) {
  // choose the interval [breakPoint1, breakPoint2] to scramble
  int breakPoint1 = random.Next(permutation.Length - 1);
  int breakPoint2 = random.Next(breakPoint1 + 1, permutation.Length);

  // draw a random permutation of the interval's relative indices by repeatedly
  // picking from a shrinking candidate list; List.RemoveAt replaces the previous
  // O(n^2) manual array rebuild while preserving the identical sampling sequence
  int intervalLength = breakPoint2 - breakPoint1 + 1;
  int[] scrambledIndices = new int[intervalLength];
  var remainingIndices = new List<int>(intervalLength);
  for (int i = 0; i < intervalLength; i++) {
    remainingIndices.Add(i);
  }
  for (int i = 0; i < intervalLength; i++) {
    int selectedIndex = random.Next(remainingIndices.Count);
    scrambledIndices[i] = remainingIndices[selectedIndex];
    remainingIndices.RemoveAt(selectedIndex);
  }

  Apply(permutation, breakPoint1, scrambledIndices);
}
/// <summary>
/// Deletes a randomly selected automatically defined function (ADF) from the tree, removes its
/// invoke symbol from all remaining branches and regenerates dangling invocations; returns
/// false when the tree contains no ADF.
/// </summary>
/// <param name="random">The random number generator.</param>
/// <param name="symbolicExpressionTree">The tree to manipulate.</param>
/// <param name="maxFunctionDefinitions">The maximum number of function definitions (unused here).</param>
/// <param name="maxFunctionArguments">The maximum number of function arguments (unused here).</param>
public static bool DeleteSubroutine(
  IRandom random,
  ISymbolicExpressionTree symbolicExpressionTree,
  int maxFunctionDefinitions, int maxFunctionArguments) {
  var functionDefiningBranches = symbolicExpressionTree.IterateNodesPrefix().OfType<DefunTreeNode>().ToList();

  if (!functionDefiningBranches.Any())
    // no ADF to delete => abort
    return false;

  var selectedDefunBranch = functionDefiningBranches.SampleRandom(random);

  // remove the selected defun
  int defunSubtreeIndex = symbolicExpressionTree.Root.IndexOfSubtree(selectedDefunBranch);
  symbolicExpressionTree.Root.RemoveSubtree(defunSubtreeIndex);

  // remove references to deleted function from the grammars of all remaining branches
  foreach (var subtree in symbolicExpressionTree.Root.Subtrees.OfType<SymbolicExpressionTreeTopLevelNode>()) {
    var matchingInvokeSymbol = (from symb in subtree.Grammar.Symbols.OfType<InvokeFunction>()
                                where symb.FunctionName == selectedDefunBranch.FunctionName
                                select symb).SingleOrDefault();
    if (matchingInvokeSymbol != null) {
      subtree.Grammar.RemoveSymbol(matchingInvokeSymbol);
    }
  }

  // replace dangling invocations of the deleted ADF by randomly regenerated branches
  DeletionByRandomRegeneration(random, symbolicExpressionTree, selectedDefunBranch);
  return true;
}
/// <summary>
/// Displacement mutation: removes a random non-empty segment from a random tour and reinserts
/// it either into a newly created tour (with probability 1/(2*V), while the vehicle limit
/// allows it) or at the end of a random tour. Empty source tours are removed afterwards.
/// </summary>
/// <param name="random">The random number generator.</param>
/// <param name="individual">The encoding to manipulate.</param>
protected override void Manipulate(IRandom random, GVREncoding individual) {
  Tour tour = individual.Tours[random.Next(individual.Tours.Count)];
  // choose a non-empty segment [breakPoint1, breakPoint1 + length) of the tour
  int breakPoint1 = random.Next(tour.Stops.Count);
  int length = random.Next(1, tour.Stops.Count - breakPoint1 + 1);
  List<int> displaced = tour.Stops.GetRange(breakPoint1, length);
  tour.Stops.RemoveRange(breakPoint1, length);

  //with a probability of 1/(2*V) create a new tour, else insert at another position
  if (individual.GetTours().Count > 0 &&
      individual.GetTours().Count < ProblemInstance.Vehicles.Value &&
      random.Next(individual.GetTours().Count * 2) == 0) {
    Tour newTour = new Tour();
    newTour.Stops.InsertRange(0, displaced);
    individual.Tours.Add(newTour);
  } else {
    Tour newTour = individual.Tours[random.Next(individual.Tours.Count)];
    // NOTE(review): the segment is always appended at the end of the target tour;
    // a random insertion position (random.Next(newTour.Stops.Count + 1)) may have
    // been intended - confirm against the operator's specification.
    int newPosition = newTour.Stops.Count;
    newTour.Stops.InsertRange(newPosition, displaced);
  }

  // remove the source tour if removing the segment emptied it
  if (tour.Stops.Count == 0)
    individual.Tours.Remove(tour);
}
/// <summary>
/// Performs a breeder genetic algorithm manipulation on the given <paramref name="vector"/>.
/// Each position is mutated with probability 1/length; if no position was mutated, one randomly
/// chosen position is mutated so at least one gene always changes.
/// </summary>
/// <param name="random">A random number generator.</param>
/// <param name="vector">The real vector to manipulate.</param>
/// <param name="bounds">The lower and upper bound (1st and 2nd column) of the positions in the vector. If there are less rows than dimensions, the rows are cycled.</param>
/// <param name="searchIntervalFactor">The factor determining the size of the search interval.</param>
public static void Apply(IRandom random, RealVector vector, DoubleMatrix bounds, DoubleValue searchIntervalFactor) {
  int length = vector.Length;
  double value;
  // draw a non-zero mutation strength
  do {
    value = Sigma(random);
  } while (value == 0);
  double prob = 1.0 / (double)length;

  bool wasMutated = false;
  for (int i = 0; i < length; i++) {
    if (random.NextDouble() < prob) {
      MutatePosition(random, vector, bounds, searchIntervalFactor.Value, value, i);
      wasMutated = true;
    }
  }

  // make sure at least one gene was mutated
  if (!wasMutated) {
    int pos = random.Next(length);
    MutatePosition(random, vector, bounds, searchIntervalFactor.Value, value, pos);
  }
}

// Shifts vector[index] up or down (50/50 chance) by value * factor * range,
// where range is taken from the cycled bounds row of the position.
private static void MutatePosition(IRandom random, RealVector vector, DoubleMatrix bounds, double factor, double value, int index) {
  double range = bounds[index % bounds.Rows, 1] - bounds[index % bounds.Rows, 0];
  if (random.NextDouble() < 0.5) {
    vector[index] = vector[index] + value * factor * range;
  } else {
    vector[index] = vector[index] - value * factor * range;
  }
}
/// <summary>
/// Assigns uniformly random boolean values to <paramref name="length"/> elements starting at
/// <paramref name="startIndex"/> and raises the reset event; does nothing when length is not positive.
/// </summary>
/// <param name="random">The random number generator.</param>
/// <param name="startIndex">The index of the first element to randomize.</param>
/// <param name="length">The number of elements to randomize.</param>
public virtual void Randomize(IRandom random, int startIndex, int length) {
  if (length <= 0) return;
  for (int offset = 0; offset < length; offset++) {
    array[startIndex + offset] = random.Next(2) == 0;
  }
  OnReset();
}
/// <summary>
/// Performs the order crossover of two permutations.
/// </summary>
/// <exception cref="ArgumentException">Thrown when <paramref name="parent1"/> and <paramref name="parent2"/> are not of equal length.</exception>
/// <exception cref="InvalidOperationException">Thrown if the numbers in the permutation elements are not in the range [0,N) with N = length of the permutation.</exception>
/// <remarks>
/// Crosses two permutations by copying a randomly chosen interval from the first permutation, preserving
/// the positions. Then, starting from the end of the copied interval, copies the missing values from the second permutation
/// in the order they occur.
/// </remarks>
/// <param name="random">A random number generator.</param>
/// <param name="parent1">The first parent permutation to cross.</param>
/// <param name="parent2">The second parent permutation to cross.</param>
/// <returns>The new permutation resulting from the crossover.</returns>
public static Permutation Apply(IRandom random, Permutation parent1, Permutation parent2) {
  if (parent1.Length != parent2.Length) throw new ArgumentException("OrderCrossover: The parent permutations are of unequal length.");
  int length = parent1.Length;
  int[] result = new int[length];
  bool[] copied = new bool[length];

  // choose the interval [breakPoint1, breakPoint2] inherited in place from parent1
  int breakPoint1 = random.Next(length - 1);
  int breakPoint2 = random.Next(breakPoint1 + 1, length);

  try {
    for (int j = breakPoint1; j <= breakPoint2; j++) {  // copy part of first permutation
      result[j] = parent1[j];
      copied[parent1[j]] = true;
    }

    // fill the remaining positions, starting just after the interval, with parent2's
    // not-yet-copied values in the order they occur; both indices wrap around
    int index = ((breakPoint2 + 1 >= length) ? (0) : (breakPoint2 + 1));
    int i = index; // for moving in parent2
    while (index != breakPoint1) {
      if (!copied[parent2[i]]) {
        result[index] = parent2[i];
        index++;
        if (index >= length) index = 0;  // wrap to the start of the child
      }
      i++;
      if (i >= length) i = 0;  // wrap to the start of parent2
    }
  } catch (IndexOutOfRangeException) {
    // copied[] is indexed with permutation values, so an out-of-range value surfaces here
    throw new InvalidOperationException("OrderCrossover: The permutation must consist of numbers in the interval [0;N) with N = length of the permutation.");
  }
  return new Permutation(parent1.PermutationType, result);
}
/// <summary>Always yields the configured spawn item; the random source is not used.</summary>
/// <param name="rand">Random number generator (unused).</param>
public T Pick(IRandom rand) {
  return this.ToSpawn;
}
/// <summary>
/// Creates a random linear linkage of the given length, honoring the configured
/// maximum group size parameter.
/// </summary>
/// <param name="random">The random number generator.</param>
/// <param name="length">The length of the encoding to create.</param>
protected override LinearLinkage Create(IRandom random, int length) {
  var groupSizeLimit = MaxGroupSizeParameter.ActualValue.Value;
  return Apply(random, length, groupSizeLimit);
}
/// <summary>
/// Carves a maze into <paramref name="innerMap"/> using randomized Kruskal's algorithm:
/// cells at odd coordinates form rooms, and the walls between them are knocked down in random
/// order whenever they still separate two disconnected room sets.
/// </summary>
/// <param name="innerMap">The map to carve the maze into.</param>
/// <param name="random">The random number generator used to shuffle the walls.</param>
/// <param name="pixelChangedCallback">Progress callback (x, y, currentStep, totalSteps).</param>
/// <returns>The same <paramref name="innerMap"/> instance with the maze written into it.</returns>
private InnerMap GoGenerateInternal(InnerMap innerMap, IRandom random, Action<int, int, long, long> pixelChangedCallback) {
  long totSteps = (((long)innerMap.Width - 1L) / 2L) * (((long)innerMap.Height - 1L) / 2L) * 2;
  long currentStep = 1;

  KruskalCell[][] theMap;

  //Prepare: cells at odd (x, y) positions become open rooms, everything else starts solid
  theMap = new KruskalCell[innerMap.Width][];
  for (int x = 0; x < innerMap.Width; x++) {
    theMap[x] = new KruskalCell[innerMap.Height];
    for (int y = 0; y < innerMap.Height; y++) {
      KruskalCell c = new KruskalCell(x, y);
      theMap[x][y] = c;
      if ((x + 1) % 2 == 0 && (y + 1) % 2 == 0 && x != innerMap.Width - 1 && y != innerMap.Height - 1) {
        currentStep++;
        //pixelChangedCallback(x, y, currentStep, totSteps);
        c.Solid = false;
        // every room starts in its own singleton set
        c.CellSet.Add(c);
      } else {
        c.Solid = true;
      }
    }
  }

  //Find walls and add neighbouring cells
  // a wall's CellSet temporarily stores the two rooms the wall separates
  List<KruskalCell> walls = new List<KruskalCell>();
  for (int y = 1; y < innerMap.Height - 2; y++) {
    Boolean horizontalwall = false;
    int startje = 1;
    if (y % 2 == 1) {
      horizontalwall = true;
      startje = 2;
    }
    for (int x = startje; x < innerMap.Width - 2; x = x + 2) {
      KruskalCell ccc = theMap[x][y];
      ccc.Solid = true;
      walls.Add(ccc);
      ccc.CellSet.Clear();
      if (horizontalwall) {
        //form.pixelDraw(x, y, Brushes.Blue);
        ccc.CellSet.Add(theMap[x - 1][y]);
        ccc.CellSet.Add(theMap[x + 1][y]);
      } else {
        //form.pixelDraw(x, y, Brushes.Yellow);
        ccc.CellSet.Add(theMap[x][y - 1]);
        ccc.CellSet.Add(theMap[x][y + 1]);
      }
    }
  }

  // knock down walls in random order whenever they separate two different room sets
  walls = walls.RandomPermutation(random);

  int cur = 0;
  foreach (KruskalCell wall in walls) {
    cur++;
    KruskalCell cell1 = wall.CellSet[0];
    KruskalCell cell2 = wall.CellSet[1];
    // reference comparison: two rooms are connected iff they share the same set instance
    if (!cell1.CellSet.Equals(cell2.CellSet)) {
      wall.Solid = false;
      currentStep++;
      pixelChangedCallback(wall.X, wall.Y, currentStep, totSteps);
      // merge the smaller set into the larger one and repoint all members (weighted union)
      List<KruskalCell> l1 = cell1.CellSet;
      List<KruskalCell> l2 = cell2.CellSet;
      if (l1.Count > l2.Count) {
        l1.AddRange(l2);
        foreach (KruskalCell c in l2) {
          c.CellSet = l1;
        }
      } else {
        l2.AddRange(l1);
        foreach (KruskalCell c in l1) {
          c.CellSet = l2;
        }
      }
    }
  }

  // write the result back into the map (true = passable, false = wall)
  for (int y = 0; y < innerMap.Height; y++) {
    for (int x = 0; x < innerMap.Width; x++) {
      var solid = theMap[x][y].Solid;
      if (solid) {
        innerMap[x, y] = false;
      } else {
        innerMap[x, y] = true;
      }
    }
  }

  return (innerMap);
}
/// <summary>
/// Creates a set of row cursors over this view by wrapping the source's cursor set,
/// forwarding only the columns the given predicate marks as active.
/// </summary>
/// <param name="consolidator">Receives the source's cursor consolidator.</param>
/// <param name="predicate">Selects which columns must be active.</param>
/// <param name="n">The suggested number of cursors.</param>
/// <param name="rand">Optional random number generator for shuffling.</param>
public sealed override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func<int, bool> predicate, int n, IRandom rand = null) {
  Host.CheckValue(predicate, nameof(predicate));
  Host.CheckValueOrNull(rand);

  // translate the output-column predicate into the source columns it depends on
  var inputPredicate = _bindings.GetDependencies(predicate);
  var activeColumns = _bindings.GetActive(predicate);
  var inputCursors = Source.GetRowCursorSet(out consolidator, inputPredicate, n, rand);
  Host.AssertNonEmpty(inputCursors);

  // No need to split if this is given 1 input cursor.
  var wrapped = new IRowCursor[inputCursors.Length];
  for (int i = 0; i < inputCursors.Length; i++) {
    wrapped[i] = new RowCursor(Host, _bindings, inputCursors[i], activeColumns);
  }
  return wrapped;
}
/// <summary>
/// Selects a random internal node of the tree and replaces it with a node of a different,
/// grammar-compatible symbol that can keep all of the node's existing subtrees.
/// Gives up silently after MAX_TRIES unsuccessful parent/child selections.
/// </summary>
/// <param name="random">The random number generator to use.</param>
/// <param name="symbolicExpressionTree">The tree whose node should be exchanged.</param>
public static void ChangeNodeType(IRandom random, ISymbolicExpressionTree symbolicExpressionTree) {
    List <ISymbol> allowedSymbols = new List <ISymbol>();
    ISymbolicExpressionTreeNode parent;
    int childIndex;
    ISymbolicExpressionTreeNode child;
    // repeat until a fitting parent and child are found (MAX_TRIES times)
    int tries = 0;
    do {
#pragma warning disable 612, 618
        parent = symbolicExpressionTree.Root.IterateNodesPrefix().Skip(1).Where(n => n.SubtreeCount > 0).SelectRandom(random);
#pragma warning restore 612, 618
        childIndex = random.Next(parent.SubtreeCount);
        child = parent.GetSubtree(childIndex);
        int existingSubtreeCount = child.SubtreeCount;
        allowedSymbols.Clear();
        foreach (var symbol in parent.Grammar.GetAllowedChildSymbols(parent.Symbol, childIndex)) {
            // Check basic properties that the new symbol must have.
            // BUG FIX: the min/max arity comparisons were swapped
            // (count <= Minimum && count >= Maximum), which only ever matched
            // symbols whose minimum and maximum arity both equal the existing
            // subtree count. The new symbol must accept the existing number of
            // subtrees, i.e. Minimum <= count <= Maximum.
            if (symbol.Name != child.Symbol.Name &&
                symbol.InitialFrequency > 0 &&
                existingSubtreeCount >= parent.Grammar.GetMinimumSubtreeCount(symbol) &&
                existingSubtreeCount <= parent.Grammar.GetMaximumSubtreeCount(symbol)) {
                // Check that all existing subtrees are also allowed for the new symbol.
                bool allExistingSubtreesAllowed = true;
                for (int existingSubtreeIndex = 0; existingSubtreeIndex < existingSubtreeCount && allExistingSubtreesAllowed; existingSubtreeIndex++) {
                    var existingSubtree = child.GetSubtree(existingSubtreeIndex);
                    allExistingSubtreesAllowed &= parent.Grammar.IsAllowedChildSymbol(symbol, existingSubtree.Symbol, existingSubtreeIndex);
                }
                if (allExistingSubtreesAllowed) {
                    allowedSymbols.Add(symbol);
                }
            }
        }
        tries++;
    } while (tries < MAX_TRIES && allowedSymbols.Count == 0);
    if (tries < MAX_TRIES) {
        // Pick the replacement symbol weighted by its initial frequency.
        var weights = allowedSymbols.Select(s => s.InitialFrequency).ToList();
#pragma warning disable 612, 618
        var newSymbol = allowedSymbols.SelectRandom(weights, random);
#pragma warning restore 612, 618
        // Replace the old node with the new node, carrying over all subtrees.
        var newNode = newSymbol.CreateTreeNode();
        if (newNode.HasLocalParameters) {
            newNode.ResetLocalParameters(random);
        }
        foreach (var subtree in child.Subtrees) {
            newNode.AddSubtree(subtree);
        }
        parent.RemoveSubtree(childIndex);
        parent.InsertSubtree(childIndex, newNode);
    }
}
/// <summary>
/// Performs the manipulation by delegating to the static
/// <see cref="ChangeNodeType"/> operator.
/// </summary>
/// <param name="random">The random number generator to use.</param>
/// <param name="symbolicExpressionTree">The tree to manipulate.</param>
protected override void Manipulate(IRandom random, ISymbolicExpressionTree symbolicExpressionTree)
    => ChangeNodeType(random, symbolicExpressionTree);
/// <summary>
/// GVR (genetic vehicle representation) crossover: copies parent1, removes from the copy
/// all cities of a randomly chosen sub-route of parent2, then re-inserts that sub-route
/// after the customer nearest to its first city.
/// </summary>
/// <param name="random">The random number generator to use.</param>
/// <param name="parent1">The first parent (cloned to form the child).</param>
/// <param name="parent2">The second parent (donates the sub-route).</param>
/// <returns>The offspring encoding.</returns>
private GVREncoding Crossover(IRandom random, GVREncoding parent1, GVREncoding parent2) {
    GVREncoding child = parent1.Clone() as GVREncoding;
    // Pick a random tour of parent2 and a random non-empty contiguous slice of it.
    Tour tour = parent2.Tours[random.Next(parent2.Tours.Count)];
    int breakPoint1 = random.Next(tour.Stops.Count);
    // length is in [1, remaining stops], so subroute always has at least one city.
    int length = random.Next(1, tour.Stops.Count - breakPoint1 + 1);
    List <int> subroute = tour.Stops.GetRange(breakPoint1, length);
    // Remove duplicates: delete every sub-route city from the child, dropping
    // tours that become empty (removal during iteration is deferred to a second loop).
    List <Tour> toBeRemoved = new List <Tour>();
    foreach (Tour route in child.Tours) {
        foreach (int city in subroute) {
            route.Stops.Remove(city);
        }
        if (route.Stops.Count == 0) {
            toBeRemoved.Add(route);
        }
    }
    foreach (Tour route in toBeRemoved) {
        child.Tours.Remove(route);
    }
    // Choose the customer (city id, 1-based) nearest to the sub-route's first city,
    // excluding cities already contained in the sub-route itself.
    double minDistance = -1;
    int customer = -1;
    for (int i = 1; i <= ProblemInstance.Cities.Value; i++) {
        if (!subroute.Contains(i)) {
            double distance = ProblemInstance.GetDistance(subroute[0], i, child);
            // First candidate is always taken, afterwards keep the closest one.
            if (customer == -1 || distance < minDistance) {
                customer = i;
                minDistance = distance;
            }
        }
    }
    // Insert the sub-route directly after the chosen customer's position.
    if (customer != -1) {
        Tour newTour;
        int newPosition;
        child.FindCustomer(customer, out newTour, out newPosition);
        newTour.Stops.InsertRange(newPosition + 1, subroute);
    } else {
        // Special case -> only one tour, whole tour has been chosen as subroute;
        // fall back to an unmodified clone of parent1.
        child = parent1.Clone() as GVREncoding;
    }
    return(child);
}
/// <summary>
/// Performs the crossover of the given parent linear-linkage encodings.
/// Implemented by concrete crossover operators.
/// </summary>
/// <param name="random">The random number generator to use.</param>
/// <param name="parents">The parent encodings to recombine.</param>
/// <returns>The offspring linear-linkage encoding.</returns>
protected abstract LinearLinkage Cross(IRandom random, ItemArray <LinearLinkage> parents);
/// <summary>
/// Picks a single element from the sequence uniformly at random.
/// </summary>
/// <typeparam name="T">The element type of the sequence.</typeparam>
/// <param name="source">The sequence to pick from.</param>
/// <param name="random">The random generator to use.</param>
/// <returns>One randomly selected element of <paramref name="source"/>.</returns>
public static T RandomItem <T>(this IEnumerable <T> source, IRandom random) =>
    source.SampleRandom(1, random).First();
/// <summary>
/// Crosses the two parent job-sequence-matrix encodings by delegating to the
/// static <see cref="Apply"/> operator.
/// </summary>
/// <param name="random">The random number generator to use.</param>
/// <param name="parent1">The first parent encoding.</param>
/// <param name="parent2">The second parent encoding.</param>
/// <returns>The offspring encoding.</returns>
public override JSMEncoding Cross(IRandom random, JSMEncoding parent1, JSMEncoding parent2)
    => Apply(random, parent1, parent2);
/// <summary>
/// Constructs the Dapper-backed database access object.
/// </summary>
/// <param name="random">Random generator used by this instance.</param>
/// <param name="dbProviderFactory">Factory that creates provider-specific connections.</param>
/// <param name="appSettings">Application settings supplying the connection string.</param>
public DapperDb(IRandom random, DbProviderFactory dbProviderFactory, IOptions <AppSettings> appSettings) {
    // The connection string is read once from the bound settings.
    _connectionString = appSettings.Value.ConnectionString;
    _dbProviderFactory = dbProviderFactory;
    _random = random;
}
/// <summary>
/// Test fixture setup: loads the table sheets from the importer and installs a
/// deterministic test random source.
/// </summary>
public BattleLogTest() {
    // Deterministic randomness shared with the item-enhancement tests.
    _random = new ItemEnhancementTest.TestRandom();
    _tableSheets = new TableSheets(TableSheetsImporter.ImportSheets());
}
/// <summary>
/// Constructs a token set that draws from the given random generator.
/// </summary>
/// <param name="random">The random generator to use.</param>
public TokenSet(IRandom random) { m_Random = random; }
/// <summary>
/// Lays out a "beetle" grid path: one long hall (the body) spanning the full side length
/// at a chosen tier, with randomly placed perpendicular legs on either side of it,
/// optionally connected to their neighbours.
/// Falls back to <see cref="CreateErrorPath"/> when the grid is too small.
/// </summary>
/// <param name="rand">The random number generator to use.</param>
/// <param name="floorPlan">The grid plan to add rooms and halls to.</param>
public override void ApplyToPath(IRandom rand, GridPlan floorPlan) {
    // gapLength is measured across the body's axis, sideLength along it.
    int gapLength = Vertical ? floorPlan.GridHeight : floorPlan.GridWidth;
    int sideLength = Vertical ? floorPlan.GridWidth : floorPlan.GridHeight;
    if (gapLength < 3 || sideLength < 2) {
        CreateErrorPath(rand, floorPlan);
        return;
    }
    // Add the body.
    // BUG FIX: was `rand.Next(2) * gapLength - 1`, which (by operator precedence)
    // evaluates as (rand.Next(2) * gapLength) - 1 and yields -1 for the first corner —
    // an invalid tier producing a room at a negative coordinate. The corners are
    // tier 0 and tier gapLength - 1, i.e. rand.Next(2) * (gapLength - 1).
    int chosenTier = FromCorners ? (rand.Next(2) * (gapLength - 1)) : rand.Next(1, gapLength - 1);
    RoomGen <T> roomGen = GiantHallGen.Pick(rand);
    if (roomGen == null) {
        roomGen = GenericRooms.Pick(rand);
    }
    floorPlan.AddRoom(new Rect(Vertical ? 0 : chosenTier, Vertical ? chosenTier : 0, Vertical ? sideLength : 1, Vertical ? 1 : sideLength), roomGen, this.LargeRoomComponents.Clone());
    GenContextDebug.DebugProgress("Center Room");
    // Add the legs on both sides of the body, one candidate per side-position.
    for (int ii = 0; ii < sideLength; ii++) {
        // Legs on the near side (tiers below chosenTier).
        if (chosenTier > 0) {
            if (rand.Next(100) < LegPercent) {
                int roomTier = rand.Next(0, chosenTier);
                floorPlan.AddRoom(new Loc(Vertical ? ii : roomTier, Vertical ? roomTier : ii), GenericRooms.Pick(rand), this.RoomComponents.Clone());
                // Connect the leg room back to the body with halls.
                for (int jj = roomTier; jj < chosenTier; jj++) {
                    SafeAddHall(new LocRay4(new Loc(Vertical ? ii : jj, Vertical ? jj : ii), Vertical ? Dir4.Down : Dir4.Right), floorPlan, GenericHalls.Pick(rand), GetDefaultGen(), this.RoomComponents, this.HallComponents, true);
                }
                GenContextDebug.DebugProgress("Add Leg");
                // Find the nearest earlier side-position that already has a room at this tier.
                int hasRoom = -1;
                for (int jj = ii - 1; jj >= 0; jj--) {
                    if (floorPlan.GetRoomPlan(new Loc(Vertical ? jj : roomTier, Vertical ? roomTier : jj)) != null) {
                        hasRoom = jj;
                        break;
                    }
                }
                // Optionally connect this leg sideways to that earlier room.
                if (ii > 0 && hasRoom > -1) {
                    if (rand.Next(100) < ConnectPercent) {
                        for (int jj = ii; jj > hasRoom; jj--) {
                            SafeAddHall(new LocRay4(new Loc(Vertical ? jj : roomTier, Vertical ? roomTier : jj), Vertical ? Dir4.Left : Dir4.Up), floorPlan, GenericHalls.Pick(rand), GetDefaultGen(), this.RoomComponents, this.HallComponents, true);
                            GenContextDebug.DebugProgress("Connect Leg");
                        }
                    }
                }
            }
        }
        // Legs on the far side (tiers above chosenTier); mirrors the near-side logic.
        if (chosenTier < gapLength - 1) {
            if (rand.Next(100) < LegPercent) {
                int roomTier = rand.Next(chosenTier + 1, gapLength);
                floorPlan.AddRoom(new Loc(Vertical ? ii : roomTier, Vertical ? roomTier : ii), GenericRooms.Pick(rand), this.RoomComponents.Clone());
                for (int jj = chosenTier; jj < roomTier; jj++) {
                    SafeAddHall(new LocRay4(new Loc(Vertical ? ii : jj, Vertical ? jj : ii), Vertical ? Dir4.Down : Dir4.Right), floorPlan, GenericHalls.Pick(rand), GetDefaultGen(), this.RoomComponents, this.HallComponents, true);
                }
                GenContextDebug.DebugProgress("Add Leg");
                int hasRoom = -1;
                for (int jj = ii - 1; jj >= 0; jj--) {
                    if (floorPlan.GetRoomPlan(new Loc(Vertical ? jj : roomTier, Vertical ? roomTier : jj)) != null) {
                        hasRoom = jj;
                        break;
                    }
                }
                if (ii > 0 && hasRoom > -1) {
                    if (rand.Next(100) < ConnectPercent) {
                        for (int jj = ii; jj > hasRoom; jj--) {
                            SafeAddHall(new LocRay4(new Loc(Vertical ? jj : roomTier, Vertical ? roomTier : jj), Vertical ? Dir4.Left : Dir4.Up), floorPlan, GenericHalls.Pick(rand), GetDefaultGen(), this.RoomComponents, this.HallComponents, true);
                            GenContextDebug.DebugProgress("Connect Leg");
                        }
                    }
                }
            }
        }
    }
}
/// <inheritdoc/>
/// <remarks>
/// One pass of multiclass SDCA dual updates without a global lock: dual variables are
/// updated via lock-free compare-and-swap (retried up to 2 * numThreads times), and the
/// label class's dual/weights/bias are adjusted at the end of each example so the duals
/// for that example sum to zero. Handles both the plain L2 path and the L1-threshold
/// (soft-thresholding) path.
/// </remarks>
protected override void TrainWithoutLock(IProgressChannelProvider progress, FloatLabelCursor.Factory cursorFactory, IRandom rand, IdToIdxLookup idToIdx, int numThreads, DualsTableBase duals, Float[] biasReg, Float[] invariants, Float lambdaNInv, VBuffer <Float>[] weights, Float[] biasUnreg, VBuffer <Float>[] l1IntermediateWeights, Float[] l1IntermediateBias, Float[] featureNormSquared) {
    Contracts.AssertValueOrNull(progress);
    Contracts.Assert(Args.L1Threshold.HasValue);
    Contracts.AssertValueOrNull(idToIdx);
    Contracts.AssertValueOrNull(invariants);
    Contracts.AssertValueOrNull(featureNormSquared);
    int numClasses = Utils.Size(weights);
    Contracts.Assert(Utils.Size(biasReg) == numClasses);
    Contracts.Assert(Utils.Size(biasUnreg) == numClasses);
    // Bound on CAS retries per dual variable; conflicts are rare with shuffled data.
    int maxUpdateTrials = 2 * numThreads;
    var l1Threshold = Args.L1Threshold.Value;
    bool l1ThresholdZero = l1Threshold == 0;
    // Learning rate for the bias adjustment terms.
    var lr = Args.BiasLearningRate * Args.L2Const.Value;
    var pch = progress != null ? progress.StartProgressChannel("Dual update") : null;
    using (pch)
    // Shuffle the cursor only when requested; rand may be null otherwise.
    using (var cursor = Args.Shuffle ? cursorFactory.Create(rand) : cursorFactory.Create()) {
        long rowCount = 0;
        if (pch != null) {
            pch.SetHeader(new ProgressHeader("examples"), e => e.SetProgress(0, rowCount));
        }
        // Maps a row id to its dense index; duals for a row start at idx * numClasses.
        Func <UInt128, long> getIndexFromId = GetIndexFromIdGetter(idToIdx, biasReg.Length);
        while (cursor.MoveNext()) {
            long idx = getIndexFromId(cursor.Id);
            long dualIndexInitPos = idx * numClasses;
            var features = cursor.Features;
            var label = (int)cursor.Label;
            Float invariant;
            Float normSquared;
            if (invariants != null) {
                // Precomputed invariant and feature norm available.
                invariant = invariants[idx];
                Contracts.AssertValue(featureNormSquared);
                normSquared = featureNormSquared[idx];
            } else {
                normSquared = VectorUtils.NormSquared(features);
                // Account for the implicit bias feature when the bias is not learned separately.
                if (Args.BiasLearningRate == 0) {
                    normSquared += 1;
                }
                invariant = _loss.ComputeDualUpdateInvariant(2 * normSquared * lambdaNInv * GetInstanceWeight(cursor));
            }
            // The output for the label class using current weights and bias.
            var labelOutput = WDot(ref features, ref weights[label], biasReg[label] + biasUnreg[label]);
            var instanceWeight = GetInstanceWeight(cursor);
            // This will be the new dual variable corresponding to the label class.
            Float labelDual = 0;
            // This will be used to update the weights and regularized bias corresponding to the label class.
            Float labelPrimalUpdate = 0;
            // This will be used to update the unregularized bias corresponding to the label class.
            Float labelAdjustment = 0;
            // Iterates through all classes.
            for (int iClass = 0; iClass < numClasses; iClass++) {
                // Skip the dual/weights/bias update for label class. Will be taken care of at the end.
                if (iClass == label) {
                    continue;
                }
                // Loop trials for compare-and-swap updates of duals.
                // In general, concurrent update conflict to the same dual variable is rare
                // if data is shuffled.
                for (int numTrials = 0; numTrials < maxUpdateTrials; numTrials++) {
                    long dualIndex = iClass + dualIndexInitPos;
                    var dual = duals[dualIndex];
                    // Margin of the label class over this class, including the pending
                    // label-side primal update accumulated so far this example.
                    var output = labelOutput + labelPrimalUpdate * normSquared - WDot(ref features, ref weights[iClass], biasReg[iClass] + biasUnreg[iClass]);
                    var dualUpdate = _loss.DualUpdate(output, 1, dual, invariant, numThreads);
                    // The successive over-relaxation approach to adjust the sum of dual variables (biasReg) to zero.
                    // Reference to details: http://stat.rutgers.edu/home/tzhang/papers/ml02_dual.pdf, pp. 16-17.
                    var adjustment = l1ThresholdZero ? lr * biasReg[iClass] : lr * l1IntermediateBias[iClass];
                    dualUpdate -= adjustment;
                    bool success = false;
                    duals.ApplyAt(dualIndex, (long index, ref Float value) => success = Interlocked.CompareExchange(ref value, dual + dualUpdate, dual) == dual);
                    if (success) {
                        // Note: dualConstraint[iClass] = lambdaNInv * (sum of duals[iClass])
                        var primalUpdate = dualUpdate * lambdaNInv * instanceWeight;
                        labelDual -= dual + dualUpdate;
                        labelPrimalUpdate += primalUpdate;
                        biasUnreg[iClass] += adjustment * lambdaNInv * instanceWeight;
                        labelAdjustment -= adjustment;
                        if (l1ThresholdZero) {
                            VectorUtils.AddMult(ref features, weights[iClass].Values, -primalUpdate);
                            biasReg[iClass] -= primalUpdate;
                        } else {
                            // Iterative shrinkage-thresholding (aka. soft-thresholding):
                            // update v = denseWeights as if there's no L1;
                            // thresholding: if |v[j]| < threshold, turn off weights[j];
                            // if not, shrink: w[j] = v[j] - sign(v[j]) * threshold.
                            l1IntermediateBias[iClass] -= primalUpdate;
                            if (Args.BiasLearningRate == 0) {
                                biasReg[iClass] = Math.Abs(l1IntermediateBias[iClass]) - l1Threshold > 0.0 ? l1IntermediateBias[iClass] - Math.Sign(l1IntermediateBias[iClass]) * l1Threshold : 0;
                            }
                            if (features.IsDense) {
                                CpuMathUtils.SdcaL1UpdateDense(-primalUpdate, features.Count, features.Values, l1Threshold, l1IntermediateWeights[iClass].Values, weights[iClass].Values);
                            } else if (features.Count > 0) {
                                CpuMathUtils.SdcaL1UpdateSparse(-primalUpdate, features.Count, features.Values, features.Indices, l1Threshold, l1IntermediateWeights[iClass].Values, weights[iClass].Values);
                            }
                        }
                        break;
                    }
                }
            }
            // Updating with label class weights and dual variable.
            duals[label + dualIndexInitPos] = labelDual;
            biasUnreg[label] += labelAdjustment * lambdaNInv * instanceWeight;
            if (l1ThresholdZero) {
                VectorUtils.AddMult(ref features, weights[label].Values, labelPrimalUpdate);
                biasReg[label] += labelPrimalUpdate;
            } else {
                // Same soft-thresholding step for the label class.
                l1IntermediateBias[label] += labelPrimalUpdate;
                var intermediateBias = l1IntermediateBias[label];
                biasReg[label] = Math.Abs(intermediateBias) - l1Threshold > 0.0 ? intermediateBias - Math.Sign(intermediateBias) * l1Threshold : 0;
                if (features.IsDense) {
                    CpuMathUtils.SdcaL1UpdateDense(labelPrimalUpdate, features.Count, features.Values, l1Threshold, l1IntermediateWeights[label].Values, weights[label].Values);
                } else if (features.Count > 0) {
                    CpuMathUtils.SdcaL1UpdateSparse(labelPrimalUpdate, features.Count, features.Values, features.Indices, l1Threshold, l1IntermediateWeights[label].Values, weights[label].Values);
                }
            }
            rowCount++;
        }
    }
}
/// <summary>
/// Creates one or more row cursors over this transform. Uses parallel cursoring when
/// <paramref name="n"/> &gt; 1 and the transform does not veto it; otherwise falls back
/// to a single wrapped cursor.
/// </summary>
/// <param name="consolidator">Receives the source consolidator, or null when a single cursor is returned.</param>
/// <param name="predicate">Selects which output columns must be active.</param>
/// <param name="n">The suggested degree of parallelism.</param>
/// <param name="rand">Optional random generator for shuffled cursoring; may be null.</param>
public override IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func <int, bool> predicate, int n, IRandom rand = null) {
    Host.CheckValue(predicate, nameof(predicate));
    Host.CheckValueOrNull(rand);
    var inputPred = _bindings.GetDependencies(predicate);
    var active = _bindings.GetActive(predicate);
    IRowCursor input;
    if (n > 1 && ShouldUseParallelCursors(predicate) != false) {
        // BUG FIX: rand was validated above but not forwarded to the source,
        // silently disabling shuffled cursoring (compare the sealed override
        // of this method elsewhere in this file, which forwards it).
        var inputs = Source.GetRowCursorSet(out consolidator, inputPred, n, rand);
        Host.AssertNonEmpty(inputs);
        if (inputs.Length != 1) {
            // Wrap each parallel source cursor one-to-one.
            var cursors = new IRowCursor[inputs.Length];
            for (int i = 0; i < inputs.Length; i++) {
                cursors[i] = new RowCursor(Host, _bindings, inputs[i], active);
            }
            return cursors;
        }
        // Only one cursor came back: no need to split, treat it as the single input.
        input = inputs[0];
    } else {
        // BUG FIX: forward rand to the single-cursor path as well.
        input = Source.GetRowCursor(inputPred, rand);
    }
    consolidator = null;
    return new IRowCursor[] { new RowCursor(Host, _bindings, input, active) };
}
/// <summary>
/// Builds the interaction matrix for an NK-style landscape: each of the
/// <paramref name="nComponents"/> columns marks its own bit plus
/// <paramref name="nInteractions"/> other bits chosen at random from a
/// distance-bounded window around the column index.
/// </summary>
/// <param name="length">The number of bits (matrix rows).</param>
/// <param name="nComponents">The number of fitness components (matrix columns).</param>
/// <param name="nInteractions">The number of interacting bits per component.</param>
/// <param name="random">The random number generator to use.</param>
/// <returns>The boolean interaction matrix.</returns>
public BoolMatrix InitializeInterations(int length, int nComponents, int nInteractions, IRandom random) {
    BoolMatrix interactions = new BoolMatrix(length, nComponents);
    int windowRadius = MaximumDistance(length, nInteractions);
    var lowerBounds = new Bounds(0, length - nInteractions);
    var upperBounds = new Bounds(nInteractions, length - 1);
    for (int col = 0; col < interactions.Columns; col++) {
        // Clamp the candidate window [lo, hi) around the column index.
        int lo = lowerBounds.Bounded(col - windowRadius);
        int hi = upperBounds.Bounded(col + windowRadius);
        var candidates = Enumerable.Range(lo, hi - lo).ToList();
        candidates.Remove(col);
        // Each component always interacts with its own bit.
        interactions[col, col] = true;
        // Thin the candidate list down to exactly nInteractions by random removal.
        while (candidates.Count > nInteractions) {
            candidates.RemoveAt(random.Next(candidates.Count));
        }
        foreach (var bit in candidates) {
            interactions[bit, col] = true;
        }
    }
    return interactions;
}
/// <summary>
/// Selects a skill: gathers the currently selectable skills and applies the
/// post-selection step with the given random source.
/// </summary>
/// <param name="random">The random number generator to use.</param>
/// <returns>The selected skill.</returns>
public Skill.Skill Select(IRandom random) {
    var selectable = GetSelectableSkills();
    return PostSelect(random, selectable);
}
/// <summary>
/// Creates a feature-vector cursor over the given data, delegating to the
/// cursor-wrapping constructor via <see cref="CreateCursor"/>.
/// </summary>
/// <param name="data">The role-mapped data to cursor over.</param>
/// <param name="opt">Cursor options; defaults to activating the feature column.</param>
/// <param name="rand">Optional random generator for shuffled cursoring; may be null.</param>
/// <param name="extraCols">Additional column indices to activate.</param>
public FeatureFloatVectorCursor(RoleMappedData data, CursOpt opt = CursOpt.Features, IRandom rand = null, params int[] extraCols) : this(CreateCursor(data, opt, rand, extraCols), data, opt) { }
/// <summary>
/// Constructs a mutator with the given random source and per-element mutation probability.
/// Starts with an empty list of element mutators.
/// </summary>
/// <param name="rand">The random number generator to use.</param>
/// <param name="mutationProbability">The probability of mutating an element.</param>
public Mutator(IRandom rand, double mutationProbability) {
    this.MutationProbability = mutationProbability;
    this.Mutators = new List <IElementMutator>();
    this.rand = rand;
}
/// <summary>
/// Helper used by the cursor constructors: creates the underlying row cursor for the
/// given role-mapped data with the requested options and extra active columns.
/// </summary>
/// <param name="data">The role-mapped data to cursor over; must not be null.</param>
/// <param name="opt">Cursor options selecting which standard columns are active.</param>
/// <param name="rand">Optional random generator for shuffled cursoring; may be null.</param>
/// <param name="extraCols">Additional column indices to activate.</param>
/// <returns>The created row cursor.</returns>
protected static IRowCursor CreateCursor(RoleMappedData data, CursOpt opt, IRandom rand, params int[] extraCols) {
    Contracts.AssertValue(data);
    Contracts.AssertValueOrNull(rand);
    return(data.CreateRowCursor(opt, rand, extraCols));
}
/// <summary>
/// Creates a float-label cursor over the given data, delegating to the
/// cursor-wrapping constructor via <see cref="CreateCursor"/>.
/// </summary>
/// <param name="data">The role-mapped data to cursor over.</param>
/// <param name="opt">Cursor options; defaults to activating the label column.</param>
/// <param name="rand">Optional random generator for shuffled cursoring; may be null.</param>
/// <param name="extraCols">Additional column indices to activate.</param>
public FloatLabelCursor(RoleMappedData data, CursOpt opt = CursOpt.Label, IRandom rand = null, params int[] extraCols) : this(CreateCursor(data, opt, rand, extraCols), data, opt) { }
/// <summary>
/// Create a row cursor for the RoleMappedData with the indicated standard columns active.
/// This does not verify that the columns exist, but merely activates the ones that do exist.
/// </summary>
/// <param name="data">The role-mapped data to cursor over.</param>
/// <param name="opt">Cursor options selecting which standard columns are active.</param>
/// <param name="rand">Optional random generator for shuffled cursoring; may be null.</param>
/// <param name="extraCols">Additional column indices to activate, or null.</param>
public static IRowCursor CreateRowCursor(this RoleMappedData data, CursOpt opt, IRandom rand, IEnumerable <int> extraCols = null) {
    // Build the column predicate once, then hand it to the underlying view.
    var predicate = CreatePredicate(data, opt, extraCols);
    return data.Data.GetRowCursor(predicate, rand);
}
/// <summary>
/// Creates a standard scalar cursor over the given data, delegating to the
/// cursor-wrapping constructor via <see cref="CreateCursor"/>.
/// </summary>
/// <param name="data">The role-mapped data to cursor over.</param>
/// <param name="opt">Cursor options selecting which standard columns are active.</param>
/// <param name="rand">Optional random generator for shuffled cursoring; may be null.</param>
/// <param name="extraCols">Additional column indices to activate.</param>
public StandardScalarCursor(RoleMappedData data, CursOpt opt, IRandom rand = null, params int[] extraCols) : this(CreateCursor(data, opt, rand, extraCols), data, opt) { }
/// <summary>
/// Creates a cursor set: forwards to the cached view when row data is present,
/// otherwise returns a single cursor with no consolidator.
/// </summary>
/// <param name="consolidator">Receives the view's consolidator, or null for the single-cursor case.</param>
/// <param name="predicate">Selects which columns must be active.</param>
/// <param name="n">The suggested degree of parallelism.</param>
/// <param name="rand">Optional random generator for shuffled cursoring; may be null.</param>
public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func <int, bool> predicate, int n, IRandom rand = null) {
    _host.CheckValue(predicate, nameof(predicate));
    if (!HasRowData) {
        // No materialized rows: a single cursor suffices, nothing to consolidate.
        consolidator = null;
        return new IRowCursor[] { GetRowCursor(predicate, rand) };
    }
    return _schemaEntry.GetView().GetRowCursorSet(out consolidator, predicate, n, rand);
}
/// <summary>
/// Create a row cursor set for the RoleMappedData with the indicated standard columns active.
/// This does not verify that the columns exist, but merely activates the ones that do exist.
/// </summary>
/// <param name="data">The role-mapped data to cursor over.</param>
/// <param name="consolidator">Receives the consolidator produced by the underlying view.</param>
/// <param name="opt">Cursor options selecting which standard columns are active.</param>
/// <param name="n">The suggested degree of parallelism.</param>
/// <param name="rand">Optional random generator for shuffled cursoring; may be null.</param>
/// <param name="extraCols">Additional column indices to activate, or null.</param>
public static IRowCursor[] CreateRowCursorSet(this RoleMappedData data, out IRowCursorConsolidator consolidator, CursOpt opt, int n, IRandom rand, IEnumerable <int> extraCols = null) {
    // Build the column predicate once, then hand it to the underlying view.
    var predicate = CreatePredicate(data, opt, extraCols);
    return data.Data.GetRowCursorSet(out consolidator, predicate, n, rand);
}
/// <summary>
/// Creates a cursor over the partitioned files: records which columns are active,
/// allocates buffers for the non-sub-loader column values, builds the getter
/// delegates, and fixes the order in which files are visited.
/// </summary>
/// <param name="provider">Channel provider passed to the base cursor.</param>
/// <param name="parent">The owning partitioned-file loader; must not be null.</param>
/// <param name="files">The multi-stream file source; must not be null.</param>
/// <param name="predicate">Selects which columns are active; must not be null.</param>
/// <param name="rand">Optional random generator controlling the file visit order; may be null.</param>
public Cursor(IChannelProvider provider, PartitionedFileLoader parent, IMultiStreamSource files, Func <int, bool> predicate, IRandom rand) : base(provider) {
    Contracts.AssertValue(parent);
    Contracts.AssertValue(files);
    Contracts.AssertValue(predicate);
    _parent = parent;
    // One activity flag per schema column; the first SubColumnCount flags belong
    // to the sub-loader's columns.
    _active = Utils.BuildArray(Schema.ColumnCount, predicate);
    _subActive = _active.Take(SubColumnCount).ToArray();
    // Value buffers only for the columns beyond the sub-loader's.
    _colValues = new ReadOnlyMemory <char> [Schema.ColumnCount - SubColumnCount];
    // NOTE(review): _subGetters is assigned before CreateGetters() runs —
    // presumably CreateGetters reads it; keep this ordering.
    _subGetters = new Delegate[SubColumnCount];
    _getters = CreateGetters();
    _fileOrder = CreateFileOrder(rand).GetEnumerator();
}
/// <summary>
/// Creates a multiclass-label cursor over the given data, delegating to the
/// cursor-wrapping constructor via <see cref="CreateCursor"/>.
/// </summary>
/// <param name="classCount">The number of label classes.</param>
/// <param name="data">The role-mapped data to cursor over.</param>
/// <param name="opt">Cursor options; defaults to activating the label column.</param>
/// <param name="rand">Optional random generator for shuffled cursoring; may be null.</param>
/// <param name="extraCols">Additional column indices to activate.</param>
public MultiClassLabelCursor(int classCount, RoleMappedData data, CursOpt opt = CursOpt.Label, IRandom rand = null, params int[] extraCols) : this(classCount, CreateCursor(data, opt, rand, extraCols), data, opt) { }
/// <summary>
/// Creates a cursor set for the partitioned-file loader. This loader does not
/// support parallel cursoring, so a single cursor is always returned and the
/// consolidator is null.
/// </summary>
/// <param name="consolidator">Always set to null.</param>
/// <param name="needCol">Selects which columns must be active.</param>
/// <param name="n">The suggested degree of parallelism (ignored).</param>
/// <param name="rand">Optional random generator controlling the file visit order; may be null.</param>
public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func <int, bool> needCol, int n, IRandom rand = null) {
    consolidator = null;
    return new IRowCursor[] { new Cursor(_host, this, _files, needCol, rand) };
}
/// <summary>
/// Constructs a new FrequencyIntGenerator object. The given frequencies describe a
/// discrete distribution over the integers 0 to n - 1 (n = number of frequencies),
/// realized here by building the bucket/index tables of an alias-method-style sampler.
/// </summary>
/// <param name="relativeFrequencies">The (relative) frequency to generate integers at. The size of this
/// sequence determines which frequencies are being generated. If the size is n, then integers from 0
/// to n - 1 are generated.</param>
/// <param name="random">The random generator to use.</param>
/// <exception cref="ArgumentException">The sequence is empty, or its sum is not positive.</exception>
/// <exception cref="Exception">Any frequency is negative.</exception>
public FrequencyIntGenerator(IEnumerable <float> relativeFrequencies, IRandom random) {
    // Avoid re-enumerating: materialize unless the caller already passed an array.
    float[] frequencies = relativeFrequencies as float[] ?? relativeFrequencies.ToArray();
    if (frequencies.Length == 0) {
        throw new ArgumentException("Array cannot be empty");
    }
    // NOTE(review): this single-element branch does not return early; execution
    // falls through and the tables are recomputed below, which for n = 1 produces
    // the same { 1f } / { 0 } / { 0 } result — redundant but harmless as written.
    if (frequencies.Length == 1) {
        if (frequencies[0] <= 0) {
            throw new ArgumentException("Sum of frequencies cannot be 0");
        }
        buckets = new[] { 1f };
        indices0 = new[] { 0 };
        indices1 = new[] { 0 };
    }
    float sum = frequencies.Sum();
    if (sum <= 0) {
        throw new ArgumentException("Sum of frequencies cannot be 0");
    }
    if (frequencies.Any(x => x < 0)) {
        throw new Exception("Frequencies must be non-negative");
    }
    // Normalize so the probabilities average to 1 (each "bucket" holds mass 1).
    float[] absoluteProbabilities = frequencies.Select(x => x / sum * frequencies.Length).ToArray();
    buckets = new float[absoluteProbabilities.Length];
    // indices0 sorted ascending by probability; indices1 receives the alias entries.
    indices0 = Enumerable.Range(0, absoluteProbabilities.Length).ToArray();
    indices0 = indices0.OrderBy(i => absoluteProbabilities[i]).ToArray();
    int leftIndex = 0;
    int rightIndex = absoluteProbabilities.Length - 1;
    indices1 = new int[indices0.Length];
    // Pair each smallest remaining probability with the largest one: the small
    // entry keeps its own mass in buckets[leftIndex], the rest of the bucket is
    // taken from (aliased to) the large entry at indices0[rightIndex].
    // rightIndex stays fixed; the tail of indices0 is re-sorted every iteration
    // so its last element is always the currently largest remaining probability.
    while (leftIndex <= rightIndex) {
        buckets[leftIndex] = absoluteProbabilities[indices0[leftIndex]];
        absoluteProbabilities[indices0[leftIndex]] = 0;
        // Deduct the donated mass (1 - buckets[leftIndex]) from the large entry.
        absoluteProbabilities[indices0[rightIndex]] -= (1 - buckets[leftIndex]);
        indices1[leftIndex] = indices0[rightIndex];
        leftIndex++;
        // Keep the processed prefix, re-sort the remainder by the updated masses.
        indices0 = indices0
            .Take(leftIndex)
            .Concat(
                indices0.Skip(leftIndex).OrderBy(i => absoluteProbabilities[i]))
            .ToArray();
    }
    this.random = random;
}