public static double NumericDistance(CombinatorialSemantics desired, Semantics candidate, IEnumerable<Semantics> allCandidates)
{
    double distance = 0.0;
    for (int i = 0; i < Math.Min(desired.Length, candidate.Length); i++)
    {
        var desiredSemantics = desired[i];
        foreach (var value in desiredSemantics)
        {
            if (candidate[i] == null)
            {
                // if candidate has no semantic value, take the worst distance of the other candidates
                // if no other candidate has a semantic value, ignore i'th value
                /*bool distanceFound;
                double worstDistance = GetWorstDistance((int)value, allCandidates, i, out distanceFound);
                if (distanceFound)
                {
                    distance += worstDistance;
                }*/
            }
            else
            {
                var dist = Math.Abs((int)value - (int)candidate[i]);
                distance += dist;
            }
        }
    }
    return distance;
}
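// Illustrative only: a minimal sketch of how the distance above could drive a
// nearest-candidate (library) search over a pool of candidate subtree semantics.
// FindNearest is a hypothetical helper, not part of the original code; it relies
// only on NumericDistance as defined above.
public static Semantics FindNearest(CombinatorialSemantics desired, IEnumerable<Semantics> candidates)
{
    Semantics best = null;
    double bestDistance = double.MaxValue;
    foreach (var candidate in candidates)
    {
        double d = NumericDistance(desired, candidate, candidates);
        if (d < bestDistance)
        {
            bestDistance = d;
            best = candidate;
        }
    }
    return best;
}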
public bool Operate(Semantics resultSemantics, Individual individual, ISemanticSubTreePool subTreePool, int maxTreeDepth, out bool triedBackPropagation)
{
    SyntaxTree tree = (SyntaxTree)individual.SyntaxTree.DeepClone();
    TreeNode exchangeNode = tree.GetRandomNode();
    Type nodeType = exchangeNode.Type;
    TreeNode root;
    if (exchangeNode.IsBackPropagable(out root))
    {
        triedBackPropagation = true;
        var desiredSemantics = DoSemanticBackPropagation(resultSemantics, exchangeNode, root);
        TreeNode newNode = DoLibrarySearch(individual, subTreePool, nodeType, desiredSemantics);
        if (newNode != null)
        {
            bool replaced = tree.ReplaceTreeNode(exchangeNode, newNode);
            if (replaced && tree.Height <= maxTreeDepth)
            {
                individual.SyntaxTree = tree;
                return true;
            }
            return false;
        }
    }
    triedBackPropagation = false;
    return false;
}
private Semantics GetRandom(int length)
{
    var semantics = new Semantics(length);
    for (int i = 0; i < length; i++)
    {
        semantics[i] = RandomValueGenerator.Instance.GetBool();
    }
    return semantics;
}
public static double NumericDistance(Semantics semantics1, Semantics semantics2)
{
    double distance = 0.0;
    for (int i = 0; i < Math.Min(semantics1.Length, semantics2.Length); i++)
    {
        // assumes both semantics are fully evaluated (non-null values)
        double s1 = (double)semantics1[i];
        double s2 = (double)semantics2[i];
        distance += Math.Abs(s1 - s2);
    }
    return distance;
}
// Algorithm details: Pawlak et al. - Competent Geometric Semantic GP, page 187
public Semantics GetMidpoint(Semantics semantics1, Semantics semantics2)
{
    int length = semantics1.Length;
    Semantics midpoint = new Semantics(length);
    for (int i = 0; i < length; i++)
    {
        int s1 = semantics1[i] != null ? (int)semantics1[i] : 0; // use 0 if no semantics evaluated
        int s2 = semantics2[i] != null ? (int)semantics2[i] : 0;
        midpoint[i] = (s1 + s2) / 2;
    }
    return midpoint;
}
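// Illustrative only: a small worked example of the integer midpoint above.
// semantics1: [ 2, 8, 5 ]
// semantics2: [ 4, 2, 5 ]
// midpoint:   [ 3, 5, 5 ]   (per-component average using integer division)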
// Algorithm details: Pawlak et al. - Competent Geometric Semantic GP, page 187
public Semantics GetMidpoint(Semantics semantics1, Semantics semantics2)
{
    int length = semantics1.Length;
    var midpoint = new Semantics(length);
    var random = GetRandom(length);
    for (int i = 0; i < length; i++)
    {
        bool s1 = semantics1[i] != null ? (bool)semantics1[i] : false; // use false if no semantics evaluated
        bool s2 = semantics2[i] != null ? (bool)semantics2[i] : false;
        bool sx = (bool)random[i];
        midpoint[i] = (s1 && sx) || (!sx && s2); // random mask: take s1 where sx is true, s2 otherwise
    }
    return midpoint;
}
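// Illustrative only: a small worked example of the Boolean midpoint above.
// With the random mask sx drawn by GetRandom, the midpoint takes semantics1 where
// the mask is true and semantics2 where it is false, so it lies between the two
// parents in Hamming distance.
// semantics1: [ true,  true, false, false ]
// semantics2: [ true, false,  true, false ]
// mask sx:    [ true, false,  true, false ]
// midpoint:   [ true, false, false, false ]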
public static double HammingDistance(CombinatorialSemantics desired, Semantics candidate, IEnumerable<Semantics> allCandidates)
{
    double distance = 0.0;
    for (int i = 0; i < Math.Min(desired.Length, candidate.Length); i++)
    {
        var semantics = desired[i];
        foreach (var value in semantics)
        {
            // check the i'th candidate value; a null-check on the whole candidate would never
            // be reached, because candidate.Length is already dereferenced in the loop header
            if (candidate[i] == null || !value.Equals(candidate[i]))
            {
                distance++;
            }
        }
    }
    return distance;
}
// Pawlak et al., Algorithm 1 in the paper "Semantic Backpropagation for Designing Search Operators in GP".
// Naming: n ... node, p ... root, t ... target
// Semantic types may be mixed up in an expression, e.g. bool ret = (a + b) == (c - a)
// => have to use the object type instead of one generic type
public CombinatorialSemantics Propagate(TreeNode root, TreeNode node, Semantics target)
{
    CombinatorialSemantics semantics = new CombinatorialSemantics(target.Length);
    var path = root.GetPathTo(node);
    for (int i = 0; i < target.Length; i++) // for all ti element of t do:
    {
        ISet<object> currentValueSet = new HashSet<object>(); // Di
        currentValueSet.Add(target[i]);                       // Di <- { ti }
        var currentNode = root;                               // a <- p
        bool ambiguityFound = false;                          // * not element of Di
        int pathIndex = 0;                                    // index of path element
        while (currentNode != node && currentValueSet.Count > 0 && !ambiguityFound)
        {
            int k = currentNode.Children.IndexOf(path[pathIndex]);
            ISet<object> valueSet = new HashSet<object>();    // D' <- {}
            var invertibleExpr = currentNode as IInvertible;  // if not invertible, loop will end
            if (invertibleExpr != null && invertibleExpr.IsInvertible)
            {
                foreach (var desiredValue in currentValueSet)
                {
                    bool ambiguous;
                    var complementValue = invertibleExpr.GetComplementValue(k, i);
                    if (complementValue != null) // complement value can be null if semantics not evaluated
                    {
                        valueSet.UnionWith(invertibleExpr.Invert(desiredValue, k, complementValue, out ambiguous));
                        if (ambiguous)
                        {
                            ambiguityFound = true;
                        }
                    }
                }
            }
            // valueSet is empty in case of ambiguity (* element of D')
            currentValueSet = valueSet;      // Di <- D'
            currentNode = path[pathIndex++]; // a <- Child(a, n)
        }
        semantics[i] = currentValueSet;
    }
    return semantics;
}
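// Illustrative only: a minimal sketch of what an invertible addition node could look like
// for the propagation loop above. The IInvertible member signatures (IsInvertible,
// GetComplementValue(k, i), Invert(desired, k, complement, out ambiguous)) are inferred
// from the call sites and are assumptions, not taken from the original code base.
public class AdditionNode : TreeNode, IInvertible
{
    // a + b = d is always uniquely invertible
    public bool IsInvertible
    {
        get { return true; }
    }

    // Complement of child k at fitness case i: the evaluated semantics of the sibling child.
    public object GetComplementValue(int k, int i)
    {
        var sibling = Children[1 - k] as ISemanticsHolder;
        return sibling != null && sibling.Semantics != null ? sibling.Semantics[i] : null;
    }

    // Invert d = childK + complement  =>  childK = d - complement; never ambiguous for addition.
    public IEnumerable<object> Invert(object desiredValue, int k, object complementValue, out bool ambiguous)
    {
        ambiguous = false;
        return new object[] { (int)desiredValue - (int)complementValue };
    }
}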
private void AssignSemanticsToTreeNodes(Individual individual, MDLFitnessResult fitnessResult)
{
    var dataset = fitnessResult.Dataset;
    int testCount = dataset.Count;
    foreach (var id in dataset.Features)
    {
        var node = individual.SyntaxTree.FindNodeById(id);
        var semanticsNode = node as ISemanticsHolder;
        if (semanticsNode != null)
        {
            var semantics = new Semantics(testCount);
            for (int i = 0; i < testCount; i++)
            {
                semantics[i] = dataset[id][i];
            }
            semanticsNode.Semantics = semantics;
        }
    }
}
public static double HammingDistance(Semantics semantics1, Semantics semantics2)
{
    double distance = 0.0;
    for (int i = 0; i < Math.Min(semantics1.Length, semantics2.Length); i++)
    {
        if (semantics1[i] == null || semantics2[i] == null)
        {
            // a value present on exactly one side counts as a mismatch
            if (semantics1[i] == null && semantics2[i] != null || semantics1[i] != null && semantics2[i] == null)
            {
                distance++;
            }
        }
        else if (!semantics1[i].Equals(semantics2[i]))
        {
            distance++;
        }
    }
    return distance;
}
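// Illustrative only: a small worked example of the pairwise Hamming distance above.
// semantics1: [ true, false, true ]
// semantics2: [ true, true,  null ]
// distance = 2: index 1 differs, and index 2 has a value on only one side.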
private Individual GenerateChildren(Individual individual1, Individual individual2, Semantics midpoint, out bool triedBackPropagation)
{
    var child1 = new Individual(individual1);
    child1.FitnessEvaluated = false;
    var mutated = ResultSemanticsOperator.Operate(midpoint, child1, SubTreePool, MaxTreeDepth, out triedBackPropagation);
    double fitnessChange = 0;
    if (mutated && triedBackPropagation && individual1.FitnessEvaluated && FitnessEvaluator != null)
    {
        double fitness = FitnessEvaluator.Evaluate(child1, Problem).Fitness;
        fitnessChange = fitness - individual1.Fitness;
    }
    Statistics.Instance.AddBackpropagationAttemptCrossover(triedBackPropagation, fitnessChange);
    return mutated ? child1 : individual1;
}
protected virtual CombinatorialSemantics DoSemanticBackPropagation(Semantics resultSemantics, TreeNode exchangeNode, TreeNode root)
{
    return SemanticBackPropagator.Propagate(root, exchangeNode, resultSemantics);
}