public void TestRandomChoice()
{
    XCRMParser parser = new XCRMParser();

    // Build a two-choice <random_choice> element to feed the parser.
    System.Xml.XmlElement xmlelement = RoarExtensions.CreateXmlElement(
        "<random_choice>" +
        "<choice weight=\"78\">" +
        "<modifier/>" +
        "<requirement/>" +
        "</choice>" +
        "<choice weight=\"12\">" +
        "<modifier/>" +
        "<requirement/>" +
        "</choice>" +
        "</random_choice>");
    Assert.IsNotNull(xmlelement);

    // Locate the sub-nodes the mocked IXCRMParser is expected to receive.
    System.Xml.XmlNode rc1_modifier_node = xmlelement.SelectSingleNode("./choice[1]/modifier");
    Assert.IsNotNull(rc1_modifier_node);
    System.Xml.XmlNode rc1_requirement_node = xmlelement.SelectSingleNode("./choice[1]/requirement");
    Assert.IsNotNull(rc1_requirement_node);
    System.Xml.XmlNode rc2_modifier_node = xmlelement.SelectSingleNode("./choice[2]/modifier");
    Assert.IsNotNull(rc2_modifier_node);
    System.Xml.XmlNode rc2_requirement_node = xmlelement.SelectSingleNode("./choice[2]/requirement");
    Assert.IsNotNull(rc2_requirement_node);

    // Sentinel lists: the parsed choices must hand back exactly these instances.
    List<Modifier> rc1_modifiers = new List<Modifier>();
    List<Requirement> rc1_requirements = new List<Requirement>();
    List<Modifier> rc2_modifiers = new List<Modifier>();
    List<Requirement> rc2_requirements = new List<Requirement>();

    // Each sub-parse call must happen exactly once with the matching node.
    parser.crm = mockery.NewMock<IXCRMParser>();
    Expect.Once.On(parser.crm).Method("ParseModifierList").With(rc1_modifier_node).Will(Return.Value(rc1_modifiers));
    Expect.Once.On(parser.crm).Method("ParseRequirementList").With(rc1_requirement_node).Will(Return.Value(rc1_requirements));
    Expect.Once.On(parser.crm).Method("ParseModifierList").With(rc2_modifier_node).Will(Return.Value(rc2_modifiers));
    Expect.Once.On(parser.crm).Method("ParseRequirementList").With(rc2_requirement_node).Will(Return.Value(rc2_requirements));

    RandomChoice m = parser.ParseAModifier(xmlelement) as RandomChoice;
    mockery.VerifyAllExpectationsHaveBeenMet();

    // Weights and sub-lists must come through unchanged, in document order.
    Assert.IsNotNull(m);
    Assert.AreEqual(2, m.choices.Count);
    Assert.AreEqual(78, m.choices[0].weight);
    Assert.AreSame(rc1_modifiers, m.choices[0].modifiers);
    Assert.AreSame(rc1_requirements, m.choices[0].requirements);
    Assert.AreEqual(12, m.choices[1].weight);
    Assert.AreSame(rc2_modifiers, m.choices[1].modifiers);
    Assert.AreSame(rc2_requirements, m.choices[1].requirements);
}
// Builds a random tree of exactly 'size' nodes whose root returns 'type',
// using the precomputed ROOT_D distributions for the given function set.
internal virtual GPNode CreateTreeOfType(IEvolutionState state, int thread, GPInitializer initializer, int functionset, int type, int size, IMersenneTwister mt)
{
    // Distribution of candidate roots for this (function set, type, size).
    var distribution = ROOT_D[functionset][type][size];

    // Pick a root per the distribution and clone it.
    var choice = RandomChoice.PickFromDistribution(distribution, distribution[0], mt.NextDouble());
    var node = distribution[choice].Node.LightClone();
    node.ResetNode(state, thread); // give ERCs a chance to randomize

    // Diagnostic dump: a terminal was picked even though size > 1 was requested.
    if (node.Children.Length == 0 && size != 1)
    {
        Console.Out.WriteLine("Size: " + size + " Node: " + node);
        for (var x = 0; x < distribution.Length; x++)
        {
            Console.Out.WriteLine("" + x + (distribution[x].Node) + " " + distribution[x].Prob);
        }
    }

    // Nonterminal root: distribute the remaining size - 1 nodes among its children.
    if (size > 1)
    {
        FillNodeWithChildren(state, thread, initializer, functionset, node, distribution[choice].Node, 0, size - 1, mt);
    }

    return node;
}
// Chooses one permutation from the list, weighting each by the number of
// distinct trees of the given size it can generate.
private static int[] Select(IList<int[]> permutations, int size)
{
    var total = 0;
    // NOTE(review): the denominator intentionally accumulates across ALL
    // permutations (it is never reset per iteration) — preserved as-is.
    long denominator = 1;
    var quantity = new int[permutations.Count];

    for (var p = 0; p < permutations.Count; p++)
    {
        var current = permutations[p];
        long residue = size;
        foreach (var arity in current)
        {
            residue -= arity;
            denominator *= Fact(arity);
        }
        quantity[p] = (int)(Fact(size - 1) / (denominator * Fact(residue)));
        total += quantity[p];
    }

    // Normalize counts into a probability distribution usable by RandomChoice.
    var prob = new double[quantity.Length];
    for (var p = 0; p < quantity.Length; p++)
    {
        prob[p] = quantity[p] / (double)total;
    }
    RandomChoice.OrganizeDistribution(prob);

    // NOTE(review): picks with a fixed value of 0.0 (deterministic) — preserved.
    var selection = RandomChoice.PickFromDistribution(prob, 0.0, 7);
    return permutations[selection];
}
// Magic 8-Ball command: announce, pause for suspense, then reveal the answer.
void EightBall(ITriggerMsg e)
{
    var answer = RandomChoice.Shake8Ball();
    irc.SendMessage(e.ReturnTo, "The Magic 8-Ball says...");
    Thread.Sleep(1500); // 1.5 s dramatic pause
    irc.SendMessage(e.ReturnTo, answer + ".");
}
// Replies with a random pick from the trigger's arguments; silent when
// RndChoice yields nothing.
void Choose(ITriggerMsg e)
{
    var picked = RandomChoice.RndChoice(e.Arguments);
    if (picked == null)
    {
        return;
    }
    e.Reply(picked);
}
// Picks a tree size either from the exact TrueSizes distribution for this
// function set and type, or by deferring to the base class policy.
public virtual int PickSize(IEvolutionState state, int thread, int functionset, int type)
{
    if (!UseTrueDistribution)
    {
        return base.PickSize(state, thread);
    }
    var distribution = TrueSizes[functionset][type];
    return RandomChoice.PickFromDistribution(distribution, state.Random[thread].NextDouble());
}
public void Random_Defector_Step()
{
    // BUG FIX: 'differentResult' was initialized to true, but
    // validateRandomChoice only ever sets it to true (never false), so the
    // assertion below could never fail. Start from false so the test actually
    // verifies that randomness was observed.
    bool differentResult = false;
    var p = new RandomChoice();
    validateRandomChoice(StrategyChoice.Defect, ref differentResult, ref p);
    Assert.IsTrue(differentResult, "didn't see a random in 5 occurences");
}
/// <summary>
/// Finalize the structure and set the probabilities.
/// Collects every entry's probability into one array and hands the whole
/// distribution to a fresh RandomChoice chooser.
/// </summary>
public void FinalizeStructure()
{
    var probabilities = new double[Count];
    var index = 0;
    while (index < Count)
    {
        probabilities[index] = list[index].probability;
        index++;
    }
    this.chooser = new RandomChoice(probabilities);
}
/// <summary>
/// Assuming that either ResetMinSize and ResetMaxSize, or SizeDistribution, is defined,
/// picks a random size from ResetMinSize...ResetMaxSize inclusive, or randomly
/// from SizeDistribution.
/// </summary>
public virtual int PickSize(IEvolutionState state, int thread)
{
    if (SizeDistribution == null)
    {
        // Uniform pick over the inclusive range ResetMinSize..ResetMaxSize.
        var span = ResetMaxSize - ResetMinSize + 1;
        return state.Random[thread].NextInt(span) + ResetMinSize;
    }
    // Weighted pick from the configured distribution.
    return RandomChoice.PickFromDistribution(SizeDistribution, state.Random[thread].NextDouble());
}
// Coin-flips between the over-threshold and under-threshold fitness pools,
// then picks an individual from the chosen pool's distribution.
public override int Produce(int subpop, IEvolutionState state, int thread)
{
    var rng = state.Random[thread];
    if (!rng.NextBoolean(Gets_N_Percent))
    {
        // under -- 0 to SortedFitUnder.length
        return SortedPop[RandomChoice.PickFromDistribution(SortedFitUnder, rng.NextDouble())];
    }
    // over -- SortedFitUnder.length to SortedPop.length
    var overIndex = RandomChoice.PickFromDistribution(SortedFitOver, rng.NextDouble());
    return SortedPop[SortedFitUnder.Length + overIndex];
}
/// <summary>
/// Completely override FitProportionateSelection.prepareToProduce.
/// Loads raw fitnesses, sigma-scales them around the population mean, clamps
/// at the configured floor, and organizes the result into a distribution.
/// </summary>
public override void PrepareToProduce(IEvolutionState s, int subpop, int thread)
{
    base.PrepareToProduce(s, subpop, thread);

    var individuals = s.Population.Subpops[subpop].Individuals;
    Fitnesses = new double[individuals.Count];

    // Load raw fitnesses; sigma scaling requires all of them to be non-negative.
    for (var i = 0; i < Fitnesses.Length; i++)
    {
        Fitnesses[i] = individuals[i].Fitness.Value;
        if (Fitnesses[i] < 0)
        {
            s.Output.Fatal("Discovered a negative fitness value. SigmaScalingSelection requires that all fitness values be non-negative(offending subpopulation #" + subpop + ")");
        }
    }

    // Mean fitness.
    double meanSum = 0;
    for (var i = 0; i < Fitnesses.Length; i++)
    {
        meanSum = meanSum + Fitnesses[i];
    }
    var meanFitness = meanSum / Fitnesses.Length;

    // Sample standard deviation (n - 1 in the denominator).
    double squaredDeviationsSum = 0;
    for (var i = 0; i < Fitnesses.Length; i++)
    {
        squaredDeviationsSum = squaredDeviationsSum + Math.Pow(Fitnesses[i] - meanFitness, 2);
    }
    var sigma = Math.Sqrt(squaredDeviationsSum / (Fitnesses.Length - 1));

    // Replace each fitness with its sigma-scaled value. Sigma scaling can go
    // negative, which fitness-proportionate selection cannot accept, so values
    // below the floor are replaced by the floor (some value >= 0).
    for (var i = 0; i < Fitnesses.Length; i++)
    {
        var scaled = (double)SigmaScaledValue(Fitnesses[i], meanFitness, sigma, s);
        Fitnesses[i] = scaled < _fitnessFloor ? _fitnessFloor : scaled;
    }

    // Organize the distribution; an all-zero fitness array is fine.
    RandomChoice.OrganizeDistribution(Fitnesses, true);
}
/// <summary>
/// Assuming that either minSize and maxSize, or sizeDistribution, is defined,
/// picks a random size from minSize...maxSize inclusive, or randomly
/// from sizeDistribution.
/// </summary>
public virtual int PickSize(IEvolutionState state, int thread)
{
    if (MinSize > 0)
    {
        // Uniform pick over the inclusive range MinSize..MaxSize.
        var range = MaxSize - MinSize + 1;
        return state.Random[thread].NextInt(range) + MinSize;
    }
    if (SizeDistribution == null)
    {
        throw new ApplicationException("Neither minSize nor sizeDistribution is defined in GPNodeBuilder");
    }
    // Distribution indices are 0-based but sizes start at 1, hence the + 1.
    return RandomChoice.PickFromDistribution(SizeDistribution, state.Random[thread].NextFloat(), CHECK_BOUNDARY) + 1;
}
// Counts down from 'seconds' to 1 in the target channel, one message per
// second, then posts a launch line (random from config, or the default).
void Countdown(string target, int seconds)
{
    const string stdLaunch = "Liftoff!";
    var launchLine = RandomChoice.ChooseRndItem(conf.LaunchChoices) ?? stdLaunch;

    irc.SendMessage(target, "Commencing Countdown");
    Thread.Sleep(500);

    for (var remaining = seconds; remaining > 0; remaining--)
    {
        irc.SendMessage(target, remaining.ToString());
        Thread.Sleep(1000);
    }

    irc.SendMessage(target, launchLine);
}
/// <summary>
/// Completely override FitProportionateSelection.prepareToProduce.
/// Converts each individual's raw fitness to its Boltzmann expected value
/// and organizes the results into a selection distribution.
/// </summary>
public override void PrepareToProduce(IEvolutionState s, int subpop, int thread)
{
    var individuals = s.Population.Subpops[subpop].Individuals;
    Fitnesses = new double[individuals.Count];

    for (var i = 0; i < Fitnesses.Length; i++)
    {
        // Adjust the fitness proportion according to the current temperature.
        Fitnesses[i] = (double)BoltzmannExpectedValue(individuals[i].Fitness.Value, s);
        if (Fitnesses[i] < 0)
        {
            s.Output.Fatal("Discovered a negative fitness value. BoltzmannnSelection requires that all fitness values be non-negative(offending subpopulation #" + subpop + ")");
        }
    }

    // Organize the distribution; an all-zero fitness array is fine.
    RandomChoice.OrganizeDistribution(Fitnesses, true);
}
// Steps one RandomChoice, then compares its result against 5 freshly
// constructed RandomChoice instances; reports via 'differentResult' whether
// any of them stepped differently (i.e. randomness was observed).
// BUG FIX: previously this method never assigned false to 'differentResult',
// so its value depended entirely on what the caller passed in — a caller that
// initialized it to true could never see a failure. It is now set explicitly
// at entry, making the contract self-contained.
private static void validateRandomChoice(StrategyChoice startStrategy, ref bool differentResult, ref RandomChoice p)
{
    differentResult = false;

    var first = p.Step(startStrategy);
    for (int i = 0; i < 5; i++)
    {
        p = new RandomChoice();
        if (p.Step(startStrategy) != first)
        {
            differentResult = true;
            break;
        }
    }
}
// Recursively fills parent's children starting at index 'pickchild', dividing
// the remaining node budget 'outof' among them. The last child receives
// whatever budget is left; earlier children draw their sizes from the
// CHILD_D distribution for (function set, parent node, budget, child index).
// 'parentc' is the prototype node used to look up that distribution.
internal virtual void FillNodeWithChildren(IEvolutionState state, int thread, GPInitializer initializer, int functionset, GPNode parent, GPNode parentc, int pickchild, int outof, IMersenneTwister mt)
{
    if (pickchild == parent.Children.Length - 1)
    {
        // Last child: give it the entire remaining budget.
        parent.Children[pickchild] = CreateTreeOfType(state, thread, initializer, functionset, parent.Constraints(initializer).ChildTypes[pickchild].Type, outof, mt);
    }
    else
    {
        // Draw this child's size from the precomputed distribution, then
        // recurse for the remaining children with the reduced budget.
        var size = RandomChoice.PickFromDistribution(CHILD_D[functionset][IntForNode(parentc)][outof][pickchild], mt.NextDouble());
        parent.Children[pickchild] = CreateTreeOfType(state, thread, initializer, functionset, parent.Constraints(initializer).ChildTypes[pickchild].Type, size, mt);
        FillNodeWithChildren(state, thread, initializer, functionset, parent, parentc, pickchild + 1, outof - size, mt);
    }
    // Hook the freshly built child back up to its parent.
    parent.Children[pickchild].Parent = parent;
    parent.Children[pickchild].ArgPosition = (sbyte)pickchild;
}
// don't need clone etc.

#region Operations

// Loads each individual's raw fitness and organizes the values into a
// fitness-proportionate selection distribution.
public override void PrepareToProduce(IEvolutionState s, int subpop, int thread)
{
    base.PrepareToProduce(s, subpop, thread);

    var individuals = s.Population.Subpops[subpop].Individuals;
    Fitnesses = new double[individuals.Count];

    for (var i = 0; i < Fitnesses.Length; i++)
    {
        Fitnesses[i] = individuals[i].Fitness.Value;
        if (Fitnesses[i] < 0)
        {
            s.Output.Fatal("Discovered a negative fitness value." + " FitProportionateSelection requires that all fitness values be non-negative(offending subpop #" + subpop + ")");
        }
    }

    // Organize the distribution; an all-zero fitness array is fine.
    RandomChoice.OrganizeDistribution(Fitnesses, true);
}
// Timer tick: produce a payload and route it to one of two sinks at random,
// then re-arm the one-shot timer regardless of outcome.
public void TimerCallback(object obj)
{
    try
    {
        var payload = RandomChoice.SomethingActionsWithUsers();

        // 50/50: write the payload to a file, or push it to RabbitMQ.
        if (_random.Next(0, 2) == 0)
        {
            JsonToFile.WriteJson(payload);
        }
        else
        {
            _jsonToRabbit.AddToRabbit(payload);
        }
    }
    finally
    {
        // Schedule the next single-shot tick in 2 seconds.
        _timer.Change(2000, Timeout.Infinite);
    }
}
// don't need clone etc.
// Prepares stochastic universal sampling: loads fitnesses, picks the shared
// random offset, builds (and optionally shuffles) the index map, and
// organizes the fitness distribution.
public override void PrepareToProduce(IEvolutionState s, int subpop, int thread)
{
    base.PrepareToProduce(s, subpop, thread);
    LastIndex = 0;
    Steps = 0;

    var individuals = s.Population.Subpops[subpop].Individuals;
    Fitnesses = new double[individuals.Count];

    // One random offset shared by every pointer of the universal sample.
    Offset = (double)(s.Random[thread].NextDouble() / Fitnesses.Length);

    // Load raw fitnesses; the distribution itself is organized at the end.
    for (var i = 0; i < Fitnesses.Length; i++)
    {
        Fitnesses[i] = individuals[i].Fitness.Value;
        if (Fitnesses[i] < 0)
        {
            s.Output.Fatal("Discovered a negative fitness value. SUSSelection requires that all fitness values be non-negative(offending subpopulation #" + subpop + ")");
        }
    }

    // Identity index map, optionally shuffled together with the fitnesses.
    Indices = new int[individuals.Count];
    for (var i = 0; i < Indices.Length; i++)
    {
        Indices[i] = i;
    }
    if (Shuffle)
    {
        ShuffleFitnessesAndIndices(s.Random[thread], Fitnesses, Indices);
    }

    // Organize the distribution; an all-zero fitness array is fine.
    RandomChoice.OrganizeDistribution(Fitnesses, true);
}
// Use this for initialization: wires up components, performs the configured
// wake-up spawns, then starts the periodic spawn coroutine.
private void Start()
{
    if (searchForSpecialBehavior)
    {
        specialBehavior = GetComponent<SpawnBehavior>();
    }
    referenceFrame = GetComponentInParent<ReferenceFrame>();
    randomChoice = new RandomChoice(spawnables);

    // Spawn the configured initial population (no-op for an empty array).
    foreach (var entry in spawnAtWakeUp)
    {
        for (var n = 0; n < entry.quantity; n++)
        {
            Spawn(entry.element);
        }
    }

    StartCoroutine(WaitAndSpawn());
}
/// <summary>
/// A private function which recursively returns a GROW tree to NewRootedTree(...)
/// </summary>
/// <param name="state">Evolution state providing the per-thread RNG.</param>
/// <param name="current">Depth of the node being built (0-based).</param>
/// <param name="type">GP type the new node must return.</param>
/// <param name="thread">Thread index for the RNG.</param>
/// <param name="parent">Parent to attach the new node to.</param>
/// <param name="argPosition">Child slot in the parent.</param>
/// <param name="funcs">Function set to draw nodes from.</param>
/// <param name="pfuncs">Same set, exposing PTC terminal/nonterminal probabilities.</param>
/// <param name="nonterminalSelectProbs">Per-type probability of choosing a nonterminal.</param>
private GPNode Ptc1(IEvolutionState state, int current, GPType type, int thread, IGPNodeParent parent, int argPosition, GPFunctionSet funcs, IPTCFunctionSet pfuncs, double[] nonterminalSelectProbs)
{
    // ptc1 can mess up if there are no available terminals for a given type. If this occurs,
    // and we find ourselves unable to pick a terminal when we want to do so, we will issue a warning,
    // and pick a nonterminal, violating the PTC1 size and depth contracts. This can lead to pathological situations
    // where the system will continue to go on and on unable to stop because it can't pick a terminal,
    // resulting in running out of memory or some such. But there are cases where we'd want to let
    // this work itself out.
    var triedTerminals = false;
    var t = type.Type;
    var terminals = funcs.Terminals[t];
    var nonterminals = funcs.Nonterminals[t];
    var nodes = funcs.Nodes[t];

    if (nodes.Length == 0)
    {
        ErrorAboutNoNodeWithType(type, state); // total failure
    }

    // Now pick if we're at max depth
    // OR if we're below p_y
    // OR if there are NO nonterminals!
    // [first set triedTerminals]
    // AND if there are available terminals
    // NOTE: '(triedTerminals = true)' is a deliberate assignment inside the
    // condition — it records that the terminal path was attempted before the
    // terminals.Length check can short-circuit the expression.
    if (((current + 1 >= MaxDepth) || !(state.Random[thread].NextBoolean(nonterminalSelectProbs[t])) || WarnAboutNonterminal(nonterminals.Length == 0, type, false, state)) && (triedTerminals = true) && terminals.Length != 0)
    {
        var n = terminals[RandomChoice.PickFromDistribution(pfuncs.TerminalProbabilities(t), state.Random[thread].NextDouble())].LightClone();
        n.ResetNode(state, thread); // give ERCs a chance to randomize
        n.ArgPosition = (sbyte)argPosition;
        n.Parent = parent;
        return (n);
    }
    // above p_y, pick a nonterminal by q_ny probabilities
    else
    {
        if (triedTerminals)
        {
            WarnAboutNoTerminalWithType(type, false, state); // we tried terminals and we're here because there were none!
        }
        var n = nonterminals[RandomChoice.PickFromDistribution(pfuncs.NonterminalProbabilities(t), state.Random[thread].NextDouble())].LightClone();
        n.ResetNode(state, thread); // give ERCs a chance to randomize
        n.ArgPosition = (sbyte)argPosition;
        n.Parent = parent;

        // Populate the node: recursively build each child one level deeper.
        var childtypes = n.Constraints((GPInitializer)state.Initializer).ChildTypes;
        for (var x = 0; x < childtypes.Length; x++)
        {
            n.Children[x] = Ptc1(state, current + 1, childtypes[x], thread, n, x, funcs, pfuncs, nonterminalSelectProbs);
        }
        return (n);
    }
}
/// <summary>
/// Normalizes and arranges the probabilities in sources so that they
/// are usable by pickRandom(...). If the sources have all zero probabilities,
/// then a uniform selection is used. Negative probabilities will
/// generate an ArithmeticException, as will an empty source array.
/// </summary>
/// <param name="sources">Breeding sources whose probabilities are organized in place.</param>
public static void SetupProbabilities(IBreedingSource[] sources)
{
    // The trailing 'true' tolerates an all-zero distribution (uniform fallback,
    // per the summary above).
    RandomChoice.OrganizeDistribution(sources, sources[0], true);
}
/// <summary>
/// Builds the ROOT_D and CHILD_D probability tables from the precomputed tree
/// counts (NUMTREESROOTEDBYNODE / NUMCHILDPERMUTATIONS), organizing each
/// slice into a distribution usable with RandomChoice.
/// </summary>
public virtual void ComputePercentages()
{
    // load ROOT_D: for each (function set f, return type t, tree size s),
    // the probability of each candidate root node.
    for (var f = 0; f < NUMTREESOFTYPE.Length; f++)
    {
        for (var t = 0; t < NUMTREESOFTYPE[f].Length; t++)
        {
            for (var s = 0; s < NUMTREESOFTYPE[f][t].Length; s++)
            {
                ROOT_D[f][t][s] = new UniformGPNodeStorage[FunctionSets[f].Nodes[t].Length];
                for (var x = 0; x < ROOT_D[f][t][s].Length; x++)
                {
                    ROOT_D[f][t][s][x] = new UniformGPNodeStorage();
                    ROOT_D[f][t][s][x].Node = FunctionSets[f].Nodes[t][x];
                    ROOT_D[f][t][s][x].Prob = GetProb(NUMTREESROOTEDBYNODE[f][IntForNode(ROOT_D[f][t][s][x].Node)][s]);
                }
                // organize the distribution — but first check whether it is
                // all zeros; organize only on the first nonzero entry, and
                // record the all-zero case in ROOT_D_ZERO.
                for (var x = 0; x < ROOT_D[f][t][s].Length; x++)
                {
                    if (ROOT_D[f][t][s][x].Prob != 0.0)
                    {
                        // don't need to check for negatives here I believe
                        RandomChoice.OrganizeDistribution(ROOT_D[f][t][s], ROOT_D[f][t][s][0]);
                        ROOT_D_ZERO[f][t][s] = false;
                        break;
                    }
                    else
                    {
                        ROOT_D_ZERO[f][t][s] = true;
                    }
                }
            }
        }
    }

    // load CHILD_D: for each (function set f, parent node p, remaining budget o,
    // child index c), the distribution over possible child subtree sizes s.
    for (var f = 0; f < NUMCHILDPERMUTATIONS.Length; f++)
    {
        for (var p = 0; p < NUMCHILDPERMUTATIONS[f].Length; p++)
        {
            for (var o = 0; o < MaxTreeSize + 1; o++)
            {
                for (var c = 0; c < MaxArity; c++)
                {
                    CHILD_D[f][p][o][c] = new double[o + 1];
                    for (var s = 0; s < CHILD_D[f][p][o][c].Length; s++)
                    {
                        CHILD_D[f][p][o][c][s] = GetProb(NUMCHILDPERMUTATIONS[f][p][s][o][c]);
                    }
                    // organize the distribution — skip organizing when every
                    // entry is zero (same all-zero check as above).
                    for (var x = 0; x < CHILD_D[f][p][o][c].Length; x++)
                    {
                        if (CHILD_D[f][p][o][c][x] != 0.0)
                        {
                            // don't need to check for negatives here I believe
                            RandomChoice.OrganizeDistribution(CHILD_D[f][p][o][c]);
                            break;
                        }
                    }
                }
            }
        }
    }
}
/// <summary>
/// Offline preprocessing step: registers every function set, assigns each
/// distinct GPNode a unique integer id, allocates the counting/probability
/// tensors, counts the trees of every (function set, type, size), and finally
/// compiles the distributions via ComputePercentages().
/// </summary>
/// <param name="state">Evolution state (provides the GPInitializer and output).</param>
/// <param name="maxTreeSize">Largest tree size to precompute tables for.</param>
public virtual void Preprocess(IEvolutionState state, int maxTreeSize)
{
    state.Output.Message("Determining Tree Sizes");
    MaxTreeSize = maxTreeSize;
    var functionSetRepository = ((GPInitializer)state.Initializer).FunctionSetRepository;

    // Put each function set into the arrays, numbering them in enumeration order.
    FunctionSets = new GPFunctionSet[functionSetRepository.Count];
    FunctionSetsHash = Hashtable.Synchronized(new Hashtable());
    var e = functionSetRepository.Values.GetEnumerator();
    var count = 0;
    while (e.MoveNext())
    {
        var funcs = (GPFunctionSet)e.Current;
        FunctionSetsHash[funcs] = count;
        FunctionSets[count++] = funcs;
    }

    // For each function set, assign each GPNode to a unique integer
    // so we can keep track of it (ick, this will be inefficient!)
    FuncNodesHash = Hashtable.Synchronized(new Hashtable());
    var t_nodes = Hashtable.Synchronized(new Hashtable());
    count = 0;
    MaxArity = 0;
    for (var x = 0; x < FunctionSets.Length; x++)
    {
        GPNode n;
        // hash all the nodes so we can remove duplicates
        for (var typ = 0; typ < FunctionSets[x].Nodes.Length; typ++)
        {
            for (var nod = 0; nod < FunctionSets[x].Nodes[typ].Length; nod++)
            {
                t_nodes[n = FunctionSets[x].Nodes[typ][nod]] = n;
            }
        }
        // rehash with Integers, yuck; also track the largest arity seen.
        e = t_nodes.Values.GetEnumerator();
        GPNode tmpn;
        while (e.MoveNext())
        {
            tmpn = (GPNode)e.Current;
            if (MaxArity < tmpn.Children.Length)
            {
                MaxArity = tmpn.Children.Length;
            }
            if (!FuncNodesHash.ContainsKey(tmpn)) // don't remap the node; it'd make holes
            {
                FuncNodesHash[tmpn] = count++;
            }
        }
    }
    NumFuncNodes = FuncNodesHash.Count;

    var initializer = (GPInitializer)state.Initializer;
    var numAtomicTypes = initializer.NumAtomicTypes;
    var numSetTypes = initializer.NumSetTypes;
    var functionSetsLength = FunctionSets.Length;
    var atomicPlusSetTypes = numAtomicTypes + numSetTypes;
    var maxTreeSizePlusOne = MaxTreeSize + 1;

    // set up the counting/probability tensors
    // NUMTREESOFTYPE
    NUMTREESOFTYPE = TensorFactory.Create<BigInteger>(functionSetsLength, atomicPlusSetTypes, maxTreeSizePlusOne);
    // NUMTREESROOTEDBYNODE
    NUMTREESROOTEDBYNODE = TensorFactory.Create<BigInteger>(functionSetsLength, NumFuncNodes, maxTreeSizePlusOne);
    // NUMCHILDPERMUTATIONS
    NUMCHILDPERMUTATIONS = TensorFactory.Create<BigInteger>(functionSetsLength, NumFuncNodes, maxTreeSizePlusOne, maxTreeSizePlusOne, MaxArity);
    // ROOT_D
    ROOT_D = TensorFactory.CreateOpenEnded<UniformGPNodeStorage>(functionSetsLength, atomicPlusSetTypes, maxTreeSizePlusOne); // 4D OpenEnded
    // ROOT_D_ZERO
    ROOT_D_ZERO = TensorFactory.Create<bool>(functionSetsLength, atomicPlusSetTypes, maxTreeSizePlusOne);
    // CHILD_D
    CHILD_D = TensorFactory.CreateOpenEnded<double>(functionSetsLength, NumFuncNodes, maxTreeSizePlusOne, maxTreeSizePlusOne); // 5D OpenEnded

    var types = ((GPInitializer)(state.Initializer)).Types;
    // _TrueSizesBigInt
    TrueSizesBigInt = TensorFactory.Create<BigInteger>(functionSetsLength, atomicPlusSetTypes, maxTreeSizePlusOne);

    // Go through each function set and determine numbers
    // (this will take quite a while! Thankfully it's offline)
    for (var x = 0; x < FunctionSets.Length; x++)
    {
        for (var y = 0; y < numAtomicTypes + numSetTypes; y++)
        {
            for (var z = 1; z <= MaxTreeSize; z++)
            {
                state.Output.Message("FunctionSet: " + FunctionSets[x].Name + ", Type: " + types[y].Name + ", Size: " + z + " num: " + (TrueSizesBigInt[x][y][z] = NumTreesOfType(initializer, x, y, z)));
            }
        }
    }

    state.Output.Message("Compiling Distributions");
    TrueSizes = TensorFactory.Create<double>(functionSetsLength, atomicPlusSetTypes, maxTreeSizePlusOne);

    // convert to doubles and organize distribution
    for (var x = 0; x < FunctionSets.Length; x++)
    {
        for (var y = 0; y < numAtomicTypes + numSetTypes; y++)
        {
            for (var z = 1; z <= MaxTreeSize; z++)
            {
                TrueSizes[x][y][z] = (double)TrueSizesBigInt[x][y][z]; // BRS : DOES THIS TRUNCATE ANYTHING ???
            }
            // and if this is all zero (a possibility) we should be forgiving (hence the 'true') -- I *think*
            RandomChoice.OrganizeDistribution(TrueSizes[x][y], true);
        }
    }

    // compute our percentages
    ComputePercentages();
}
// Use this for initialization: resolves components, runs the wake-up spawns,
// and kicks off the spawn coroutine.
private void Start()
{
    if (searchForSpecialBehavior)
    {
        specialBehavior = GetComponent<SpawnBehavior>();
    }

    referenceFrame = GetComponentInParent<ReferenceFrame>();
    randomChoice = new RandomChoice(spawnables);

    // Perform the configured initial spawns (nothing happens for an empty array).
    foreach (var wakeUpEntry in spawnAtWakeUp)
    {
        for (var count = 0; count < wakeUpEntry.quantity; count++)
        {
            Spawn(wakeUpEntry.element);
        }
    }

    StartCoroutine(WaitAndSpawn());
}
/// <summary>
/// Builds a PTC2 tree of (approximately) requestedSize nodes rooted at the
/// given type, attaching it to 'parent' at 'argPosition'. Terminal/nonterminal
/// picks are drawn from the function set's PTC probability tables; expansion
/// proceeds breadth-ish via a random-dequeue work queue.
/// </summary>
public override GPNode NewRootedTree(IEvolutionState state, GPType type, int thread, IGPNodeParent parent, GPFunctionSet funcs, int argPosition, int requestedSize)
{
    // ptc2 can mess up if there are no available terminals for a given type. If this occurs,
    // and we find ourselves unable to pick a terminal when we want to do so, we will issue a warning,
    // and pick a nonterminal, violating the ptc2 size and depth contracts. This can lead to pathological situations
    // where the system will continue to go on and on unable to stop because it can't pick a terminal,
    // resulting in running out of memory or some such. But there are cases where we'd want to let
    // this work itself out.
    var triedTerminals = false;

    if (!(funcs is IPTCFunctionSet))
    {
        state.Output.Fatal("Set " + funcs.Name + " is not of the class ec.gp.build.IPTCFunctionSet, and so cannot be used with PTC Nodebuilders.");
    }
    var pfuncs = (IPTCFunctionSet)funcs;

    // pick a size from the distribution when the caller didn't request one
    if (requestedSize == NOSIZEGIVEN)
    {
        requestedSize = PickSize(state, thread);
    }

    GPNode root;
    var t = type.Type;
    var terminals = funcs.Terminals[t];
    var nonterminals = funcs.Nonterminals[t];
    var nodes = funcs.Nodes[t];

    if (nodes.Length == 0)
    {
        ErrorAboutNoNodeWithType(type, state); // total failure
    }

    // return a terminal
    // Now pick a terminal if our size is 1
    // OR if there are NO nonterminals!
    // [first set triedTerminals]
    // AND if there are available terminals
    // NOTE: '(triedTerminals = true)' is an intentional assignment inside the
    // condition: it records that the terminal path was attempted before the
    // terminals.Length check can short-circuit.
    if ((requestedSize == 1 || WarnAboutNonterminal(nonterminals.Length == 0, type, false, state)) && (triedTerminals = true) && terminals.Length != 0)
    {
        root = terminals[RandomChoice.PickFromDistribution(pfuncs.TerminalProbabilities(t), state.Random[thread].NextDouble())].LightClone();
        root.ResetNode(state, thread); // give ERCs a chance to randomize
        root.ArgPosition = (sbyte)argPosition;
        root.Parent = parent;
    }
    // return a nonterminal-rooted tree
    else
    {
        if (triedTerminals)
        {
            WarnAboutNoTerminalWithType(type, false, state); // we tried terminals and we're here because there were none!
        }

        // pick a nonterminal
        root = nonterminals[RandomChoice.PickFromDistribution(pfuncs.NonterminalProbabilities(t), state.Random[thread].NextDouble())].LightClone();
        root.ResetNode(state, thread); // give ERCs a chance to randomize
        root.ArgPosition = (sbyte)argPosition;
        root.Parent = parent;

        // set the depth, size, and enqueuing, and reset the random dequeue
        s_size = 0; // pretty critical!
        var s = 1; // number of nodes placed so far (the root)
        var initializer = ((GPInitializer)state.Initializer);
        var childtypes = root.Constraints(initializer).ChildTypes;
        for (var x = 0; x < childtypes.Length; x++)
        {
            Enqueue(root, x, 1); /* depth 1 */
        }

        // Expand queued child slots until none remain.
        while (s_size > 0)
        {
            triedTerminals = false;
            RandomDequeue(state, thread); // sets DequeueNode / DequeueArgpos / DequeueDepth
            type = DequeueNode.Constraints(initializer).ChildTypes[DequeueArgpos];

            var y = type.Type;
            terminals = funcs.Terminals[y];
            nonterminals = funcs.Nonterminals[y];
            nodes = funcs.Nodes[y];

            if (nodes.Length == 0)
            {
                ErrorAboutNoNodeWithType(type, state); // total failure
            }

            // pick a terminal
            // if we need no nonterminal nodes
            // OR if we're at max depth and must pick a terminal
            // OR if there are NO nonterminals!
            // [first set triedTerminals]
            // AND if there are available terminals
            if ((s_size + s >= requestedSize || DequeueDepth == MaxDepth || WarnAboutNonterminal(nonterminals.Length == 0, type, false, state)) && (triedTerminals = true) && terminals.Length != 0)
            {
                var n = terminals[RandomChoice.PickFromDistribution(pfuncs.TerminalProbabilities(y), state.Random[thread].NextDouble())].LightClone();
                DequeueNode.Children[DequeueArgpos] = n;
                n.ResetNode(state, thread); // give ERCs a chance to randomize
                n.ArgPosition = (sbyte)DequeueArgpos;
                n.Parent = DequeueNode;
            }
            // pick a nonterminal and enqueue its children
            else
            {
                if (triedTerminals)
                {
                    WarnAboutNoTerminalWithType(type, false, state); // we tried terminals and we're here because there were none!
                }
                var n = nonterminals[RandomChoice.PickFromDistribution(pfuncs.NonterminalProbabilities(y), state.Random[thread].NextDouble())].LightClone();
                DequeueNode.Children[DequeueArgpos] = n;
                n.ResetNode(state, thread); // give ERCs a chance to randomize
                n.ArgPosition = (sbyte)DequeueArgpos;
                n.Parent = DequeueNode;

                childtypes = n.Constraints(initializer).ChildTypes;
                for (var x = 0; x < childtypes.Length; x++)
                {
                    Enqueue(n, x, DequeueDepth + 1);
                }
            }
            s++;
        }
    }
    return (root);
}
/// <summary>
/// Loads this GPNodeBuilder's size policy from the parameters: either a
/// min/max size pair (both required, min &lt;= max) or a size distribution of
/// P_NUMSIZES entries, which is validated and then normalized in place.
/// </summary>
public virtual void Setup(IEvolutionState state, IParameter paramBase)
{
    var def = DefaultBase;

    // min and max size — both must be present if either is.
    if (state.Parameters.ParameterExists(paramBase.Push(P_MINSIZE), def.Push(P_MINSIZE)))
    {
        if (!(state.Parameters.ParameterExists(paramBase.Push(P_MAXSIZE), def.Push(P_MAXSIZE))))
        {
            state.Output.Fatal("This GPNodeBuilder has a " + P_MINSIZE + " but not a " + P_MAXSIZE + ".");
        }
        MinSize = state.Parameters.GetInt(paramBase.Push(P_MINSIZE), def.Push(P_MINSIZE), 1);
        if (MinSize == 0)
        {
            state.Output.Fatal("The GPNodeBuilder must have a min size >= 1.", paramBase.Push(P_MINSIZE), def.Push(P_MINSIZE));
        }
        MaxSize = state.Parameters.GetInt(paramBase.Push(P_MAXSIZE), def.Push(P_MAXSIZE), 1);
        if (MaxSize == 0)
        {
            state.Output.Fatal("The GPNodeBuilder must have a max size >= 1.", paramBase.Push(P_MAXSIZE), def.Push(P_MAXSIZE));
        }
        if (MinSize > MaxSize)
        {
            state.Output.Fatal("The GPNodeBuilder must have min size <= max size.", paramBase.Push(P_MINSIZE), def.Push(P_MINSIZE));
        }
    }
    else if (state.Parameters.ParameterExists(paramBase.Push(P_MAXSIZE), def.Push(P_MAXSIZE)))
    {
        state.Output.Fatal("This GPNodeBuilder has a " + P_MAXSIZE + " but not a " + P_MINSIZE + ".", paramBase.Push(P_MAXSIZE), def.Push(P_MAXSIZE));
    }
    // load sizeDistribution
    else if (state.Parameters.ParameterExists(paramBase.Push(P_NUMSIZES), def.Push(P_NUMSIZES)))
    {
        var siz = state.Parameters.GetInt(paramBase.Push(P_NUMSIZES), def.Push(P_NUMSIZES), 1);
        if (siz == 0)
        {
            state.Output.Fatal("The number of sizes in the GPNodeBuilder's distribution must be >= 1. ");
        }
        SizeDistribution = new double[siz];

        // Distribution entries are 1-based; size #0 is ignored.
        if (state.Parameters.ParameterExists(paramBase.Push(P_SIZE).Push("0"), def.Push(P_SIZE).Push("0")))
        {
            state.Output.Warning("GPNodeBuilder does not use size #0 in the distribution", paramBase.Push(P_SIZE).Push("0"), def.Push(P_SIZE).Push("0"));
        }

        // Read each weight; negatives/missing become 0.0 with a warning.
        var sum = 0.0;
        for (var x = 0; x < siz; x++)
        {
            SizeDistribution[x] = state.Parameters.GetDouble(paramBase.Push(P_SIZE).Push("" + (x + 1)), def.Push(P_SIZE).Push("" + (x + 1)), 0.0f);
            if (SizeDistribution[x] < 0.0)
            {
                state.Output.Warning("Distribution value #" + x + " negative or not defined, assumed to be 0.0", paramBase.Push(P_SIZE).Push("" + (x + 1)), def.Push(P_SIZE).Push("" + (x + 1)));
                SizeDistribution[x] = 0.0;
            }
            sum += SizeDistribution[x];
        }
        if (sum > 1.0)
        {
            state.Output.Warning("Distribution sums to greater than 1.0", paramBase.Push(P_SIZE), def.Push(P_SIZE));
        }
        if (sum.Equals(0.0))
        {
            state.Output.Fatal("Distribution is all 0's", paramBase.Push(P_SIZE), def.Push(P_SIZE));
        }

        // normalize and prepare
        RandomChoice.OrganizeDistribution(SizeDistribution);
    }
}
/// <summary>
/// Loads the per-type terminal (q_ty) and nonterminal (q_ny) selection
/// probabilities from each node's constraints, warns about suspicious
/// configurations (all-zero or mixed-1.0 probabilities, empty type sets),
/// organizes each non-empty distribution, and allocates the p_y cache.
/// </summary>
public override void Setup(IEvolutionState state, IParameter paramBase)
{
    base.Setup(state, paramBase);

    // load our probabilities here.
    q_ny = new double[Nonterminals.Length][];
    q_ty = new double[Terminals.Length][];

    var allOnes = true;
    var noOnes = true;
    var allZeros = true;
    var initializer = ((GPInitializer)state.Initializer);

    // Nonterminal probabilities, tracking the all-zero / all-one diagnostics.
    for (var type = 0; type < Nonterminals.Length; type++)
    {
        q_ny[type] = new double[Nonterminals[type].Length];
        for (var x = 0; x < Nonterminals[type].Length; x++)
        {
            q_ny[type][x] = Nonterminals[type][x].Constraints(initializer).ProbabilityOfSelection;
            if (q_ny[type][x] != 0.0)
            {
                allZeros = false;
            }
            if (q_ny[type][x] == 1.0)
            {
                noOnes = false;
            }
            else
            {
                allOnes = false;
            }
        }
    }
    if (allZeros)
    {
        state.Output.Warning("In this function set, the probabilities of all nonterminal functions have a 0.0 selection probability" + " -- this will cause them all to be selected uniformly. That could be an error.", paramBase);
    }

    // BRS : TODO : Investigate the "allZeroes" logic as described below...
    // In ECJ v20 the following is reinitialized to false,
    // but I think that is a BUG because it is about to check again
    // and set it to false if any of the probabilities do NOT equal zero.
    // allZeros = false;
    // I'm setting this to true for the reason described above!
    allZeros = true;

    // Terminal probabilities; note allOnes/noOnes continue accumulating
    // across BOTH terminal and nonterminal scans.
    for (var type = 0; type < Terminals.Length; type++)
    {
        q_ty[type] = new double[Terminals[type].Length];
        for (var x = 0; x < Terminals[type].Length; x++)
        {
            q_ty[type][x] = Terminals[type][x].Constraints(initializer).ProbabilityOfSelection;
            if (q_ty[type][x] != 0.0)
            {
                allZeros = false;
            }
            if (q_ty[type][x] == 1.0)
            {
                noOnes = false;
            }
            else
            {
                allOnes = false;
            }
        }
    }
    if (allZeros)
    {
        state.Output.Warning("In this function set, the probabilities of all terminal functions have a 0.0 selection probability" + " -- this will cause them all to be selected uniformly. That could be an error.", paramBase);
    }

    if (!allOnes && !noOnes)
    {
        state.Output.Warning("In this function set, there are some functions with a selection probability of 1.0," + " but not all of them. That could be an error.", paramBase);
    }

    // set up our node probabilities. Allow all zeros.
    for (var x = 0; x < q_ty.Length; x++)
    {
        if (q_ty[x].Length == 0)
        {
            state.Output.Warning("Function Set " + Name + " has no terminals for type number " + x + ". This may cause problems for you.");
        }
        else
        {
            RandomChoice.OrganizeDistribution(q_ty[x], true);
        }
        if (q_ny[x].Length == 0)
        {
            state.Output.Warning("Function Set " + Name + " has no nonterminals for type number " + x + ". This may cause problems for you.");
        }
        else
        {
            RandomChoice.OrganizeDistribution(q_ny[x], true);
        }
    }

    // set up cache
    p_y = new double[CACHE_SIZE][];
}
/// <summary>
/// Sets up all the RuleSetConstraints, loading them from the parameter
/// file. This must be called before anything is called which refers
/// to a type by Name.
/// </summary>
/// <param name="state">Evolution state supplying the parameter database, initializer, and output.</param>
/// <param name="paramBase">Base parameter under which all of this constraint's settings live.</param>
public virtual void Setup(IEvolutionState state, IParameter paramBase)
{
    // What's my name? (Fatal if missing — the name is the registry key below.)
    Name = state.Parameters.GetString(paramBase.Push(P_NAME), null);
    if (Name == null)
    {
        state.Output.Fatal("No name was given for this RuleSetConstraints.", paramBase.Push(P_NAME));
    }

    // Register me, and reject a duplicate registration under the same name.
    var tempObject = ((RuleInitializer)state.Initializer).RuleSetConstraintRepository[Name];
    ((RuleInitializer)state.Initializer).RuleSetConstraintRepository[Name] = this;
    var oldConstraints = (RuleSetConstraints)(tempObject);
    if (oldConstraints != null)
    {
        state.Output.Fatal("The rule constraints \"" + Name + "\" has been defined multiple times.", paramBase.Push(P_NAME));
    }

    // Load my prototypical Rule and let it configure itself.
    RulePrototype = (Rule)(state.Parameters.GetInstanceForParameter(paramBase.Push(P_RULE), null, typeof(Rule)));
    RulePrototype.Setup(state, paramBase.Push(P_RULE));

    // Mutation probabilities: add, delete, and randomize-order; each must lie in [0, 1].
    p_add = state.Parameters.GetDouble(paramBase.Push(P_ADD_PROB), null, 0);
    if (p_add < 0 || p_add > 1)
    {
        state.Output.Fatal("Parameter not found, or its value is outside of allowed range [0..1].", paramBase.Push(P_ADD_PROB));
    }
    p_del = state.Parameters.GetDouble(paramBase.Push(P_DEL_PROB), null, 0);
    if (p_del < 0 || p_del > 1)
    {
        state.Output.Fatal("Parameter not found, or its value is outside of allowed range [0..1].", paramBase.Push(P_DEL_PROB));
    }
    p_randorder = state.Parameters.GetDouble(paramBase.Push(P_RAND_ORDER_PROB), null, 0);
    if (p_randorder < 0 || p_randorder > 1)
    {
        state.Output.Fatal("Parameter not found, or its value is outside of allowed range [0..1].", paramBase.Push(P_RAND_ORDER_PROB));
    }

    // now, we are going to load EITHER min/max size OR a size distribution, or both
    // (the size distribution takes precedence)

    // Reset min and max size. Errors here are accumulated and flushed via ExitIfErrors()
    // so the user sees all of them at once.
    // NOTE(review): this guard only reports a min-without-max; a max-without-min is
    // accepted silently (ResetMinSize then comes from GetInt's default). Asymmetric —
    // confirm whether that is intended upstream.
    if (state.Parameters.ParameterExists(paramBase.Push(P_RESETMINSIZE), null) || state.Parameters.ParameterExists(paramBase.Push(P_RESETMAXSIZE), null))
    {
        if (!(state.Parameters.ParameterExists(paramBase.Push(P_RESETMAXSIZE), null)))
        {
            state.Output.Error("This RuleSetConstraints has a " + P_RESETMINSIZE + " but not a " + P_RESETMAXSIZE + ".");
        }
        ResetMinSize = state.Parameters.GetInt(paramBase.Push(P_RESETMINSIZE), null, 0);
        if (ResetMinSize == -1)
        {
            state.Output.Error("If min&max are defined, RuleSetConstraints must have a min size >= 0.", paramBase.Push(P_RESETMINSIZE), null);
        }
        ResetMaxSize = state.Parameters.GetInt(paramBase.Push(P_RESETMAXSIZE), null, 0);
        if (ResetMaxSize == -1)
        {
            state.Output.Error("If min&max are defined, RuleSetConstraints must have a max size >= 0.", paramBase.Push(P_RESETMAXSIZE), null);
        }
        if (ResetMinSize > ResetMaxSize)
        {
            state.Output.Error("If min&max are defined, RuleSetConstraints must have min size <= max size.", paramBase.Push(P_RESETMINSIZE), null);
        }
        state.Output.ExitIfErrors();
    }

    // Load SizeDistribution: an explicit per-size probability table of P_NUMSIZES entries.
    if (state.Parameters.ParameterExists(paramBase.Push(P_NUMSIZES), null))
    {
        var siz = state.Parameters.GetInt(paramBase.Push(P_NUMSIZES), null, 1);
        if (siz == 0)
        {
            state.Output.Fatal("The number of sizes in the RuleSetConstraints's distribution must be >= 1. ");
        }
        SizeDistribution = new double[siz];
        var sum = 0.0;
        for (var x = 0; x < siz; x++)
        {
            SizeDistribution[x] = state.Parameters.GetDouble(paramBase.Push(P_RESETSIZE).Push("" + x), null, 0.0);
            if (SizeDistribution[x] < 0.0)
            {
                // Negative (or missing, per GetDouble's sentinel — TODO confirm) entries
                // are clamped to zero with a warning rather than treated as fatal.
                state.Output.Warning("Distribution value #" + x + " negative or not defined, assumed to be 0.0", paramBase.Push(P_RESETSIZE).Push("" + x), null);
                // NOTE(review): float literal 0.0f assigned into a double[] — harmless
                // (implicit widening) but inconsistent with the 0.0 doubles used elsewhere.
                SizeDistribution[x] = 0.0f;
            }
            sum += SizeDistribution[x];
        }
        if (sum > 1.0)
        {
            state.Output.Warning("Distribution sums to greater than 1.0", paramBase.Push(P_RESETSIZE), null);
        }
        if (sum == 0.0)
        {
            state.Output.Fatal("Distribution is all 0's", paramBase.Push(P_RESETSIZE), null);
        }
        // normalize and prepare
        RandomChoice.OrganizeDistribution(SizeDistribution);
    }

    // Hard bounds on rule-set size; absent parameters default to the widest range.
    MinSize = state.Parameters.ParameterExists(paramBase.Push(P_MINSIZE), null) ? state.Parameters.GetInt(paramBase.Push(P_MINSIZE), null, 0) : 0;
    MaxSize = state.Parameters.ParameterExists(paramBase.Push(P_MAXSIZE), null) ? state.Parameters.GetInt(paramBase.Push(P_MAXSIZE), null, 0) : int.MaxValue;

    // Sanity checks: bounds must be consistent with each other and with whichever
    // of (size distribution | reset min/max) was configured above.
    if (MinSize > MaxSize)
    {
        state.Output.Fatal("Cannot have min size greater than max size : (" + MinSize + " > " + MaxSize + ")", paramBase.Push(P_MINSIZE), null);
    }
    if (SizeDistribution != null)
    {
        if (MinSize != 0)
        {
            state.Output.Fatal("Using size distribution, but min size is not 0", paramBase.Push(P_MINSIZE), null);
        }
        if (SizeDistribution.Length - 1 > MaxSize)
        {
            state.Output.Fatal("Using size distribution whose maximum size is higher than max size", paramBase.Push(P_MAXSIZE), null);
        }
    }
    else
    {
        if (ResetMinSize < MinSize)
        {
            state.Output.Fatal("Cannot have min size greater than reset min size : (" + MinSize + " > " + ResetMinSize + ")", paramBase.Push(P_MINSIZE), null);
        }
        // NOTE(review): the message interpolates (MaxSize + " > " + ResetMaxSize) but the
        // condition that fired is ResetMaxSize > MaxSize — the printed comparison reads
        // backwards. Worth confirming against upstream ECJ before changing the string.
        if (ResetMaxSize > MaxSize)
        {
            state.Output.Fatal("Cannot have max size less than reset max size : (" + MaxSize + " > " + ResetMaxSize + ")", paramBase.Push(P_MAXSIZE), null);
        }
    }
}
/// <summary>
/// Picks a random source from an array of sources whose probabilities have
/// already been normalized and summed by the caller: for example, raw
/// probabilities {0.3, 0.2, 0.1, 0.4} are expected here as the cumulative
/// form {0.3, 0.5, 0.6, 1.0}.
/// </summary>
/// <param name="sources">Breeding sources carrying the cumulative probabilities.</param>
/// <param name="prob">A uniform random value used to index into the distribution.</param>
/// <returns>The index of the selected source.</returns>
public static int PickRandom(IBreedingSource[] sources, double prob)
    => RandomChoice.PickFromDistribution(sources, sources[0], prob);
/// <summary>
/// Produces one individual's index by sampling the precomputed Fitnesses
/// distribution with a uniform random draw from the given thread's RNG.
/// </summary>
/// <param name="subpop">Subpopulation number (not consulted by this implementation).</param>
/// <param name="state">Evolution state providing the per-thread random generators.</param>
/// <param name="thread">Thread number whose generator supplies the random draw.</param>
/// <returns>The index of the individual selected from the distribution.</returns>
public override int Produce(int subpop, IEvolutionState state, int thread)
{
    var draw = state.Random[thread].NextDouble();
    // Pick and return an individual from the population.
    return RandomChoice.PickFromDistribution(Fitnesses, draw);
}