/// <summary>
/// Top-level Levin search driver: runs up to <paramref name="maximalIteration"/> search
/// iterations over the SLIM-RNN's weight programs, stopping early once a solution is found.
/// </summary>
/// <param name="maximalIteration">maximal number of Levin-search iterations to run</param>
/// <param name="mustHalt">has the SLIM-RNN to halt to be a valid solution?</param>
/// <param name="wasSolved">true iff some weight assignment solved the task</param>
/// <param name="solutionRnn">the solving SLIM-RNN when solved, otherwise null</param>
public void search(uint maximalIteration, bool mustHalt, out bool wasSolved, out SlimRnn solutionRnn) {
    wasSolved = false;
    solutionRnn = null;

    // register this algorithm so the SLIM-RNN can call back during evaluation
    slimRnn.learningAlgorithm = this;

    for (uint levinSearchIteration = 1; levinSearchIteration <= maximalIteration; levinSearchIteration++) {
        iteration(levinSearchIteration, mustHalt, out wasSolved, out solutionRnn);
        if (wasSolved) {
            break; // was `goto reset` — break reaches the same cleanup directly
        }
    }

    // always unregister, solved or not, so the network holds no dangling callback reference
    slimRnn.learningAlgorithm = null;
}
/// <summary>
/// Dumps the network structure to the console for debugging: every neuron by index,
/// and for each neuron every outgoing connection with its wasUsed flag and weight.
/// </summary>
/// <param name="slimRnn">network to dump</param>
public static void debugConnections(SlimRnn slimRnn) {
    Console.WriteLine("SlimRnn");

    int neuronCounter = 0;
    foreach (SlimRnnNeuron neuron in slimRnn.neurons) {
        // post-increment keeps the printed index in step with the iteration order
        Console.WriteLine(" neuron idx={0}", neuronCounter++);

        int connectionCounter = 0;
        foreach (var connection in neuron.outNeuronsWithWeights) {
            Console.WriteLine(" connection idx={0} wasUsed={1} weight={2}", connectionCounter++, connection.wasUsed, connection.weight);
        }
    }
}
// depth first search over the possible changes of the (eligible) weights of the SLIM-RNN
//
// this version doesn't timeshare the computations (to save RAM)
//
// NOTE(review): this method communicates with the evaluation callbacks through the
// currentWeightChangeTreeElement field — calls to
// ISlimRnnLearningAlgorithm.opportunityToAdjustWeight() during
// tester.doesSlimRnnSolveTask() see whichever tree element is currently under test.
//
// /param levinSearchIteration current Levin-search iteration, forwarded to calcTimebound()
// /param mustHalt has the SLIM-RNN to halt to be a valid solution?
// /param wasSolved set to true iff some weight assignment solved the task
// /param solutionRnn the solving SLIM-RNN when solved, otherwise null
void depthFirstSearch(uint levinSearchIteration, bool mustHalt, out bool wasSolved, out SlimRnn solutionRnn) {
    solutionRnn = null;
    wasSolved = false;

    WeightChangeTreeElement weightChangeTreeRoot = WeightChangeTreeElement.makeRoot();
    currentWeightChangeTreeElement = null; // to ignore calls to ISlimRnnLearningAlgorithm.opportunityToAdjustWeight()

    // scan all connections of all neurons for eligible weights
    List<SlimRnnNeuronWithWeight> eligibleWeights = new List<SlimRnnNeuronWithWeight>();
    foreach (SlimRnnNeuron iNeuron in slimRnn.neurons) {
        eligibleWeights.AddRange(iNeuron.outNeuronsWithWeights.Where(v => v.isEligable));
    }

    // build tree of the possible weight changes
    //
    // by iterating over all elements in the trace and adding all possible weight values to the weightChangeTree;
    // each eligible weight adds one tree level, so a root-to-node path encodes one partial weight assignment
    // (the former inline expansion over weightWithPropabilityTable was folded into
    // createWeightChangeTreeElementsForConnectionAndAddToParent())
    {
        List<WeightChangeTreeElement>
            weightChangeTreeLeafElements = new List<WeightChangeTreeElement> { weightChangeTreeRoot },
            nextWeightChangeTreeLeafElements = new List<WeightChangeTreeElement>();

        foreach (SlimRnnNeuronWithWeight iTrace in eligibleWeights) {
            foreach (WeightChangeTreeElement iWeightChangeTreeElement in weightChangeTreeLeafElements) {
                // expand every current leaf by all candidate values for this connection;
                // the helper links the children to the parent and returns the new leaves
                nextWeightChangeTreeLeafElements.AddRange(createWeightChangeTreeElementsForConnectionAndAddToParent(iTrace, iWeightChangeTreeElement));
            }

            // the freshly created elements become the leaf frontier for the next connection
            weightChangeTreeLeafElements = nextWeightChangeTreeLeafElements;
            nextWeightChangeTreeLeafElements = new List<WeightChangeTreeElement>();
        }
    }

    // set all weights of the trace to zero
    // we do this because the connections are this way inactive and this avoids any call to
    // ISlimRnnLearningAlgorithm.opportunityToAdjustWeight() for connections which are already inside the weightChangeTree
    foreach (var iNeuronWithWeight in eligibleWeights) {
        iNeuronWithWeight.weight = 0.0f;
    }

    // depth-first-search: iterate the weightChange tree with an explicit stack
    List<DepthFirstSearchStackElement> stack = new List<DepthFirstSearchStackElement>();
    stack.Clear();
    stack.push(DepthFirstSearchStackElement.make(weightChangeTreeRoot));

    while (!stack.isEmpty()) {
        DepthFirstSearchStackElement topStackElement = stack.pop();

        // calls to ISlimRnnLearningAlgorithm.opportunityToAdjustWeight() have to modify the tree
        currentWeightChangeTreeElement = topStackElement.treeElement;

        if (!currentWeightChangeTreeElement.isRoot) {
            // do modification of SLIM-RNN
            // OPTIMIZATION TODO< check if we have to do this recursively or if it leads to the right answer with
            // the nonrecursive code; the recursive code is correct >
            //
            // apply the weight of every element on the path from this node up to (excluding) the root
            WeightChangeTreeElement currentWeightUpdateElement = currentWeightChangeTreeElement;
            for (;;) {
                if (currentWeightUpdateElement.isRoot) {
                    break;
                }
                currentWeightUpdateElement.neuronWithWeight.weight = weightWithPropabilityTable[(int)currentWeightUpdateElement.weightWithPropabilityTableIndex].weight;
                currentWeightUpdateElement = currentWeightUpdateElement.parent;
            }

            // we need to label all connections which got already adapted
            // (recorded as (source, target) neuron-index tuples in globalConnectionNeuronIndices)
            currentWeightUpdateElement = currentWeightChangeTreeElement;
            for (;;) {
                if (currentWeightUpdateElement.isRoot) {
                    break;
                }
                var connectionTuple = new Tuple<uint, uint>(currentWeightUpdateElement.neuronWithWeight.source.neuronIndex, currentWeightUpdateElement.neuronWithWeight.target.neuronIndex);
                Debug.Assert(!globalConnectionNeuronIndices.Contains(connectionTuple)); // a connection must not be labeled twice
                globalConnectionNeuronIndices.Add(connectionTuple);
                currentWeightUpdateElement = currentWeightUpdateElement.parent;
            }

            // debug network
            //SlimRnnDebug.debugConnections(slimRnn);

            // evaluate the candidate network under the Levin-search time bound
            double tLim = calcTimebound(levinSearchIteration, topStackElement.treeElement);
            bool slimRnnSolvedTask = tester.doesSlimRnnSolveTask(slimRnn, mustHalt, tLim);
            if (slimRnnSolvedTask) {
                // the task has been solved with this network
                // NOTE(review): we return with the weights/labels of the solution still applied —
                // that is intentional, the caller receives the configured network
                wasSolved = true;
                solutionRnn = slimRnn;
                return;
            }

            // reset all touched connections to 0.0 to avoid any sideeffects
            // OPTIMIZATION TODO< in the recursive version of depth-first-search we don't need to do this because
            // we modify the network with each successive call, so in this version we don't have to reset the whole
            // connections to null >
            currentWeightUpdateElement = currentWeightChangeTreeElement;
            for (;;) {
                if (currentWeightUpdateElement.isRoot) {
                    break;
                }
                currentWeightUpdateElement.neuronWithWeight.weight = 0.0f;
                currentWeightUpdateElement = currentWeightUpdateElement.parent;
            }

            // we need to unlabel all connections which got already adapted
            currentWeightUpdateElement = currentWeightChangeTreeElement;
            for (;;) {
                if (currentWeightUpdateElement.isRoot) {
                    break;
                }
                var connectionTuple = new Tuple<uint, uint>(currentWeightUpdateElement.neuronWithWeight.source.neuronIndex, currentWeightUpdateElement.neuronWithWeight.target.neuronIndex);
                Debug.Assert(globalConnectionNeuronIndices.Contains(connectionTuple)); // must have been labeled above
                globalConnectionNeuronIndices.Remove(connectionTuple);
                currentWeightUpdateElement = currentWeightUpdateElement.parent;
            }
        }

        // push all children for depth-first-search
        foreach (var iTreeChildren in topStackElement.treeElement.children) {
            stack.push(DepthFirstSearchStackElement.make(iTreeChildren));
        }
    }
}
/// <summary>
/// Performs one Levin-search iteration by delegating to the depth-first enumeration
/// of weight programs.
/// </summary>
/// <param name="levinSearchIteration">current iteration number (1-based)</param>
/// <param name="mustHalt">has the SLIM-RNN to halt to be a valid solution?</param>
/// <param name="wasSolved">true iff the task was solved in this iteration</param>
/// <param name="solutionRnn">the solving SLIM-RNN when solved, otherwise null</param>
void iteration(uint levinSearchIteration, bool mustHalt, out bool wasSolved, out SlimRnn solutionRnn) {
    depthFirstSearch(levinSearchIteration, mustHalt, out wasSolved, out solutionRnn);
}
/// <summary>
/// Creates a search instance bound to the network under search and the tester
/// used to decide whether a candidate weight assignment solves the task.
/// </summary>
/// <param name="slimRnn">SLIM-RNN whose weights get searched</param>
/// <param name="tester">oracle deciding whether the configured network solves the task</param>
public UniversalSlimRnnSearch(SlimRnn slimRnn, ITaskSolvedAndVerifiedTester tester) {
    this.tester = tester;
    this.slimRnn = slimRnn;
}