/// <summary>
/// Partitions <paramref name="graph"/> into intervals using the classic
/// interval-construction algorithm: each interval is a maximal single-entry
/// region grown from a header node.
/// </summary>
/// <param name="graph">Control-flow graph to partition.</param>
/// <param name="entry">Entry node; becomes the first interval header.</param>
/// <returns>The sequence of intervals found in the graph.</returns>
public List<Interval> BuildIntervals(DirectedGraph<StructureNode> graph, StructureNode entry)
{
    if (graph == null)
    {
        throw new ArgumentNullException("graph");
    }
    if (entry == null)
    {
        throw new ArgumentNullException("entry");
    }
    var intervalsInGraph = new List<Interval>();    // The sequence of intervals in this graph
    var headers = new WorkList<StructureNode>();    // The sequence of interval header nodes
    var beenInH = new HashSet<StructureNode>();     // The set of nodes that have been in the above sequence at some stage
    headers.Add(entry);
    beenInH.Add(entry);
    StructureNode header;
    while (headers.GetWorkItem(out header))
    {
        Interval newInt = new Interval(intervalID++, header);
        // Process each succesive node in the interval until no more nodes can be added to the interval.
        // NOTE: newInt.Nodes grows while we iterate, so the index-based loop is deliberate.
        for (int i = 0; i < newInt.Nodes.Count; i++)
        {
            StructureNode curNode = newInt.Nodes[i];
            foreach (StructureNode succ in graph.Successors(curNode))
            {
                // Only further consider the current child if it isn't already in the interval
                if (!newInt.Nodes.Contains(succ))
                {
                    // If the current child has all its parents
                    // inside the interval, then add it to the interval. Remove it from the header
                    // sequence if it is on it.
                    if (IsSubSetOf(graph.Predecessors(succ), newInt))
                    {
                        newInt.AddNode(succ);
                        headers.Remove(succ);
                    }
                    // Otherwise, add it to the header sequence if it hasn't already been in it.
                    else if (!beenInH.Contains(succ))
                    {
                        headers.Add(succ);
                        beenInH.Add(succ);
                    }
                }
            }
        }
        // Add the new interval to the sequence of intervals
        intervalsInGraph.Add(newInt);
    }
    return (intervalsInGraph);
}
/// <summary>
/// Builds the intervals of the derived graph <paramref name="derGraph"/> and
/// stores them in <c>derGraph.Intervals</c>, growing each interval from a
/// header node until no more nodes can be absorbed.
/// </summary>
/// <param name="derGraph">Derived graph whose intervals are computed; its Entry must be set.</param>
/// <exception cref="ArgumentNullException">If <paramref name="derGraph"/> is null.</exception>
/// <exception cref="ArgumentException">If <paramref name="derGraph"/> has no entry node.</exception>
public void BuildIntervals(DerivedGraph derGraph)
{
    if (derGraph == null)
        throw new ArgumentNullException(nameof(derGraph));
    if (derGraph.Entry == null)
        throw new ArgumentException("cfg graph must be non-null.", nameof(derGraph));
    var intSeq = derGraph.Intervals;                // The sequence of intervals in this graph
    var headerSeq = new WorkList<StructureNode>();  // The sequence of interval header nodes
    // FIX: this collection is a set queried with Contains() on every edge;
    // a List made that O(n) per lookup. The sibling overload already uses a HashSet.
    var beenInH = new HashSet<StructureNode>();     // The set of nodes that have been in the above sequence at some stage
    headerSeq.Add(derGraph.Entry);
    beenInH.Add(derGraph.Entry);
    StructureNode header;
    while (headerSeq.GetWorkItem(out header))
    {
        var newInt = new Interval(intervalID++, header);
        // Process each successive node in the interval until no more nodes can
        // be added; newInt.Nodes grows during iteration.
        for (int i = 0; i < newInt.Nodes.Count; i++)
        {
            var curNode = newInt.Nodes[i];
            // Process each child of the current node
            for (int j = 0; j < curNode.OutEdges.Count; j++)
            {
                var succ = curNode.OutEdges[j];
                // Only further consider the current child if it isn't already in the interval
                if (!newInt.Nodes.Contains(succ))
                {
                    // If the current child has all its parents inside the
                    // interval, then add it to the interval. Remove it from the
                    // header sequence if it is on it.
                    if (IsSubSetOf(succ.InEdges, newInt))
                    {
                        newInt.AddNode(succ);
                        headerSeq.Remove(succ);
                    }
                    // Otherwise, add it to the header sequence if it hasn't already been in it.
                    else if (!beenInH.Contains(succ))
                    {
                        headerSeq.Add(succ);
                        beenInH.Add(succ);
                    }
                }
            }
        }
        // Add the new interval to the sequence of intervals
        intSeq.Add(newInt);
    }
}
/// <summary>
/// Computes the transitive closure of statements defining <paramref name="id"/>,
/// following identifier-to-identifier copies (alias assignments, plain
/// assignments) and all arguments of phi functions until non-copy defining
/// statements are reached.
/// </summary>
/// <param name="id">Identifier whose defining statements are wanted.</param>
/// <returns>The list of "real" defining statements discovered.</returns>
public List<Statement> GetDefiningStatementClosure(Identifier id)
{
    var visited = new HashSet<SsaIdentifier>();
    var wl = new WorkList<SsaIdentifier>();
    var stms = new List<Statement>();
    wl.Add(ssaIds[id]);
    while (wl.GetWorkItem(out var sid))
    {
        // Each SSA identifier is examined at most once.
        if (visited.Contains(sid))
        {
            continue;
        }
        visited.Add(sid);
        if (sid.DefStatement == null)
        {
            continue;
        }
        switch (sid.DefStatement.Instruction)
        {
        case AliasAssignment alias:
            // Copies of identifiers are chased; anything else is a real definition.
            if (alias.Src is Identifier idAlias)
            {
                wl.Add(ssaIds[idAlias]);
            }
            else
            {
                stms.Add(sid.DefStatement);
            }
            break;
        case Assignment ass:
            if (ass.Src is Identifier idSrc)
            {
                wl.Add(ssaIds[idSrc]);
            }
            else
            {
                stms.Add(sid.DefStatement);
            }
            break;
        case PhiAssignment phi:
            // Follow every incoming value of the phi function.
            wl.AddRange(phi.Src.Arguments.Select(a => ssaIds[(Identifier)a.Value]));
            break;
        default:
            stms.Add(sid.DefStatement);
            break;
        }
    }
    return (stms);
}
public void WlRemove()
{
    // A worklist with two items should only report empty after both are removed.
    var worklist = new WorkList<int>();
    worklist.Add(3);
    worklist.Add(2);
    Assert.IsFalse(worklist.IsEmpty);

    worklist.Remove(3);
    Assert.IsFalse(worklist.IsEmpty);

    worklist.Remove(2);
    Assert.IsTrue(worklist.IsEmpty);

    // An empty worklist must yield no work item.
    int item;
    Assert.IsFalse(worklist.GetWorkItem(out item));
}
/// <summary>
/// Inserts phi statements at the dominance frontiers of the blocks that
/// define each variable (the classic Cytron et al. phi-placement algorithm).
/// </summary>
/// <returns>The set of phi statements that were inserted.</returns>
private HashSet<Statement> PlacePhiFunctions()
{
    HashSet<Statement> phiStatements = new HashSet<Statement>();
    var defVars = LocateAllDefinedVariables(AOrig);
    MarkTemporariesDeadIn(AOrig);
    // For each defined variable in block n, collect the places where it is defined
    foreach (var a in defVars)
    {
        // Create a worklist W of all the blocks that define a.
        var W = new WorkList<Block>();
        foreach (Block b in SsaState.DomGraph.ReversePostOrder.Keys)
        {
            byte bits;
            AOrig[SsaState.RpoNumber(b)].TryGetValue(a, out bits);
            if ((bits & BitDefined) != 0)
            {
                W.Add(b);
            }
        }
        Block n;
        while (W.GetWorkItem(out n))
        {
            foreach (Block y in SsaState.DomGraph.DominatorFrontier(n))
            {
                // Only add phi functions if there is no
                // phi already and variable is not deadIn.
                var dict = AOrig[SsaState.RpoNumber(y)];
                byte bits;
                dict.TryGetValue(a, out bits);
                if ((bits & (BitHasPhi | BitDeadIn)) == 0)
                {
                    bits |= BitHasPhi;
                    dict[a] = bits;
                    var stm = InsertPhiStatement(y, a);
                    phiStatements.Add(stm);
                    // The inserted phi is itself a definition of 'a'; if 'y'
                    // did not already define 'a', its frontier must be visited too.
                    if ((bits & BitDefined) == 0)
                    {
                        W.Add(y);
                    }
                }
            }
        }
    }
    return (phiStatements);
}
public void WlRemove()
{
    // Removing every queued item should leave the worklist empty.
    var items = new WorkList<int>();
    items.Add(3);
    items.Add(2);
    Assert.IsFalse(items.IsEmpty);
    items.Remove(3);
    Assert.IsFalse(items.IsEmpty);
    items.Remove(2);
    Assert.IsTrue(items.IsEmpty);
    // Once empty, no further work item can be obtained.
    int result;
    Assert.IsFalse(items.GetWorkItem(out result));
}
/// <summary>
/// Spawns a unit, reusing an idle one when available; otherwise creates a new
/// one. The unit is moved to the work list and spawn callbacks are fired.
/// </summary>
/// <returns>The spawned unit.</returns>
public T Spawn()
{
    T unit = null;
    // Pop entries off the idle list until a usable (non-null) unit is found.
    while (unit == null && IdleList.Count > 0)
    {
        unit = IdleList[0];
        IdleList.RemoveAt(0);
    }
    if (unit == null)
    {
        unit = CreateNewUnit();
    }
    WorkList.Add(unit);
    OnBeforeSpawn(unit);
    // Notify the unit itself if it participates in the pooling protocol.
    var recyclable = unit as IPoolable;
    if (recyclable != null)
    {
        recyclable.OnSpawned();
    }
    OnAfterSpawn(unit);
    return unit;
}
/// <summary>
/// Remove any UseInstructions in the exit block of the procedure that
/// can be proved to be dead out.
/// </summary>
/// <param name="ssa">SSA of the procedure whose exit block is to be examined.</param>
/// <param name="wl">Worklist of SSA states.</param>
/// <returns>True if any change was made to SSA.</returns>
public bool RemoveUnusedDefinedValues(SsaState ssa, WorkList<SsaState> wl)
{
    bool change = false;
    trace.Verbose("UVR: {0}", ssa.Procedure.Name);
    var (deadStms, deadStgs) = FindDeadStatementsInExitBlock(ssa, this.dataFlow[ssa.Procedure].BitsLiveOut);
    // Remove 'use' statements that are known to be dead from the exit block.
    foreach (var stm in deadStms)
    {
        trace.Verbose("UVR: {0}, deleting {1}", ssa.Procedure.Name, stm.Instruction);
        ssa.DeleteStatement(stm);
        change = true;
    }
    // If any instructions were removed, update the callers.
    if (!ssa.Procedure.Signature.ParametersValid && deadStms.Count > 0)
    {
        DeadCode.Eliminate(ssa);
        foreach (Statement stm in program.CallGraph.CallerStatements(ssa.Procedure))
        {
            if (!(stm.Instruction is CallInstruction ci))
            {
                continue;
            }
            // A caller whose call site lost definitions must be re-analyzed:
            // put its SSA state back on the worklist.
            var ssaCaller = this.procToSsa[stm.Block.Procedure];
            if (RemoveDeadCallDefinitions(ssaCaller, ci, deadStgs))
            {
                wl.Add(ssaCaller);
            }
        }
    }
    return (change);
}
/// <summary>
/// Remove any storages in the ProcedureFlow <paramref name="flow"/> associated
/// with the procedure <paramref name="proc"/> if they are dead.
/// </summary>
/// <param name="proc">Procedure whose live-in storages are examined.</param>
/// <param name="flow">Data flow information for the procedure.</param>
/// <param name="wl">Worklist that receives callers needing re-analysis.</param>
/// <returns>True if any storage was removed.</returns>
private bool RemoveLiveInStorages(Procedure proc, ProcedureFlow flow, WorkList<SsaState> wl)
{
    // Collect the storages actually given values by 'def' statements in the entry block.
    var definedStgs = proc.EntryBlock.Statements
        .Select(stm => stm.Instruction as DefInstruction)
        .Where(def => def != null)
        .Select(def => def!.Identifier.Storage)
        .ToHashSet();
    // Anything the flow claims is used but is never defined is dead.
    var deadStgs = flow.BitsUsed.Keys.Except(definedStgs).ToHashSet();
    var changed = false;
    foreach (var stg in deadStgs)
    {
        flow.BitsUsed.Remove(stg);
        changed = true;
    }
    if (changed)
    {
        // Dead uses were removed, so every caller's call site must be revisited.
        foreach (Statement stm in program.CallGraph.CallerStatements(proc))
        {
            if (stm.Instruction is CallInstruction ci)
            {
                var ssaCaller = this.procToSsa[stm.Block.Procedure];
                if (RemoveDeadCallUses(ssaCaller, stm, ci, deadStgs))
                {
                    wl.Add(ssaCaller);
                }
            }
        }
    }
    return changed;
}
/// <summary>
/// As far as possible, try fusing consecutive linear blocks in the
/// cluster.
/// </summary>
/// <param name="cluster">Cluster whose blocks are candidates for fusion.</param>
public void FuseLinearBlocks(Cluster cluster)
{
    var wl = new WorkList<RtlBlock>(cluster.Blocks);
    while (wl.GetWorkItem(out var block))
    {
        // Only fuse a block that has exactly one successor...
        if (sr.ICFG.Successors(block).Count != 1)
        {
            continue;
        }
        var succ = sr.ICFG.Successors(block).First();
        // ...whose successor has exactly one predecessor (this block).
        if (sr.ICFG.Predecessors(succ).Count != 1)
        {
            continue;
        }
        Debug.Assert(sr.ICFG.Predecessors(succ).First() == block, "Inconsistent graph");
        // The predecessor must fall through: its last instruction is a plain assignment.
        if (!(block.Instructions.Last().Instructions.Last() is RtlAssignment))
        {
            continue;
        }
        // Move all instructions into predecessor.
        block.Instructions.AddRange(succ.Instructions);
        sr.ICFG.RemoveEdge(block, succ);
        // Re-route the successor's outgoing edges to start at 'block'.
        // (Materialized into a list first, since the graph is mutated while iterating.)
        var succSuccs = sr.ICFG.Successors(succ).ToList();
        foreach (var ss in succSuccs)
        {
            sr.ICFG.RemoveEdge(succ, ss);
            sr.ICFG.AddEdge(block, ss);
        }
        cluster.Blocks.Remove(succ);
        // May be more blocks.
        wl.Add(block);
    }
}
/// <summary>
/// Loads the work list for the current course: first from the local cache,
/// then refreshed from the model if the refresh succeeds and the user has not
/// navigated to a different course in the meantime.
/// </summary>
public async Task PrepareWorkList()
{
    if (WorkListLoaded)
    {
        return;
    }
    // Populate from the locally cached data first for fast display.
    List<Work> list = Model.GetWorkList(CourseId);
    WorkList.Clear();
    foreach (Work work in list)
    {
        WorkList.Add(new WorkVM(work));
    }
    // Remember which course we started loading, in case the user navigates away
    // while the refresh is in flight.
    string id = CourseId;
    if (await Model.RefWorkList(CourseId) == MainModel.UpdateResult.Success)
    {
        if (id == CourseId)
        {
            // BUG FIX: re-read the refreshed list. Previously the stale,
            // pre-refresh 'list' was re-added here, silently discarding the
            // result of the successful refresh.
            list = Model.GetWorkList(CourseId);
            WorkList.Clear();
            foreach (Work work in list)
            {
                WorkList.Add(new WorkVM(work));
            }
        }
    }
    WorkListLoaded = true;
    RaisePropertyChanged("WorkList");
}
/// <summary>
/// Queues the pair (v1, v2) for equality processing; identical symbolic
/// values are trivially equal and are not enqueued.
/// </summary>
private void PushEquality(WorkList wl, SymbolicValue v1, SymbolicValue v2)
{
    if (v1 != v2)
    {
        wl.Add(new EqPair(v1, v2));
    }
}
/// <summary>
/// Records the SSA identifier of <paramref name="id"/> as live when it has a
/// defining statement.
/// </summary>
public override void VisitIdentifier(Identifier id)
{
    var sid = ssa.Identifiers[id];
    if (sid.DefStatement != null)
    {
        liveIds.Add(sid);
    }
}
/// <summary>
/// Create a save work (with a differential save algorithm)
/// </summary>
/// <param name="_name">Name of the work (must be different from existing ones)</param>
/// <param name="_source">The Source path to save</param>
/// <param name="_destination">The Target destination to save files in</param>
/// <param name="_extension">Extensions to include in the save.</param>
public void CreateDifferencialWork(string _name, string _source, string _destination, List<Extension> _extension)
{
    var newWork = new DifferencialSaveWork(_name, _source, _destination, _extension, SaveWorkType.differencial);
    // Register the work, persist the updated configuration, and log the creation.
    WorkList.Add(newWork);
    SetWorkIndex();
    UpdateSaveFile();
    EditLog.CreateWorkLogLine(newWork);
}
/// <summary>
/// Create a save work (with a complete save algorithm)
/// </summary>
/// <param name="_name">Name of the work (must be different from existing ones)</param>
/// <param name="_source">The Source path to save</param>
/// <param name="_destination">The Target destination to save files in</param>
/// <param name="_extension">Extensions to include in the save.</param>
public void CreateCompleteWork(string _name, string _source, string _destination, List<Extension> _extension)
{
    var newWork = new CompleteSaveWork(_name, _source, _destination, _extension, SaveWorkType.complete);
    // Register the work, persist the updated configuration, and log the creation.
    WorkList.Add(newWork);
    SetWorkIndex();
    UpdateSaveFile();
    EditLog.CreateWorkLogLine(newWork);
}
/// <summary>
/// Queues the pair (sv1, sv2) for equality processing unless the two symbolic
/// values are already identical.
/// </summary>
/// <returns>True if a work item was added; false if the values were equal.</returns>
private bool TryPushEquality(WorkList<EqualityPair<TFunc, TADomain>> workList, SymValue sv1, SymValue sv2)
{
    if (sv1 == sv2)
    {
        return false;
    }
    workList.Add(new EqualityPair<TFunc, TADomain>(sv1, sv2));
    return true;
}
public void WlAdd()
{
    // A single added item should be retrievable exactly once.
    var worklist = new WorkList<int>();
    worklist.Add(3);
    Assert.IsFalse(worklist.IsEmpty);

    int item;
    Assert.IsTrue(worklist.GetWorkItem(out item));
    Assert.AreEqual(3, item);
    Assert.IsTrue(worklist.IsEmpty);
}
/// <summary>
/// Removes this statement's use from every argument of the phi function and
/// queues each argument identifier for reprocessing.
/// </summary>
/// <returns>The (unchanged) phi assignment.</returns>
public override Instruction TransformPhiAssignment(PhiAssignment phi)
{
    foreach (var arg in phi.Src.Arguments)
    {
        var idSrc = (Identifier)arg;
        ssaIds[idSrc].Uses.Remove(stmDef);
        wl.Add(idSrc);
    }
    return phi;
}
/// <summary>
/// Replaces the contents of WorkList with <paramref name="data"/> sorted by
/// name, then raises the load-complete event.
/// </summary>
public void SetWorksList(List<WorksInfoDataModel> data)
{
    var sorted = data.OrderBy(p => p.Name).ToList();
    WorkList.Clear();
    foreach (var item in sorted)
    {
        WorkList.Add(item);
    }
    // Notify listeners that the list has been (re)loaded.
    LoadWorkListComplete?.Invoke(this, new EventArgs());
}
/// <summary>
/// Dumps the e-graph — terms reachable from the constant root and the
/// abstract value map — to <paramref name="tw"/> for debugging.
/// </summary>
/// <param name="tw">Writer that receives the dump.</param>
public void Dump(TextWriter tw)
{
    HashSet seen = new HashSet();
    WorkList wl = new WorkList();
    // BUG FIX: this header line was written to Console even though every other
    // line of the dump goes to 'tw'; route it to the supplied writer so the
    // dump is complete when redirected to a file/string.
    tw.WriteLine("LastSymbolId:{0}", this.idCounter);
    foreach (IUniqueKey function in this.termMap.Keys2(this.constRoot))
    {
        SymbolicValue target = this[this.constRoot, function];
        tw.WriteLine("{0} = {1}", Function2String(function), target);
        wl.Add(target);
    }
    // Breadth-first walk over all symbolic values reachable from the root.
    while (!wl.IsEmpty())
    {
        SymbolicValue v = (SymbolicValue)wl.Pull();
        if (!seen.Add(v))
        {
            continue;
        }
        foreach (IUniqueKey function in this.termMap.Keys2(v))
        {
            SymbolicValue target = this[v, function];
            tw.WriteLine("{0}({2}) = {1}", Function2String(function), target, v);
            wl.Add(target);
        }
    }
    tw.WriteLine("**Abstract value map");
    foreach (SymbolicValue v in seen)
    {
        AbstractValue aval = this[v];
        // Only print values that carry information (non-top).
        if (!this.elementLattice.IsTop(aval))
        {
            tw.WriteLine("{0} -> {1}", v, aval);
        }
    }
}
public void WlAdd()
{
    // Adding one item makes the list non-empty; draining it empties it again.
    var items = new WorkList<int>();
    items.Add(3);
    Assert.IsFalse(items.IsEmpty);
    int value;
    Assert.IsTrue(items.GetWorkItem(out value));
    Assert.AreEqual(3, value);
    Assert.IsTrue(items.IsEmpty);
}
/// <summary>
/// Rewrites the chain of statements defining <paramref name="id"/> so their
/// definitions are expressed through the out-parameter <paramref name="idOut"/>.
/// Each defining statement is visited and rewritten at most once.
/// </summary>
public void ReplaceDefinitionsWithOutParameter(Identifier id, Identifier idOut)
{
    this.idOut = idOut;
    wl = new WorkList<Identifier>();
    wl.Add(id);
    var processed = new HashSet<Statement>();
    while (wl.GetWorkItem(out id))
    {
        ssa = ssaIds[id];
        stmDef = ssa.DefStatement;
        if (stmDef == null || processed.Contains(stmDef))
        {
            continue;
        }
        processed.Add(stmDef);
        // The visitor relies on iStmDef/stmDef being set before Accept runs.
        iStmDef = stmDef.Block.Statements.IndexOf(stmDef);
        stmDef.Instruction = stmDef.Instruction.Accept(this);
    }
}
/// <summary>
/// Rewrites the chain of statements defining <paramref name="id"/> so their
/// definitions are expressed through the out-parameter <paramref name="idOut"/>.
/// Each defining statement is rewritten at most once.
/// </summary>
public void ReplaceDefinitionsWithOutParameter(Identifier id, Identifier idOut)
{
    this.idOut = idOut;
    wl = new WorkList<Identifier>();
    wl.Add(id);
    var seen = new HashSet<Statement>();
    while (wl.GetWorkItem(out id))
    {
        ssa = ssaIds[id];
        stmDef = ssa.DefStatement;
        // HashSet.Add returns false for already-seen statements.
        if (stmDef != null && seen.Add(stmDef))
        {
            // The visitor relies on iStmDef/stmDef being set before Accept runs.
            iStmDef = stmDef.Block.Statements.IndexOf(stmDef);
            stmDef.Instruction = stmDef.Instruction.Accept(this);
        }
    }
}
/// <summary>
/// Repeatedly fuses fusable blocks of the procedure into their predecessors
/// until no candidates remain or the operation is canceled.
/// </summary>
public void Transform()
{
    var worklist = new WorkList<Block>(proc.ControlGraph.Blocks);
    while (worklist.GetWorkItem(out Block block))
    {
        if (listener.IsCanceled())
        {
            return;
        }
        var candidate = DetermineCandidate(block);
        if (candidate == null)
        {
            continue;
        }
        FuseIntoPredecessor(candidate);
        // The predecessor may now itself be fusable; revisit it.
        worklist.Add(candidate.Predecessor);
    }
}
/// <summary>
/// Builds the DFA transition table from the annotated syntax tree
/// <paramref name="n"/> using the followpos subset construction.
/// </summary>
/// <param name="n">Root node of the parsed pattern.</param>
/// <returns>The array of DFA states; index 0 is the error state.</returns>
private State [] BuildDfaTable(Node n)
{
    List<State> dStates = new List<State>();
    // Create the default, error state.
    State err = new State(new BitArray(n.FirstPos.Length), charClasses);
    AddState(dStates, err);
    // Create the initial state.
    State s0 = new State(n.FirstPos, charClasses);
    AddState(dStates, s0);
    // Start the worklist.
    WorkList<State> worklist = new WorkList<State>();
    worklist.Add(s0);
    State t;
    while (worklist.TryGetWorkItem(out t))
    {
        Debug.WriteLine(t.ToString());
        for (int a = 0; a != charClasses; ++a)
        {
            // Create U, a state consisting of the positions in
            // FollowPos(p) where p is any position in t that has
            // an 'a'.
            State u = new State(new BitArray(positions.Count), charClasses);
            for (int p = 0; p != t.Positions.Length; ++p)
            {
                if (!t.Positions[p])
                {
                    continue;
                }
                ByteNode pp = (ByteNode)positions[p];
                if (pp.Any || alphabet[pp.startByte] == a)
                {
                    u.Positions.Or(pp.FollowPos);
                }
                t.Accepts |= pp.Accepts;
            }
            if (IsEmptySet(u.Positions))
            {
                // No positions reachable on 'a': transition to the error state.
                u = null;
            }
            else
            {
                // Reuse an existing state with the same position set, if any;
                // otherwise register the new state and schedule it for processing.
                State uu = FindState(dStates, u.Positions);
                if (uu == null)
                {
                    AddState(dStates, u);
                    worklist.Add(u);
                }
                else
                {
                    u = uu;
                }
            }
            t.NextState[a] = u;
        }
        Debug.WriteLine("t complete: " + t);
    }
    return (dStates.ToArray());
}
/// <summary>
/// Builds the DFA transition table from the annotated syntax tree
/// <paramref name="n"/> using the followpos subset construction.
/// </summary>
/// <param name="n">Root node of the parsed pattern.</param>
/// <returns>The array of DFA states; index 0 is the error state.</returns>
private State [] BuildDfaTable(Node n)
{
    List<State> dStates = new List<State>();
    // Create the default, error state.
    State err = new State(new BitArray(n.FirstPos.Length), charClasses);
    AddState(dStates, err);
    // Create the initial state.
    State s0 = new State(n.FirstPos, charClasses);
    AddState(dStates, s0);
    // Start the worklist.
    WorkList<State> worklist = new WorkList<State>();
    worklist.Add(s0);
    State t;
    while (worklist.GetWorkItem(out t))
    {
        Debug.WriteLine(t.ToString());
        for (int a = 0; a != charClasses; ++a)
        {
            // Create U, a state consisting of the positions in
            // FollowPos(p) where p is any position in t that has
            // an 'a'.
            State u = new State(new BitArray(positions.Count), charClasses);
            for (int p = 0; p != t.Positions.Length; ++p)
            {
                if (!t.Positions[p])
                    continue;
                ByteNode pp = (ByteNode) positions[p];
                if (pp.Any || alphabet[pp.startByte] == a)
                {
                    u.Positions.Or(pp.FollowPos);
                }
                t.Accepts |= pp.Accepts;
            }
            if (IsEmptySet(u.Positions))
            {
                // No positions reachable on 'a': transition to the error state.
                u = null;
            }
            else
            {
                // Reuse an existing state with the same position set, if any;
                // otherwise register the new state and schedule it for processing.
                State uu = FindState(dStates, u.Positions);
                if (uu == null)
                {
                    AddState(dStates, u);
                    worklist.Add(u);
                }
                else
                {
                    u = uu;
                }
            }
            t.NextState[a] = u;
        }
        Debug.WriteLine("t complete: " + t);
    }
    return dStates.ToArray();
}
/// <summary>
/// Must follow field edges of value types backwards as well
/// </summary>
/// <param name="loc">Symbolic location whose originating local variable is sought.</param>
/// <returns>The local variable mapping to <paramref name="loc"/>, or null if none is found.</returns>
public Variable GetLocalMappingToLoc(ISymValue loc)
{
    WorkList refparams = new WorkList();
    WorkList fields = new WorkList();
    fields.Add(loc);
    // Pass 1: chase value-type field edges backwards looking for a variable term.
    while (! fields.IsEmpty())
    {
        ISymValue sv = (ISymValue)fields.Pull();
        foreach (EGraphTerm eterm in egraph.EqTerms(sv))
        {
            if ( !(eterm.Function is StackVariable))
            {
                Variable v = eterm.Function as Variable;
                if (v != null)
                {
                    return v;
                }
                Field f = eterm.Function as Field;
                if (f != null && f.DeclaringType.IsValueType)
                {
                    // Follow the owner of the value-type field backwards.
                    if (eterm.Args.Length>0)
                    {
                        fields.Add(eterm.Args[0]);
                    }
                }
                if (eterm.Function == ValueOf && eterm.Args.Length>0)
                {
                    // could be that we are looking at a ref parameter
                    refparams.Add(eterm.Args[0]);
                }
            }
        }
    }
    // Pass 2: examine the candidate ref/pointer parameters collected above.
    while (! refparams.IsEmpty())
    {
        ISymValue sv = (ISymValue)refparams.Pull();
        foreach (EGraphTerm eterm in egraph.EqTerms(sv))
        {
            if ( !(eterm.Function is StackVariable))
            {
                Variable v = eterm.Function as Variable;
                if (v != null && (v.Type is Reference || v.Type is Pointer))
                {
                    return v;
                }
            }
        }
    }
    return null;
}
/// <summary>
/// Determines whether the symbolic graph <paramref name="thisG"/> is pointwise
/// less than or equal to <paramref name="thatG"/>, starting from the constant
/// roots and walking matching edges. On success, produces the forward and
/// backward symbolic-value mappings between the two graphs.
/// </summary>
/// <param name="thisG">Candidate smaller graph.</param>
/// <param name="thatG">Candidate larger graph.</param>
/// <param name="forward">Out: maps values of thisG to matching values of thatG; null on failure.</param>
/// <param name="backward">Out: maps values of thatG back to values of thisG; null on failure.</param>
/// <returns>True if thisG &lt;= thatG.</returns>
private static bool InternalLessEqual(SymGraph<TFunc, TADomain> thisG, SymGraph<TFunc, TADomain> thatG, out IImmutableMap<SymValue, Sequence<SymValue>> forward, out IImmutableMap<SymValue, SymValue> backward)
{
    int updateSize;
    SymGraph<TFunc, TADomain> commonTail = ComputeCommonTail(thisG, thatG, out updateSize);
    if (thisG.IsImmutable)
    {
        thisG = thisG.Clone();
    }
    var workList = new WorkList<EqualityPair<TFunc, TADomain>>();
    workList.Add(new EqualityPair<TFunc, TADomain>(thisG.const_root, thatG.const_root));
    IImmutableSet<SymValue> backwardManifested = ImmutableSet<SymValue>.Empty(SymValue.GetUniqueKey);
    IImmutableMap<SymValue, SymValue> backwardMap = ImmutableIntKeyMap<SymValue, SymValue>.Empty(SymValue.GetUniqueKey);
    IImmutableMap<SymValue, Sequence<SymValue>> forwardMap = ImmutableIntKeyMap<SymValue, Sequence<SymValue>>.Empty(SymValue.GetUniqueKey);
    IImmutableMap<SymValue, int> triggers = ImmutableIntKeyMap<SymValue, int>.Empty(SymValue.GetUniqueKey);
    while (!workList.IsEmpty())
    {
        EqualityPair<TFunc, TADomain> equalityPair = workList.Pull();
        SymValue sv1 = equalityPair.Sv1;
        SymValue sv2 = equalityPair.Sv2;
        SymValue s;
        // If sv2 was already matched, it must have been matched to the same sv1.
        if (VisitedBefore(sv2, backwardManifested, backwardMap, out s))
        {
            if (s != null && s == sv1)
            {
                continue;
            }
            if (DebugOptions.Debug)
            {
                Console.WriteLine("---LessEqual fails due to pre-existing relation: {0} <- {1}", s, sv2);
            }
            forward = null;
            backward = null;
            return (false);
        }
        // sv1 == null means the value is only manifested in thatG; compare
        // against the top value for manifested fields.
        TADomain val1 = sv1 == null ? thisG.UnderlyingTopValue.ForManifestedField() : thisG[sv1];
        TADomain val2 = thatG[sv2];
        if (!val1.LessEqual(val2))
        {
            if (DebugOptions.Debug)
            {
                Console.WriteLine("---LessEqual fails due to abstract values: !({0} <= {1})", val1, val2);
            }
            forward = null;
            backward = null;
            return (false);
        }
        // Record the established correspondence between sv1 and sv2.
        if (sv1 != null)
        {
            backwardMap = backwardMap.Add(sv2, sv1);
            forwardMap = forwardMap.Add(sv1, forwardMap[sv1].Cons(sv2));
        }
        else
        {
            backwardManifested = backwardManifested.Add(sv2);
        }
        if (thisG.HasAllBottomFields(sv1))
        {
            continue;
        }
        if (thatG.HasAllBottomFields(sv2))
        {
            if (DebugOptions.Debug)
            {
                Console.WriteLine("---LessEqual fails due to bottom field difference");
            }
            forward = null;
            backward = null;
            return (false);
        }
        // Compare all single-function edges leaving sv2.
        foreach (TFunc function in thatG.Functions(sv2))
        {
            SymValue v1 = thisG[function, sv1];
            SymValue v2 = thatG[function, sv2];
            if (DebugOptions.Debug)
            {
                Console.WriteLine("  {0}-{1}->{2} <=? {3}-{4}->{5}", sv1, function, v1, sv2, function, v2);
            }
            workList.Add(new EqualityPair<TFunc, TADomain>(v1, v2));
        }
        // Compare multi-edges; a target is only processed once all of its
        // argument triggers have fired (UpdateTrigger counts arrivals).
        foreach (var e in thatG.MultiEdges(sv2))
        {
            foreach (SymValue sv in thatG.MultiEdgeMap[sv2, e].AsEnumerable())
            {
                if (!UpdateTrigger(sv, e, ref triggers))
                {
                    continue;
                }
                SymGraphTerm<TFunc> term = thatG.EqualMultiTermsMap[sv];
                var args = new SymValue[term.Args.Length];
                for (int i = 0; i < args.Length; i++)
                {
                    args[i] = backwardMap[term.Args[i]];
                }
                SymValue v1 = thisG.LookupWithoutManifesting(args, e.Function);
                if (v1 == null)
                {
                    if (DebugOptions.Debug)
                    {
                        Console.WriteLine("---LessEqual fails due to missing multi term {0}({1})", e.Function, string.Join(", ", term.Args.Select(it => it.ToString())));
                    }
                    forward = null;
                    backward = null;
                    return (false);
                }
                workList.Add(new EqualityPair<TFunc, TADomain>(v1, sv));
            }
        }
    }
    forward = forwardMap;
    backward = CompleteWithCommon(backwardMap, thisG, commonTail.IdGenerator);
    return (true);
}
/// <summary>
/// Partitions <paramref name="graph"/> into intervals using the classic
/// interval-construction algorithm: each interval is a maximal single-entry
/// region grown from a header node.
/// </summary>
/// <param name="graph">Control-flow graph to partition.</param>
/// <param name="entry">Entry node; becomes the first interval header.</param>
/// <returns>The sequence of intervals found in the graph.</returns>
public List<Interval> BuildIntervals(DirectedGraph<StructureNode> graph, StructureNode entry)
{
    if (graph == null)
        throw new ArgumentNullException("graph");
    if (entry == null)
        throw new ArgumentNullException("entry");
    var intervalsInGraph = new List<Interval>();    // The sequence of intervals in this graph
    var headers = new WorkList<StructureNode>();    // The sequence of interval header nodes
    var beenInH = new HashSet<StructureNode>();     // The set of nodes that have been in the above sequence at some stage
    headers.Add(entry);
    beenInH.Add(entry);
    StructureNode header;
    while (headers.GetWorkItem(out header))
    {
        Interval newInt = new Interval(intervalID++, header);
        // Process each succesive node in the interval until no more nodes can be added to the interval.
        // NOTE: newInt.Nodes grows while we iterate, so the index-based loop is deliberate.
        for (int i = 0; i < newInt.Nodes.Count; i++)
        {
            StructureNode curNode = newInt.Nodes[i];
            foreach (StructureNode succ in graph.Successors(curNode))
            {
                // Only further consider the current child if it isn't already in the interval
                if (!newInt.Nodes.Contains(succ))
                {
                    // If the current child has all its parents
                    // inside the interval, then add it to the interval. Remove it from the header
                    // sequence if it is on it.
                    if (IsSubSetOf(graph.Predecessors(succ), newInt))
                    {
                        newInt.AddNode(succ);
                        headers.Remove(succ);
                    }
                    // Otherwise, add it to the header sequence if it hasn't already been in it.
                    else if (!beenInH.Contains(succ))
                    {
                        headers.Add(succ);
                        beenInH.Add(succ);
                    }
                }
            }
        }
        // Add the new interval to the sequence of intervals
        intervalsInGraph.Add(newInt);
    }
    return intervalsInGraph;
}
/// <summary>
/// Dumps the symbolic graph — terms reachable from the constant root,
/// multi-edges, and the abstract value map — to <paramref name="tw"/> for
/// debugging.
/// </summary>
/// <param name="tw">Writer that receives the dump.</param>
public void Dump(TextWriter tw)
{
    var set = new HashSet<SymValue>();
    var workList = new WorkList<SymValue>();
    IImmutableMap<SymValue, int> triggers = ImmutableIntKeyMap<SymValue, int>.Empty(SymValue.GetUniqueKey);
    tw.WriteLine("EGraphId: {0}", this.egraph_id);
    tw.WriteLine("LastSymbolId: {0}", LastSymbolId);
    // Seed the worklist with every value reachable from the constant root.
    foreach (TFunc function in TermMap.Keys2(this.const_root))
    {
        SymValue sv = this[this.const_root, function];
        tw.WriteLine("{0} = {1}", function, sv);
        workList.Add(sv);
    }
    while (!workList.IsEmpty())
    {
        SymValue sv = workList.Pull();
        if (!set.Add(sv))
        {
            continue;
        }
        foreach (TFunc function in TermMap.Keys2(sv))
        {
            SymValue target = this[sv, function];
            // NOTE(review): the trailing ')' in this format string looks like a
            // typo ("{0}({2}) = {1})") — confirm against the intended output format.
            tw.WriteLine("{0}({2}) = {1})", function, target, sv);
            workList.Add(target);
        }
        foreach (var edge in MultiEdgeMap.Keys2(sv))
        {
            foreach (SymValue target in MultiEdgeMap[sv, edge].AsEnumerable())
            {
                // Only print a multi-edge target once all its triggers have fired.
                if (!UpdateTrigger(target, edge, ref triggers))
                {
                    continue;
                }
                SymGraphTerm<TFunc> term = EqualMultiTermsMap[target];
                if (term.Args != null)
                {
                    tw.WriteLine("{0}({1}) = {2}", term.Function, term.Args.ToString(", "), target);
                    workList.Add(target);
                }
            }
        }
    }
    tw.WriteLine("**Abstract value map");
    foreach (SymValue sv in set)
    {
        TADomain abstractValue = this[sv];
        // Only print values that carry information (non-top).
        if (!abstractValue.IsTop)
        {
            tw.WriteLine("{0} -> {1}", sv, abstractValue);
        }
    }
}
/// <summary>
/// Dumps the e-graph — terms reachable from the constant root and the
/// abstract value map — to <paramref name="tw"/> for debugging.
/// </summary>
/// <param name="tw">Writer that receives the dump.</param>
public void Dump(TextWriter tw)
{
    HashSet seen = new HashSet();
    WorkList wl = new WorkList();
    // BUG FIX: the header line was written to Console even though every other
    // line of the dump goes to 'tw'; route it to the supplied writer.
    tw.WriteLine("LastSymbolId:{0}", this.idCounter);
    foreach (IUniqueKey function in this.termMap.Keys2(this.constRoot))
    {
        SymbolicValue target = this[this.constRoot, function];
        tw.WriteLine("{0} = {1}", Function2String(function), target);
        wl.Add(target);
    }
    // Breadth-first walk over all symbolic values reachable from the root.
    while (!wl.IsEmpty())
    {
        SymbolicValue v = (SymbolicValue)wl.Pull();
        if (!seen.Add(v))
            continue;
        foreach (IUniqueKey function in this.termMap.Keys2(v))
        {
            SymbolicValue target = this[v, function];
            tw.WriteLine("{0}({2}) = {1}", Function2String(function), target, v);
            wl.Add(target);
        }
    }
    tw.WriteLine("**Abstract value map");
    foreach (SymbolicValue v in seen)
    {
        AbstractValue aval = this[v];
        // Only print values that carry information (non-top).
        if (!this.elementLattice.IsTop(aval))
        {
            tw.WriteLine("{0} -> {1}", v, aval);
        }
    }
}
/// <summary>
/// Queues the pair (v1, v2) for equality processing; identical symbolic
/// values are trivially equal and are skipped.
/// </summary>
private void PushEquality(WorkList wl, SymbolicValue v1, SymbolicValue v2)
{
    if (v1 == v2)
    {
        return;
    }
    wl.Add(new EqPair(v1, v2));
}
/// <summary>
/// Builds the intervals of the derived graph <paramref name="derGraph"/> and
/// stores them in <c>derGraph.Intervals</c>, growing each interval from a
/// header node until no more nodes can be absorbed.
/// </summary>
/// <param name="derGraph">Derived graph whose intervals are computed; its Entry must be set.</param>
/// <exception cref="ArgumentNullException">If <paramref name="derGraph"/> is null.</exception>
/// <exception cref="ArgumentException">If <paramref name="derGraph"/> has no entry node.</exception>
public void BuildIntervals(DerivedGraph derGraph)
{
    if (derGraph == null)
    {
        throw new ArgumentNullException(nameof(derGraph));
    }
    if (derGraph.Entry == null)
    {
        throw new ArgumentException("cfg graph must be non-null.", nameof(derGraph));
    }
    var intSeq = derGraph.Intervals;                // The sequence of intervals in this graph
    var headerSeq = new WorkList<StructureNode>();  // The sequence of interval header nodes
    // FIX: this collection is a set queried with Contains() on every edge;
    // a List made that O(n) per lookup. The sibling overload already uses a HashSet.
    var beenInH = new HashSet<StructureNode>();     // The set of nodes that have been in the above sequence at some stage
    headerSeq.Add(derGraph.Entry);
    beenInH.Add(derGraph.Entry);
    StructureNode header;
    while (headerSeq.GetWorkItem(out header))
    {
        var newInt = new Interval(intervalID++, header);
        // Process each successive node in the interval until no more nodes can
        // be added; newInt.Nodes grows during iteration.
        for (int i = 0; i < newInt.Nodes.Count; i++)
        {
            var curNode = newInt.Nodes[i];
            // Process each child of the current node
            for (int j = 0; j < curNode.OutEdges.Count; j++)
            {
                var succ = curNode.OutEdges[j];
                // Only further consider the current child if it isn't already in the interval
                if (!newInt.Nodes.Contains(succ))
                {
                    // If the current child has all its parents inside the
                    // interval, then add it to the interval. Remove it from the
                    // header sequence if it is on it.
                    if (IsSubSetOf(succ.InEdges, newInt))
                    {
                        newInt.AddNode(succ);
                        headerSeq.Remove(succ);
                    }
                    // Otherwise, add it to the header sequence if it hasn't already been in it.
                    else if (!beenInH.Contains(succ))
                    {
                        headerSeq.Add(succ);
                        beenInH.Add(succ);
                    }
                }
            }
        }
        // Add the new interval to the sequence of intervals
        intSeq.Add(newInt);
    }
}
/// <summary>
/// Inserts phi statements at the dominance frontiers of the blocks that
/// define each variable (the classic Cytron et al. phi-placement algorithm).
/// </summary>
private void PlacePhiFunctions()
{
    var defVars = LocateAllDefinedVariables(AOrig);
    MarkTemporariesDeadIn(AOrig);
    // For each defined variable in block n, collect the places where it is defined
    foreach (var a in defVars)
    {
        // Create a worklist W of all the blocks that define a.
        var W = new WorkList<Block>();
        foreach (Block b in SsaState.DomGraph.ReversePostOrder.Keys)
        {
            byte bits;
            AOrig[SsaState.RpoNumber(b)].TryGetValue(a, out bits);
            if ((bits & BitDefined) != 0)
                W.Add(b);
        }
        Block n;
        while (W.GetWorkItem(out n))
        {
            foreach (Block y in SsaState.DomGraph.DominatorFrontier(n))
            {
                // Only add phi functions if there is no
                // phi already and variable is not deadIn.
                var dict = AOrig[SsaState.RpoNumber(y)];
                byte bits;
                dict.TryGetValue(a, out bits);
                if ((bits & (BitHasPhi | BitDeadIn)) == 0)
                {
                    bits |= BitHasPhi;
                    dict[a] = bits;
                    InsertPhiStatement(y, a);
                    // The inserted phi is itself a definition of 'a'; if 'y'
                    // did not already define 'a', its frontier must be visited too.
                    if ((bits & BitDefined) == 0)
                    {
                        W.Add(y);
                    }
                }
            }
        }
    }
}