/// <summary>
/// Adjusts the outstanding-count of <paramref name="version"/> by <paramref name="delta"/>,
/// maintaining the Counts dictionary and republishing the Frontier array whenever the
/// minimal antichain of outstanding pointstamps changes.
/// </summary>
/// <param name="version">pointstamp whose count is adjusted</param>
/// <param name="delta">signed change to the count</param>
/// <returns>true iff the frontier changed as a result of this update</returns>
internal bool UpdatePointstampCount(Pointstamp version, Int64 delta)
{
    var oldFrontier = Frontier;

    var count = 0L;
    if (!Counts.TryGetValue(version, out count))
    {
        // First occurrence of this version: re-key with a copied pointstamp so the
        // dictionary does not alias the caller's (possibly reused) timestamp storage.
        version = new Pointstamp(version);

        Counts.Add(version, delta);

        // Potentially add this version to the frontier
        if (actualFrontier.Add(version))
        {
            Frontier = actualFrontier.Antichain.ToArray();
        }
    }
    else
    {
        if (count + delta == 0)
        {
            // Count returned to zero: the version leaves the counts, and possibly the frontier.
            Counts.Remove(version);
            if (actualFrontier.Remove(version))
            {
                Frontier = actualFrontier.Antichain.ToArray();
            }
        }
        else
        {
            Counts[version] = count + delta;
        }
    }

    // Reference comparison suffices: Frontier is only reassigned when the antichain changed.
    return(Frontier != oldFrontier);
}
/// <summary>
/// Broadcasts a progress update for the given pointstamp under centralized tracking.
/// </summary>
/// <param name="time">pointstamp whose count changed</param>
/// <param name="update">signed change to the count</param>
public void BroadcastProgressUpdate(Pointstamp time, int update)
{
    // NOTE(review): the local consumer is given a count of 1 rather than the full update,
    // while the centralizer receives the real update. Presumably this places a local hold
    // on the frontier until the centralizer re-broadcasts the aggregated change — compare
    // the decentralized overload, which forwards 'update' directly. Confirm before changing.
    this.consumer.InjectElement(time, 1);
    if (this.centralizer != null)
    {
        this.centralizer.InjectElement(time, update);
    }
}
/// <summary>
/// Converts a time of type <typeparamref name="T"/> into a pointstamp located at the
/// given graph object, with the timestamp coordinates populated from the time.
/// </summary>
/// <param name="time">time to convert</param>
/// <param name="graphObjectID">graph location for the resulting pointstamp</param>
/// <returns>a pointstamp equivalent to <paramref name="time"/> at the given location</returns>
public static Pointstamp ToPointstamp <T>(this T time, int graphObjectID) where T : Time <T>
{
    var result = new Pointstamp { Location = graphObjectID };

    // Size the timestamp to the time's coordinate count, then fill it in.
    result.Timestamp.Length = time.Coordinates;
    time.Populate(ref result);

    return result;
}
/// <summary>
/// Constructs a buffer of progress updates for the given stage, draining to the supplied producer.
/// </summary>
/// <param name="name">identifier of the stage</param>
/// <param name="p">producer to which buffered updates are sent</param>
public ProgressUpdateBuffer(int name, Runtime.Progress.ProgressUpdateProducer p)
{
    // Probe the time type for its coordinate count by populating a throwaway pointstamp.
    var scratch = new Pointstamp(0, new int[] { });
    var coordinates = default(T).Populate(ref scratch);

    stageID = name;
    producer = p;
    Updates = new Dictionary <T, Int64>();
    version = new Pointstamp(name, new int[coordinates]);
}
/// <summary>
/// Directly applies a pointstamp count change to the shared pointstamp count set (PCS).
/// By directly modifying the PCS, we don't risk sending anything from the centralizer;
/// used only for initializing inputs.
/// </summary>
/// <param name="time">pointstamp whose count changes</param>
/// <param name="update">signed change to the count</param>
internal void InjectElement(Pointstamp time, Int64 update)
{
    Tracing.Trace("(PCSLock");
    Monitor.Enter(this.PCS);
    try
    {
        // The frontier-changed result is deliberately ignored: initialization-time
        // injections do not need to trigger any downstream notification.
        PCS.UpdatePointstampCount(time, update);
    }
    finally
    {
        // FIX: release the monitor even if the update throws; the original
        // Enter/Exit pair would leave the PCS permanently locked on exception.
        Monitor.Exit(this.PCS);
        Tracing.Trace(")PCSLock");
    }
}
/// <summary>
/// Directly applies a pointstamp count change to the shared pointstamp count set (PCS).
/// By directly modifying the PCS, we don't risk sending anything from the centralizer;
/// used only for initializing inputs.
/// </summary>
/// <param name="time">pointstamp whose count changes</param>
/// <param name="update">signed change to the count</param>
internal void InjectElement(Pointstamp time, Int64 update)
{
    NaiadTracing.Trace.LockAcquire(this.PCS);
    Monitor.Enter(this.PCS);
    NaiadTracing.Trace.LockHeld(this.PCS);
    try
    {
        // The frontier-changed result is deliberately ignored: initialization-time
        // injections do not need to trigger any downstream notification.
        PCS.UpdatePointstampCount(time, update);
    }
    finally
    {
        // FIX: release the monitor even if the update throws; the original
        // Enter/Exit pair would leave the PCS permanently locked on exception.
        Monitor.Exit(this.PCS);
        NaiadTracing.Trace.LockRelease(this.PCS);
    }
}
/// <summary>
/// Removes one occurrence of <paramref name="element"/> from the antichain structure,
/// decrementing precedent counts of elements it precedes, and recomputes the antichain
/// if the removed element was a member of it.
/// </summary>
/// <param name="element">element to remove; must be present</param>
/// <returns>true iff the minimal antichain changed</returns>
internal bool Remove(Pointstamp element)
{
    // 'position' doubles as a found-flag: it stays elements.Count until a match is seen.
    int position = elements.Count;
    for (int i = 0; i < elements.Count; i++)
    {
        // NOTE(review): finding the element inside this branch relies on LessThan being
        // reflexive for equal pointstamps — confirm against Reachability.LessThan.
        if (this.Reachability.LessThan(element, elements[i]))
        {
            if (position == elements.Count && element.Equals(elements[i]))
            {
                position = i;
            }
            else
            {
                // element precedes elements[i]: one fewer precedent once it is removed.
                precedents[i]--;
            }
        }
    }

    if (position == elements.Count)
    {
        throw new Exception("Tried to remove an element not present in MinimalAntichain");
    }

    // Swap-remove: overwrite the removed slot with the last entry, in both parallel lists.
    // (The guard is always true after the throw above; kept as written.)
    if (position < elements.Count)
    {
        elements[position] = elements[elements.Count - 1];
        //elements.Count--;
        elements.RemoveAt(elements.Count - 1);
        precedents[position] = precedents[precedents.Count - 1];
        precedents.RemoveAt(precedents.Count - 1);
        //precedents.Count--;
    }

    // The antichain only needs recomputation if the removed element was a member of it.
    var changes = false;
    for (int i = 0; i < Antichain.Count; i++)
    {
        if (element.Equals(Antichain[i]))
        {
            changes = true;
        }
    }

    if (changes)
    {
        UpdateAntichain();
    }

    return(changes);
}
/// <summary>
/// Accumulates <paramref name="delta"/> into the outstanding-record count for
/// <paramref name="time"/>, dropping the entry when its count reaches zero.
/// </summary>
/// <param name="time">pointstamp whose record count changes</param>
/// <param name="delta">signed change to the count</param>
private void AddToOutstandingRecords(Pointstamp time, Int64 delta)
{
    Int64 existing;
    if (outstandingRecords.TryGetValue(time, out existing))
    {
        var updated = existing + delta;
        if (updated == 0)
        {
            outstandingRecords.Remove(time);
        }
        else
        {
            outstandingRecords[time] = updated;
        }
    }
    else
    {
        // Key with a copied pointstamp so the dictionary does not capture the caller's int[].
        outstandingRecords.Add(new Pointstamp(time), delta);
    }
}
/// <summary>
/// Records a change of <paramref name="delta"/> to the number of outstanding records
/// at <paramref name="time"/>, under the producer's lock.
/// </summary>
/// <param name="time">pointstamp whose record count changes</param>
/// <param name="delta">signed change to the count</param>
public void UpdateRecordCounts(Pointstamp time, Int64 delta)
{
    Tracing.Trace("(ProdLock");
    lock (this)
    {
        // The commented-out block below is a disabled impersonation fast-path that would
        // expand the update across impersonated locations; kept for reference.
        //if (this.Stage.InternalComputation.Controller.Configuration.Impersonation && !this.Stage.InternalComputation.Reachability.NoImpersonation.Contains(time.Location) && this.Stage.InternalComputation.Reachability.Impersonations[time.Location] != null)
        //{
        //    foreach (var newVersion in this.Stage.InternalComputation.Reachability.EnumerateImpersonations(time))
        //        AddToOutstandingRecords(newVersion, delta);
        //
        //    this.LocalPCS.UpdatePointstampCount(time, delta);
        //}
        //else
        AddToOutstandingRecords(time, delta);
    }
    Tracing.Trace(")ProdLock");
}
/// <summary>
/// Adds <paramref name="element"/> to the antichain structure, updating precedent counts,
/// and recomputes the minimal antichain if the new element belongs in it or displaces
/// existing members.
/// </summary>
/// <param name="element">element to add</param>
/// <returns>true iff the minimal antichain changed</returns>
internal bool Add(Pointstamp element)
{
    var newPrecedents = 0;
    for (int i = 0; i < elements.Count; i++)
    {
        // FIX: evaluate each direction of the order once per element; the original
        // recomputed these (pure) comparisons up to three times per iteration.
        var elementPrecedes = this.Reachability.LessThan(element, elements[i]);
        var elementFollows = this.Reachability.LessThan(elements[i], element);

        if (elementPrecedes)
        {
            precedents[i]++;
        }

        if (elementFollows)
        {
            newPrecedents++;
        }

        // Two distinct elements ordered in both directions means the partial order is broken.
        // (The original also made a discarded LessThan call here — dead code, removed.)
        if (elementPrecedes && elementFollows && !elements[i].Equals(element))
        {
            throw new Exception("Ordering violation " + element + "," + elements[i]);
        }
    }

    elements.Add(element);
    precedents.Add(newPrecedents);

    // The element joins the antichain if nothing precedes it; it also invalidates the
    // antichain if it precedes any current member.
    var changes = newPrecedents == 0;
    for (int i = 0; i < Antichain.Count; i++)
    {
        if (this.Reachability.LessThan(element, Antichain[i]))
        {
            changes = true;
        }
    }

    if (changes)
    {
        UpdateAntichain();
    }

    return(changes);
}
/// <summary>
/// Applies a pointstamp count change to the PCS under its lock and, if the frontier moved,
/// flushes buffered aggregations, raises frontier-changed events, signals emptiness, and
/// wakes the scheduler workers.
/// </summary>
/// <param name="time">pointstamp whose count changes</param>
/// <param name="weight">signed change to the count</param>
public void ProcessCountChange(Pointstamp time, Int64 weight)
{
    // the PCS should not be touched outside this lock, other than by capturing PCS.Frontier.
    NaiadTracing.Trace.LockAcquire(this.PCS);
    Monitor.Enter(this.PCS);
    NaiadTracing.Trace.LockHeld(this.PCS);

    bool frontierChanged;
    Pointstamp[] newFrontier;
    try
    {
        frontierChanged = PCS.UpdatePointstampCount(time, weight);
        newFrontier = PCS.Frontier;
    }
    finally
    {
        // FIX: release the monitor even if the update throws; the original Enter/Exit
        // pair would leave the PCS permanently locked on exception. (The unused
        // oldFrontier capture was also dropped.)
        Monitor.Exit(this.PCS);
        NaiadTracing.Trace.LockRelease(this.PCS);
    }

    if (frontierChanged)
    {
        // aggregation may need to flush
        this.Aggregator.ConsiderFlushingBufferedUpdates();

        // fire any frontier changed events
        if (this.OnFrontierChanged != null)
        {
            this.OnFrontierChanged(this.Stage.Computation, new FrontierChangedEventArgs(newFrontier));
        }

        // no elements means done.
        if (newFrontier.Length == 0)
        {
            NaiadTracing.Trace.RefAlignFrontier();
            this.FrontierEmpty.Set();
        }
        else
        {
            NaiadTracing.Trace.AdvanceFrontier(newFrontier);
        }

        // Wake up schedulers to run shutdown actions for the graph.
        this.Stage.InternalComputation.Controller.Workers.WakeUp();
    }
}
/// <summary>
/// Tests whether pointstamp <paramref name="a"/> can causally reach pointstamp
/// <paramref name="b"/>, using the precomputed per-location ComparisonDepth table.
/// </summary>
/// <param name="a">candidate earlier pointstamp</param>
/// <param name="b">candidate later pointstamp</param>
/// <returns>true iff a is less than or equal to b in the causal order</returns>
public bool LessThan(Pointstamp a, Pointstamp b)
{
    // early exit if this.epoch > that.epoch
    if (a.Timestamp[0] > b.Timestamp[0])
    {
        return(false);
    }

    // Depth 0 encodes "b's location is unreachable from a's location".
    var depth = ComparisonDepth[a.Location][b.Location];
    if (depth == 0)
    {
        return(false);
    }
    else
    {
        // A negative depth encodes that reaching b requires traversing a back edge,
        // so the final compared coordinate must strictly increase (see how
        // UpdateReachabilityPartialOrder stores -depth when increment is set).
        var increment = depth < 0;
        depth = Math.Abs(depth);

        // Lexicographic comparison over coordinates 1..depth-1 (coordinate 0 was
        // handled by the epoch check above).
        for (int i = 1; i < depth; i++)
        {
            if (a.Timestamp[i] > b.Timestamp[i])
            {
                return(false);
            }

            // On the last compared coordinate, a back edge requires a[i] + 1 <= b[i].
            if (i + 1 == depth && increment && a.Timestamp[i] + 1 > b.Timestamp[i])
            {
                return(false);
            }

            if (a.Timestamp[i] < b.Timestamp[i])
            {
                return(true);
            }
        }

        return(true);
    }
}
/// <summary>
/// Enumerates the pointstamps at each impersonation limit reachable from
/// <paramref name="time"/>'s location, translating the timestamp to each limit's
/// coordinate depth. Yields nothing if the location has no impersonations.
/// </summary>
/// <param name="time">pointstamp to impersonate</param>
/// <returns>the minimal pointstamp at each impersonation limit</returns>
public IEnumerable <Pointstamp> EnumerateImpersonations(Pointstamp time)
{
    var limits = this.Impersonations[time.Location];
    if (limits == null)
    {
        yield break;
    }
    else
    {
        for (int i = 0; i < limits.Length; i++)
        {
            // How many coordinates are comparable between the source and the limit
            // (sign encodes a required increment; see ComparisonDepth construction).
            var depths = this.ComparisonDepth[time.Location][limits[i]];
            var coords = this.Graph[limits[i]].Depth;

            var newVersion = new Pointstamp();
            newVersion.Location = limits[i];
            newVersion.Timestamp.Length = coords;

            // Copy the comparable prefix of the timestamp; zero the rest.
            for (int j = 0; j < newVersion.Timestamp.Length; j++)
            {
                if (j < Math.Abs(depths))
                {
                    newVersion.Timestamp[j] = time.Timestamp[j];
                }
                else
                {
                    newVersion.Timestamp[j] = 0;
                }
            }

            // A negative depth encodes a back edge: the last comparable coordinate
            // must be advanced by one.
            if (depths < 0)
            {
                newVersion.Timestamp[Math.Abs(depths) - 1] = newVersion.Timestamp[Math.Abs(depths) - 1] + 1;
            }

            yield return(newVersion);
        }
    }
}
// Coordinate-wise (product-order) comparison of two pointstamps: a <= b exactly when
// every coordinate of a is <= the matching coordinate of b.
// for now, the assumption is that the first int is always the input lattice, which has no back edge.
// for now, this only works if a and b correspond to the same stage.
public bool ProductOrderLessThan(Pointstamp a, Pointstamp b)
{
    // Diagnostic warnings only; the comparison proceeds regardless.
    if (a.Timestamp.Length != b.Timestamp.Length)
    {
        Console.WriteLine("should have same length!");
    }

    if (a.Location != b.Location)
    {
        Console.WriteLine("meant to be called on pointstamps of the same stage");
    }

    var dominated = true;
    for (int index = 0; index < a.Timestamp.Length && dominated; index++)
    {
        dominated = a.Timestamp[index] <= b.Timestamp[index];
    }

    return dominated;
}
/// <summary>
/// Attempts to insert <paramref name="time"/> into the minimal antichain
/// <paramref name="list"/> under the product order.
/// </summary>
/// <param name="list">antichain to update</param>
/// <param name="time">candidate member</param>
/// <returns>true iff the time was inserted (i.e. no existing member reaches it)</returns>
public bool AddToAntiChain(List <Pointstamp> list, Pointstamp time)
{
    // Reject the candidate if some existing member already reaches it.
    foreach (var member in list)
    {
        if (ProductOrderLessThan(member, time))
        {
            return false;
        }
    }

    // Otherwise evict every member the candidate reaches, then insert it.
    list.RemoveAll(member => ProductOrderLessThan(time, member));
    list.Add(time);

    return true;
}
/// <summary>
/// Total-order comparison of two pointstamps: epoch (first coordinate) dominates,
/// then the causal order, with location breaking ties between incomparable pointstamps.
/// </summary>
/// <param name="a">first pointstamp</param>
/// <param name="b">second pointstamp</param>
/// <returns>negative if a orders first, positive if b orders first, zero if equal</returns>
public int CompareTo(Pointstamp a, Pointstamp b)
{
    if (a.Timestamp[0] != b.Timestamp[0])
    {
        return(a.Timestamp[0] - b.Timestamp[0]);
    }

    if (a.Equals(b))
    {
        return(0);
    }

    if (this.LessThan(a, b))
    {
        return(-1);
    }

    // BUG FIX: the original tested LessThan(a, b) a second time here, making this
    // branch unreachable; when b precedes a, b must order first.
    if (this.LessThan(b, a))
    {
        return(1);
    }

    // Incomparable under the causal order: break the tie by location.
    return(a.Location - b.Location);
}
// Returns a list (indexed by graph identifier) of lists of Pointstamps that can be reached at each collection, for the
// given array of times. Each sub-list will be a minimal antichain of Pointstamps at which a collection is reachable.
//
// If the sublist for a collection is null, that collection is not reachable from the given array of times.
public List <Pointstamp>[] DetermineReachabilityList(Pointstamp[] times)
{
    // Initially, the result for each collection is null, which corresponds to it not being reachable from the given times.
    var result = new List <Pointstamp> [this.Graph.Length];

    // For each time, perform breadth-first search from that time to each reachable collection.
    for (int time = 0; time < times.Length; time++)
    {
        // To perform the BFS, we need a list, which will act like a FIFO queue.
        var queue = new List <int>();

        // The BFS starts from the current time's stage
        var index = times[time].Location;
        if (result[index] == null)
        {
            result[index] = new List <Pointstamp>(0);
        }

        // Attempt to add the current time to the antichain for its own collection.
        if (AddToAntiChain(result[index], times[time]))
        {
            // If this succeeds, commence BFS from that collection.
            queue.Add(index);

            // While we haven't visited every element of the queue, move to the next element.
            // (The queue only ever grows, so index-based iteration doubles as dequeueing.)
            for (int i = 0; i < queue.Count; i++)
            {
                var collectionId = queue[i];

                // For each immediately downstream collection from the current collection, attempt to improve the antichain for the downstream.
                for (int k = 0; k < this.Graph[collectionId].Neighbors.Length; k++)
                {
                    var target = this.Graph[collectionId].Neighbors[k];
                    var updated = false;

                    if (result[target] == null)
                    {
                        result[target] = new List <Pointstamp>(0);
                    }

                    // For each element of the current collection's antichain, evaluate the minimal caused version at the downstream collection.
                    for (int j = 0; j < result[collectionId].Count; j++)
                    {
                        // make a new copy so that we can tamper with the contents
                        var localtime = new Pointstamp(result[collectionId][j]);
                        localtime.Location = target;

                        // If the target is a feedback stage, we must increment the last coordinate.
                        if (this.Graph[target].Advance)
                        {
                            localtime.Timestamp[localtime.Timestamp.Length - 1]++;
                        }

                        // If the target is an egress stage, we must strip off the last coordinate
                        // (and zero the slot just past the new length, clearing the stripped value).
                        if (this.Graph[target].Egress)
                        {
                            localtime.Timestamp.Length--;
                            localtime.Timestamp[localtime.Timestamp.Length] = 0;
                        }

                        // If the target is an ingress stage, we must add a new coordinate.
                        if (this.Graph[target].Ingress)
                        {
                            localtime.Timestamp.Length++;
                        }

                        if (localtime.Timestamp.Length != this.Graph[target].Depth)
                        {
                            throw new Exception("Something is horribly wrong in Reachability");
                        }

                        // If the computed minimal time for the downstream collection becomes a member of its antichain, we have updated it
                        // (and must search forward from that collection).
                        if (AddToAntiChain(result[target], localtime))
                        {
                            updated = true;
                        }
                    }

                    // Where the antichain has been updated, we must search forward from the downstream collection.
                    if (updated)
                    {
                        queue.Add(target);
                    }
                }
            }
        }
    }

    return(result);
}
// populates this.ComparisonDepth, indexed by collection and channel identifiers.
public void UpdateReachabilityPartialOrder(InternalComputation internalComputation)
{
    RegenerateGraph(internalComputation);

    var reachableDepths = new List <List <int> >(this.Graph.Length);

    // Sentinel coordinate value: after propagation, a coordinate still >= magicNumber
    // was preserved from the probe, and one strictly greater was incremented en route.
    var magicNumber = 37;

    //Console.Error.WriteLine("Updating reachability with {0} objects", Reachability.Graph.Length);

    // For each stage, probe reachability with a pointstamp of all-magicNumber coordinates
    // and derive, for every other stage, how many coordinates remain comparable.
    for (int i = 0; i < this.Graph.Length; i++)
    {
        var reachable = new List <int>(this.Graph.Length);

        var versionList = new Pointstamp[] { new Pointstamp(i, Enumerable.Repeat(magicNumber, this.Graph[i].Depth).ToArray()) };

        var reachabilityResults = this.DetermineReachabilityList(versionList);

        for (int j = 0; j < reachabilityResults.Length; j++)
        {
            var depth = 0;
            var increment = false;

            // for each element of the reachable set
            if (reachabilityResults[j] != null)
            {
                for (int k = 0; k < reachabilityResults[j].Count; k++)
                {
                    // Walk the preserved (>= magicNumber) prefix, tracking the deepest
                    // comparable coordinate and whether it was incremented (back edge).
                    for (int l = 0; l < reachabilityResults[j][k].Timestamp.Length && reachabilityResults[j][k].Timestamp[l] >= magicNumber; l++)
                    {
                        if (l + 1 > depth || l + 1 == depth && increment)
                        {
                            depth = l + 1;
                            increment = (reachabilityResults[j][k].Timestamp[l] > magicNumber);
                        }
                    }
                }
            }

            // A negative depth encodes "comparable to this depth, but strictly increasing".
            reachable.Add(increment ? -depth : depth);
        }

        reachableDepths.Add(reachable);
    }

    this.ComparisonDepth = reachableDepths;

    #region Set up impersonation

    // consider each stage / edge
    this.Impersonations = new int[this.Graph.Length][];
    for (int i = 0; i < this.Graph.Length; i++)
    {
        // not applicable to exchange edges.
        if (!this.Graph[i].Exchanges && !this.NoImpersonation.Contains(i))
        {
            var reached = new HashSet <int>();
            var limits = new HashSet <int>();

            var queue = new List <int>();
            //reached.Add(i);
            queue.Add(i);

            // BFS forward from stage i; stages whose downstream includes an exchange
            // terminate the search and become "limits" of the impersonation region.
            for (int j = 0; j < queue.Count; j++)
            {
                var candidate = queue[j];

                // check if queue[j] is interested in masquerading
                var available = true;
                for (int k = 0; k < this.Graph[candidate].Neighbors.Length; k++)
                {
                    var target = this.Graph[candidate].Neighbors[k];
                    if (this.Graph[target].Exchanges)
                    {
                        available = false;
                    }
                }

                if (!reached.Contains(candidate))
                {
                    reached.Add(candidate);

                    if (available)
                    {
                        for (int k = 0; k < this.Graph[candidate].Neighbors.Length; k++)
                        {
                            queue.Add(this.Graph[candidate].Neighbors[k]);
                        }
                    }
                    else
                    {
                        limits.Add(candidate);
                    }
                }
            }

            // if we found someone who wants to masquerade
            if (!limits.Contains(i) && limits.Count > 0)
            {
                Impersonations[i] = limits.ToArray();
            }
            else
            {
                Impersonations[i] = null;
            }
        }
    }

    #endregion
}
/// <summary>
/// Constructs a progress update pairing a pointstamp with a signed count change.
/// </summary>
/// <param name="p">pointstamp whose count changed</param>
/// <param name="d">signed change to the count</param>
internal Update(Pointstamp p, Int64 d)
{
    this.Pointstamp = p;
    this.Delta = d;
}
/// <summary>
/// Broadcasts a progress update by injecting the full update into the local consumer
/// (no centralizer is involved in this variant).
/// </summary>
/// <param name="time">pointstamp whose count changed</param>
/// <param name="update">signed change to the count</param>
public void BroadcastProgressUpdate(Pointstamp time, int update)
{
    this.consumer.InjectElement(time, update);
}