// Demo scenario: find a restaurant, fill a bucket with food (one menu item plus a
// custom-built one) and pay the resulting order.
public static void FoodDelivery()
{
    var curUser = new UserFoodDeliveryService();

    var delivery = new FoodDelivery();
    delivery.SearchProvider = new RestaurantSearch();

    // Search criteria for the restaurant lookup.
    var criteria = new Dictionary<string, object>
    {
        { "rating", 9 },
        { "title", "SomeValue" },
        { "location", "SomeLocation" }
    };

    var restaurants = delivery.SearchRestaurant(criteria);
    var selectedRestaurant = restaurants[0];
    var foodsOnMenu = selectedRestaurant.GetMenu().GetFoods();

    var bucket = new Bucket(selectedRestaurant);
    bucket.Add(foodsOnMenu[0], 2);

    // Assemble a customized food item with the fluent builder.
    var customFood = new FoodBuilder(new Food()).WithSouce().WithoutSugur().Build();
    bucket.Add(customFood, 1);

    var order = new Order();
    order.Create(bucket, curUser);
    curUser.PayOrder(order);
}
/// <summary>Serializes this history node's state into <paramref name="bucket"/>.</summary>
protected override void Store(Bucket bucket)
{
    bucket.Add(Key.TypeInfo, TypeInfo.HistoryNode);
    bucket.Add(Key.DocumentId, DocumentId);
    bucket.Add(Key.HistoryType, Type);
    bucket.AddEntity(Key.Id, Id);
    bucket.AddEntity(Key.Transition, Transition);
}
/// <summary>Serializes this param node's state into <paramref name="bucket"/>.</summary>
void IStoreSupport.Store(Bucket bucket)
{
    bucket.Add(Key.TypeInfo, TypeInfo.ParamNode);
    bucket.Add(Key.DocumentId, DocumentId);
    bucket.Add(Key.Name, Name);
    bucket.AddEntity(Key.Expression, Expression);
    bucket.AddEntity(Key.Location, Location);
}
/// <summary>Serializes this data node's state into <paramref name="bucket"/>.</summary>
void IStoreSupport.Store(Bucket bucket)
{
    bucket.Add(Key.TypeInfo, TypeInfo.DataNode);
    bucket.Add(Key.DocumentId, DocumentId);
    bucket.Add(Key.Id, Id);
    bucket.AddEntity(Key.Source, Source);
    bucket.AddEntity(Key.Expression, Expression);

    // InlineContent may be absent; store its value (or null) as a plain entry.
    bucket.Add(Key.InlineContent, InlineContent?.Value);
}
/// <summary>Serializes this final node's state into <paramref name="bucket"/>.</summary>
protected override void Store(Bucket bucket)
{
    bucket.Add(Key.TypeInfo, TypeInfo.FinalNode);
    bucket.Add(Key.DocumentId, DocumentId);
    bucket.AddEntity(Key.Id, Id);
    bucket.AddEntityList(Key.OnEntry, OnEntry);
    bucket.AddEntityList(Key.OnExit, OnExit);
    bucket.AddEntity(Key.DoneData, DoneData);
}
/// <summary>Serializes this custom-action node's state into <paramref name="bucket"/>.</summary>
protected override void Store(Bucket bucket)
{
    bucket.Add(Key.TypeInfo, TypeInfo.CustomActionNode);
    bucket.Add(Key.DocumentId, DocumentId);

    // Raw XML identity and payload of the custom action.
    bucket.Add(Key.Namespace, XmlNamespace);
    bucket.Add(Key.Name, XmlName);
    bucket.Add(Key.Content, Xml);

    bucket.AddEntityList(Key.LocationList, Locations);
    bucket.AddEntityList(Key.ValueList, Values);
}
// A bucket filled to capacity must reject further Add calls with InvalidOperationException.
public void CantAddMoreThanCapacity()
{
    var bucket = new Bucket<int>(5);

    // Fill to capacity.
    for (var item = 0; item < 5; item++)
    {
        bucket.Add(item);
    }

    Trap.Exception(() => bucket.Add(1)).ShouldBeInstanceOf<InvalidOperationException>();
}
/// <summary>Serializes this state-machine node's state into <paramref name="bucket"/>.</summary>
protected override void Store(Bucket bucket)
{
    bucket.Add(Key.TypeInfo, TypeInfo.StateMachineNode);
    bucket.Add(Key.DocumentId, DocumentId);
    bucket.Add(Key.Name, Name);
    bucket.Add(Key.DataModelType, DataModelType);
    bucket.Add(Key.Binding, Binding);
    bucket.AddEntity(Key.Script, Script);
    bucket.AddEntity(Key.DataModel, DataModel);
    bucket.AddEntity(Key.Initial, Initial);
    bucket.AddEntityList(Key.States, _stateMachine.States);
}
/// <summary>Serializes this parallel node's state into <paramref name="bucket"/>.</summary>
protected override void Store(Bucket bucket)
{
    bucket.Add(Key.TypeInfo, TypeInfo.ParallelNode);
    bucket.Add(Key.DocumentId, DocumentId);
    bucket.AddEntity(Key.Id, Id);
    bucket.AddEntity(Key.DataModel, DataModel);
    bucket.AddEntityList(Key.States, States);
    bucket.AddEntityList(Key.HistoryStates, HistoryStates);
    bucket.AddEntityList(Key.Transitions, Transitions);
    bucket.AddEntityList(Key.OnEntry, OnEntry);
    bucket.AddEntityList(Key.OnExit, OnExit);
    bucket.AddEntityList(Key.Invoke, Invoke);
}
/// <summary>Serializes this compound node's state into <paramref name="bucket"/>.</summary>
void IStoreSupport.Store(Bucket bucket)
{
    bucket.Add(Key.TypeInfo, TypeInfo.CompoundNode);
    bucket.Add(Key.DocumentId, DocumentId);
    bucket.AddEntity(Key.Id, Id);
    bucket.AddEntity(Key.Initial, Initial);
    bucket.AddEntity(Key.DataModel, DataModel);
    bucket.AddEntityList(Key.States, States);
    bucket.AddEntityList(Key.HistoryStates, HistoryStates);
    bucket.AddEntityList(Key.Transitions, Transitions);
    bucket.AddEntityList(Key.OnEntry, OnEntry);
    bucket.AddEntityList(Key.OnExit, OnExit);
    bucket.AddEntityList(Key.Invoke, Invoke);
}
// Writing through the indexer past the current fill index must throw.
public void CantSetElementPastCurrentIndex()
{
    var bucket = new Bucket<int>(2);
    bucket.Add(42);

    Trap.Exception(() => bucket[1] = 1).ShouldBeInstanceOf<IndexOutOfRangeException>();
}
// LastItem reflects the most recently added element.
public void CanGetLastItem()
{
    var bucket = new Bucket<int>(5);
    bucket.Add(42);

    bucket.LastItem.ShouldEqual(42);
}
/// <summary>
/// Runs the base anonymization and then keeps only the tuples whose last attribute
/// (the trailing "private" flag) equals 1, stripping that flag column from them.
/// </summary>
/// <returns>A bucket list containing only flagged tuples; empty buckets are dropped.</returns>
public override BucketList Run()
{
    var result = new BucketList();
    var generalizedBucketList = base.Run();

    foreach (var bucket in generalizedBucketList)
    {
        var privateBucket = new Bucket();

        foreach (var tuple in bucket)
        {
            // Flag lives in the last attribute slot; both reads happen before removal.
            var flagIndex = tuple.GetNumberOfAttributes() - 1;
            if (Int32.Parse(tuple.GetValue(flagIndex)) == 1)
            {
                tuple.RemoveAttribute(flagIndex);
                privateBucket.Add(tuple);
            }
        }

        if (privateBucket.Count > 0)
        {
            // Fix: this assignment used to run inside the tuple loop on every
            // iteration; doing it once after filtering is equivalent.
            privateBucket.node = bucket.node;
            result.Add(privateBucket);
        }
    }

    return result;
}
// An element below the fill index can be read and then overwritten via the indexer.
public void CanSetElemetByIndex()
{
    var bucket = new Bucket<int>(1);
    bucket.Add(42);
    bucket[0].ShouldEqual(42);

    bucket[0] = 1;
    bucket[0].ShouldEqual(1);
}
/// <summary>
/// Snapshots the current record's fields into a Bucket of name/value pairs.
/// </summary>
/// <returns>A bucket with one entry per field of the recordset.</returns>
public Bucket Fields()
{
    var res = new Bucket();
    for (int c = 0; c < FieldCount; c++)
    {
        // Fix: fetch the field object once per column; the original went through
        // the Fields.Item indexer (presumably COM-backed — confirm) twice per loop.
        var field = RS.Fields.Item(c);
        res.Add(field.Name, field.Value);
    }
    return res;
}
/// <summary>
/// Snapshots the current row of the data reader into a Bucket of name/value pairs.
/// </summary>
/// <returns>A bucket with one entry per column of the reader.</returns>
public Bucket Fields()
{
    var fields = new Bucket();
    for (var column = 0; column < FieldCount; column++)
    {
        fields.Add(DataReader.GetName(column), DataReader.GetValue(column));
    }
    return fields;
}
// Verifies that stale DHT nodes are pinged and replaced in age order: fills a bucket to
// capacity, backdates LastSeen on nodes 3, 1 and 5 (oldest first), then fakes ping
// responses for exactly those ports — in that order — from the QuerySent handler, and
// waits for the ReplaceNodeTask to complete within 4 s.
// NOTE(review): the handler's port/_nodeCount matching encodes the expected ping order;
// the sort and fake-receive timing are order-sensitive, so the code is left untouched.
// NOTE(review): the FIXME about asserting which node was replaced is still unresolved.
public void NodeReplaceTest() { _engine.TimeOut = TimeSpan.FromMilliseconds(25); var handle = new ManualResetEvent(false); var b = new Bucket(); for (var i = 0; i < Bucket.MaxCapacity; i++) { var n = new Node(NodeId.Create(), new IPEndPoint(IPAddress.Any, i)) { LastSeen = DateTime.UtcNow }; b.Add(n); } b.Nodes[3].LastSeen = DateTime.UtcNow.AddDays(-5); b.Nodes[1].LastSeen = DateTime.UtcNow.AddDays(-4); b.Nodes[5].LastSeen = DateTime.UtcNow.AddDays(-3); _engine.MessageLoop.QuerySent += delegate(object o, SendQueryEventArgs e) { if (!e.TimedOut) { return; } b.Nodes.Sort(); if ((e.EndPoint.Port == 3 && _nodeCount == 0) || (e.EndPoint.Port == 1 && _nodeCount == 1) || (e.EndPoint.Port == 5 && _nodeCount == 2)) { var n = b.Nodes.Find(delegate(Node no) { return(no.EndPoint.Port == e.EndPoint.Port); }); n.Seen(); var response = new PingResponse(n.Id, e.Query.TransactionId); DhtEngine.MainLoop.Queue(delegate { //System.Threading.Thread.Sleep(100); Console.WriteLine("Faking the receive"); _listener.RaiseMessageReceived(response, _node.EndPoint); }); _nodeCount++; } }; var task = new ReplaceNodeTask(_engine, b, null); // FIXME: Need to assert that node 0.0.0.0:0 is the one which failed - i.e. it should be replaced task.Completed += (o, e) => handle.Set(); task.Execute(); Assert.IsTrue(handle.WaitOne(4000, false), "#10"); }
// Inserts (or, when overwrite is set, replaces) a key/value pair in this AVL-balanced
// hash tree node. Recurses left or right by hash comparison, creating a fresh leaf via
// NewForKvp when the child slot is empty, and rebalancing the affected side with
// AvlBalance; equal hashes are delegated to the collision Bucket. A null return from a
// recursive call, Bucket.Add, or this method means "no change was made" (e.g. key
// already present without overwrite) and is propagated upward unchanged.
// Must not be called on an empty node — that is an internal invariant violation.
// Under ASSERTS, verifies the count changed by at most one, the tree stayed balanced,
// and the key is now reachable from this subtree.
private Node AvlAdd(int hash, TKey key, TValue value, Lineage lineage, bool overwrite) { #if ASSERTS var initialCount = Count; #endif if (IsEmpty) { throw ImplErrors.Invalid_invocation("Empty Node"); } Node ret; if (hash < Hash) { var newLeft = Left.IsEmpty ? NewForKvp(hash, key, value, lineage) : Left.AvlAdd(hash, key, value, lineage, overwrite); if (newLeft == null) { return(null); } ret = AvlBalance(newLeft, Right, lineage); } else if (hash > Hash) { var newRight = Right.IsEmpty ? NewForKvp(hash, key, value, lineage) : Right.AvlAdd(hash, key, value, lineage, overwrite); if (newRight == null) { return(null); } ret = AvlBalance(Left, newRight, lineage); } else { var newBucket = Bucket.Add(key, value, lineage, overwrite); if (newBucket == null) { return(null); } ret = WithBucket(newBucket, lineage); } #if ASSERTS ret.Count.AssertEqual(x => x <= initialCount + 1 && x >= initialCount); ret.IsBalanced.AssertTrue(); ret.Root_Contains(key).AssertTrue(); //ret.AllBuckets.Count(b => b.Find(key).IsSome).Is(1); #endif return(ret); }
// Files the activation under the collection bucket for the given ticket and records
// the ticket on the activation itself.
// Note: callers are expected to hold the activation lock.
private void Add(ActivationData item, DateTime ticket)
{
    item.ResetCollectionCancelledFlag();

    var bucket = buckets.GetOrAdd(ticket, key => new Bucket(key, quantum));
    bucket.Add(item);

    item.SetCollectionTicket(ticket);
}
/// <summary>
/// Median-cut split: sorts this bucket's colors along their channel range and divides
/// them into two halves.
/// </summary>
/// <param name="left">Receives the first half (or the sole color when only one exists).</param>
/// <param name="right">Receives the second half; stays empty for 0 or 1 colors.</param>
public void Split(out Bucket left, out Bucket right)
{
    Color32 range;
    CalcRange(out range);
    SortColors(range);

    left = new Bucket();
    right = new Bucket();

    var total = _colors.Count;
    if (total == 1)
    {
        // A single color cannot be divided; it all goes to the left bucket.
        left.Add(_colors);
    }
    else if (total >= 2)
    {
        var half = total / 2;
        left.Add(_colors.GetRange(0, half));
        right.Add(_colors.GetRange(half, total - half));
    }
}
internal static Distribution CreateShifted(ICollection <Configuration> configs) { double domainMin = configs.Min(config => config.nfpValues[GlobalState.currentNFP]); double domainMax = configs.Max(config => config.nfpValues[GlobalState.currentNFP]); double bucketSize = (domainMax - domainMin) / configs.Count * 2; Distribution dist = new Distribution(bucketSize, domainMin - bucketSize / 2); foreach (Configuration config in configs) { double nfpValue = config.nfpValues[GlobalState.currentNFP]; Bucket bucket = dist.GetBucketContaining(nfpValue); bucket.Add(config); } return(dist); }
/// <summary>
/// Rate-limit check: records <paramref name="requestId"/> against the user's bucket
/// when quota remains after the restore rate has been applied.
/// </summary>
/// <param name="userId">User whose quota is consumed.</param>
/// <param name="requestId">Request to record when allowed.</param>
/// <returns>true when the request fits within the user's remaining quota.</returns>
public bool IsAllowed(int userId, int requestId)
{
    // Fix: use the valueFactory overload so a Bucket is only allocated when the key
    // is actually missing — the old value overload built a throwaway Bucket on every
    // call (same pattern as the other GetOrAdd call sites in this codebase).
    Bucket bucket = buckets.GetOrAdd(userId, id => new Bucket(id, MaximumRequestQuota));

    applyRestoreRate(bucket);

    // NOTE(review): the quota check and Add are not atomic; concurrent calls for the
    // same user could slightly exceed the quota — confirm whether that is acceptable.
    if (bucket.RequestQuota > 0)
    {
        bucket.Add(requestId);
        return true;
    }

    return false;
}
/// <summary>
/// Converts each row of the DataTable into a qid-tagged tuple of trimmed string values
/// and collects them into a Bucket.
/// </summary>
/// <param name="dt">Source table; every column of every row is copied.</param>
/// <returns>The populated bucket, one tuple per row.</returns>
public Bucket ConvertDataTableToBucket(DataTable dt)
{
    var bucket = new Bucket();

    foreach (DataRow row in dt.Rows)
    {
        var tuple = new data.Tuple();
        tuple.SetQid(qid);

        foreach (DataColumn column in dt.Columns)
        {
            tuple.AddValue(row[column].ToString().Trim());
        }

        bucket.Add(tuple);
    }

    return bucket;
}
// Elements added below capacity are retrievable in order, and the bookkeeping
// properties (Buffer, FreeSpace, Index, IsEmpty) reflect the fill level.
public void CanAddElements()
{
    var bucket = new Bucket<int>(10);

    for (var value = 0; value < 5; value++)
    {
        bucket.Add(value);
    }

    for (var position = 0; position < 5; position++)
    {
        bucket[position].ShouldEqual(position);
    }

    bucket.Buffer.ShouldNotBeNull();
    bucket.Buffer.Length.ShouldEqual(10);
    bucket.FreeSpace.ShouldEqual(5);
    bucket.Index.ShouldEqual(5);
    bucket.IsEmpty.ShouldBeFalse();
}
/// <summary>Serializes this invoke node's state into <paramref name="bucket"/>.</summary>
void IStoreSupport.Store(Bucket bucket)
{
    bucket.Add(Key.TypeInfo, TypeInfo.InvokeNode);
    bucket.Add(Key.DocumentId, DocumentId);
    bucket.Add(Key.Id, Id);
    bucket.Add(Key.Type, Type);
    bucket.Add(Key.Source, Source);
    bucket.Add(Key.AutoForward, AutoForward);
    bucket.AddEntity(Key.TypeExpression, TypeExpression);
    bucket.AddEntity(Key.SourceExpression, SourceExpression);
    bucket.AddEntity(Key.IdLocation, IdLocation);
    bucket.AddEntityList(Key.NameList, NameList);
    bucket.AddEntityList(Key.Parameters, Parameters);
    bucket.AddEntity(Key.Finalize, Finalize);
    bucket.AddEntity(Key.Content, Content);
}
/// <summary>
/// Generalizes or specializes the input bucket according to the given hierarchies and
/// the hierarchy levels stored on the bucket's node.
/// </summary>
/// <returns>The specialized or generalized version of the input bucket.</returns>
protected Bucket GeneralizeBucket(Bucket bucket, List<IHierarchy> hierarchies)
{
    // Apply the node's recorded generalization level to each hierarchy.
    for (var i = 0; i < hierarchies.Count; i++)
    {
        hierarchies[i].SetLevel(bucket.node.generalizations[i]);
    }

    // Rebuild the equivalence class from the original tuples via the id index;
    // the id is assumed to live at attribute position 0.
    var rebuilt = new Bucket();
    foreach (data.Tuple tuple in bucket)
    {
        var id = Convert.ToInt32(tuple.GetValue(0));
        rebuilt.Add(idHashIndex[id]);
    }

    var generalizedEq = rebuilt.Generalize(hierarchies);
    generalizedEq.node = CopyNode(bucket.node);
    return generalizedEq;
}
/// <summary>
/// Runs the base anonymization, selects the best permuted table, keeps only tuples
/// whose trailing "private" flag equals 1 (stripping the flag), and permutes the qid
/// values of each surviving bucket.
/// </summary>
/// <returns>The filtered, permuted bucket list, or null when any bucket is missing
/// (not delta-present) — the caller must then increase k.</returns>
public override BucketList Run()
{
    var result = new BucketList();
    var generalizedBucketList = base.Run();
    var genPerm = GetBestPermutedTable(generalizedBucketList);

    foreach (var bucket in genPerm)
    {
        // A null bucket means the table is not delta-present; abort so the caller
        // can retry with a larger k.
        if (bucket == null)
        {
            return null;
        }

        var privateBucket = new Bucket();
        foreach (var tuple in bucket)
        {
            var flagIndex = tuple.GetNumberOfAttributes() - 1;
            if (Int32.Parse(tuple.GetValue(flagIndex)) == 1)
            {
                tuple.RemoveAttribute(flagIndex);
                privateBucket.Add(tuple);
            }
        }

        if (privateBucket.Count > 0)
        {
            // Fix: node assignment hoisted out of the tuple loop — assigning once
            // after filtering is equivalent to reassigning on every iteration.
            privateBucket.node = bucket.node;

            foreach (int i in qid)
            {
                privateBucket.PermuteValues(i);
            }

            result.Add(privateBucket);
        }
    }

    return result;
}
/// <summary>
/// Collects the selected Choose-From-List values for the given columns into a Bucket,
/// one entry per selected row and column.
/// </summary>
/// <param name="pVal">The SBO item event; expected to be a Choose-From-List event.</param>
/// <param name="columns">Column identifiers to read from each selected row.</param>
/// <returns>The populated bucket, or null when the event is not a Choose-From-List
/// event or carries no selection.</returns>
public static Bucket GetValues(ref SBOItemEventArg pVal, params dynamic[] columns)
{
    // Fix: the old `as` cast was dereferenced without a null check and threw
    // NullReferenceException for non-CFL events; fail soft with null instead.
    var cflEvent = pVal as SAPbouiCOM.ISBOChooseFromListEventArg;
    if (cflEvent == null)
    {
        return null;
    }

    var selected = cflEvent.SelectedObjects;
    if (selected == null)
    {
        return null;
    }

    var bucket = new Bucket(selected.Rows.Count);
    for (int row = 0; row < selected.Rows.Count; row++)
    {
        foreach (var column in columns)
        {
            bucket.Add(column, selected.GetValue(column, row), row);
        }
    }

    return bucket;
}
/// <summary>
/// Runs the base anonymization and keeps only the tuples whose last attribute (the
/// trailing "private" flag) equals 1, stripping that flag column from them.
/// </summary>
/// <returns>A bucket list containing only flagged tuples; empty buckets are dropped.</returns>
public override BucketList Run()
{
    var result = new BucketList();
    var generalizedBucketList = base.Run();

    foreach (var bucket in generalizedBucketList)
    {
        var privateBucket = new Bucket();

        foreach (var tuple in bucket)
        {
            // Flag lives in the last attribute slot; both reads happen before removal.
            var flagIndex = tuple.GetNumberOfAttributes() - 1;
            if (Int32.Parse(tuple.GetValue(flagIndex)) == 1)
            {
                tuple.RemoveAttribute(flagIndex);
                privateBucket.Add(tuple);
            }
        }

        if (privateBucket.Count > 0)
        {
            // Fix: node assignment hoisted out of the tuple loop; assigning once
            // after filtering is equivalent to reassigning on every iteration.
            privateBucket.node = bucket.node;
            result.Add(privateBucket);
        }
    }

    return result;
}
public PaletteMedianCut(List <UInt32> colors, int targetCount) { _buckets = new List <Bucket>(); Bucket bucket = new Bucket(); bucket.Add(colors); _buckets.Add(bucket); while (_buckets.Count < targetCount) { List <Bucket> newBuckets = new List <Bucket>(); foreach (Bucket currentBucket in _buckets) { Bucket left, right; currentBucket.Split(out left, out right); newBuckets.Add(left); newBuckets.Add(right); } _buckets = newBuckets; } palette = CreatePalette(); }
// Strict Mondrian partition attempt: tentatively lowers the generalization level of one
// hierarchy on the bucket's node, re-generalizes the bucket at that level, and then
// tries every distinct value of that dimension as a splitting value. For each candidate
// it partitions tuples into left (matching) and right (rest); a left equal in size to
// the whole bucket is enqueued directly (maybe further specializable), otherwise the
// right side is generalized back (to the original node when it still has distinct
// values, else kept at the specialized node) and both halves are enqueued only when
// both remain anonymous. On failure the node is restored and false is returned.
// NOTE(review): the method mutates bucket.node several times and relies on CopyNode at
// each step; the exact sequence of node copies/restores is order-critical, so the code
// is deliberately left byte-identical.
/// <summary> /// Strict partitioning step of the Mondrian algorithm. /// </summary> /// <returns>Returns true, if it is possible to split the bucket, /// false otherwise.</returns> private bool FindStrictSplit(Bucket bucket, List<IHierarchy> hierarchies, int hierarchyIndex) { Node originalNode = CopyNode(bucket.node); Node specializedNode = CopyNode(bucket.node); int level = specializedNode.generalizations[hierarchyIndex]; if (level != 0) specializedNode.generalizations[hierarchyIndex] = level - 1; else return false; bucket.node = CopyNode(specializedNode); Bucket specializedEQClass = GeneralizeBucket(bucket, hierarchies); int dimension = hierarchies[hierarchyIndex].GetQid(); foreach (var splittingValue in GetDistinctValues(specializedEQClass, dimension)) { Bucket right = new Bucket(), left = new Bucket(); foreach (var t in specializedEQClass) { if (t.GetValue(dimension).Equals(splittingValue)) left.Add(t); else right.Add(t); } // If the left has the same size as the original eq, maybe it is possible to release more generalizations. left.node = CopyNode(specializedNode); if (left.Count == bucket.Count) { queue.Enqueue(left); return true; } Node leftNode = left.node; var generalizedRight = new Bucket(); generalizedRight.node = CopyNode(originalNode); //generalize back the right side/the rest if (right.Count > 0) { if (right.HasDistinctValuesAt(dimension) != -1) { right.node = CopyNode(originalNode); generalizedRight = GeneralizeBucket(right, hierarchies); } else { right.node = CopyNode(specializedNode); generalizedRight = right; generalizedRight.node = CopyNode(specializedNode); } } if (IsAnonymous(left) && IsAnonymous(generalizedRight)) { queue.Enqueue(generalizedRight); queue.Enqueue(left); return true; } } bucket.node = CopyNode(originalNode); return false; }
/// <summary>
/// Builds a Bucket from a DataTable: every row becomes a qid-tagged tuple whose values
/// are the trimmed string forms of the row's cells.
/// </summary>
/// <param name="dt">Source table to convert.</param>
/// <returns>A bucket holding one tuple per table row.</returns>
public Bucket ConvertDataTableToBucket(DataTable dt)
{
    var result = new Bucket();

    foreach (DataRow dataRow in dt.Rows)
    {
        var rowTuple = new data.Tuple();
        rowTuple.SetQid(qid);

        foreach (DataColumn dataColumn in dt.Columns)
        {
            var cell = dataRow[dataColumn].ToString().Trim();
            rowTuple.AddValue(cell);
        }

        result.Add(rowTuple);
    }

    return result;
}
/// <summary>Serializes this finalize node's state into <paramref name="bucket"/>.</summary>
void IStoreSupport.Store(Bucket bucket)
{
    bucket.Add(Key.TypeInfo, TypeInfo.FinalizeNode);
    bucket.AddEntityList(Key.Action, Action);
}
/// <summary>
/// Generalizes or specializes the input bucket according to the given hierarchies and
/// the hierarchy levels recorded on the bucket's node.
/// </summary>
/// <returns>The specialized or generalized version of the input bucket.</returns>
protected Bucket GeneralizeBucket(Bucket bucket, List<IHierarchy> hierarchies)
{
    // Push the node's generalization levels down into the hierarchies.
    for (var h = 0; h < hierarchies.Count; h++)
    {
        hierarchies[h].SetLevel(bucket.node.generalizations[h]);
    }

    // Re-collect the original tuples through the id hash index; the id attribute
    // is assumed to sit at position 0.
    var collected = new Bucket();
    foreach (data.Tuple tuple in bucket)
    {
        collected.Add(idHashIndex[Convert.ToInt32(tuple.GetValue(0))]);
    }

    var generalized = collected.Generalize(hierarchies);
    generalized.node = CopyNode(bucket.node);
    return generalized;
}
// Verifies that stale DHT nodes are pinged and replaced oldest-first: fills a bucket to
// capacity, backdates LastSeen on nodes 3, 1 and 5 (5 days, 4 days, 3 days ago), then
// fakes ping responses for exactly those ports in that order from the QuerySent
// handler, and waits up to 4 s for the ReplaceNodeTask to complete.
// NOTE(review): the port/nodeCount matching in the handler encodes the expected ping
// order and the Sort/fake-receive interplay is timing-sensitive; code left untouched.
// NOTE(review): the FIXME about asserting which node was replaced is still unresolved.
public void NodeReplaceTest() { engine.TimeOut = TimeSpan.FromMilliseconds(25); ManualResetEvent handle = new ManualResetEvent(false); Bucket b = new Bucket(); for (int i = 0; i < Bucket.MaxCapacity; i++) { Node n = new Node(NodeId.Create(), new IPEndPoint(IPAddress.Any, i)); n.LastSeen = DateTime.UtcNow; b.Add(n); } b.Nodes[3].LastSeen = DateTime.UtcNow.AddDays(-5); b.Nodes[1].LastSeen = DateTime.UtcNow.AddDays(-4); b.Nodes[5].LastSeen = DateTime.UtcNow.AddDays(-3); engine.MessageLoop.QuerySent += delegate(object o, SendQueryEventArgs e) { if (!e.TimedOut) return; b.Nodes.Sort(); if ((e.EndPoint.Port == 3 && nodeCount == 0) || (e.EndPoint.Port == 1 && nodeCount == 1) || (e.EndPoint.Port == 5 && nodeCount == 2)) { Node n = b.Nodes.Find(delegate(Node no) { return no.EndPoint.Port == e.EndPoint.Port; }); n.Seen(); PingResponse response = new PingResponse(n.Id, e.Query.TransactionId); DhtEngine.MainLoop.Queue(delegate { //System.Threading.Thread.Sleep(100); Console.WriteLine("Faking the receive"); listener.RaiseMessageReceived(response, node.EndPoint); }); nodeCount++; } }; ReplaceNodeTask task = new ReplaceNodeTask(engine, b, null); // FIXME: Need to assert that node 0.0.0.0:0 is the one which failed - i.e. it should be replaced task.Completed += delegate(object o, TaskCompleteEventArgs e) { handle.Set(); }; task.Execute(); Assert.IsTrue(handle.WaitOne(4000, false), "#10"); }
/// <summary>
/// Runs the base anonymization, selects the best permuted table, keeps only tuples
/// whose trailing "private" flag equals 1 (stripping the flag column), and permutes
/// the qid values of each surviving bucket.
/// </summary>
/// <returns>The filtered, permuted bucket list, or null when any bucket is missing
/// (not delta-present) — the caller must then increase k.</returns>
public override BucketList Run()
{
    var result = new BucketList();
    var generalizedBucketList = base.Run();
    var genPerm = GetBestPermutedTable(generalizedBucketList);

    foreach (var bucket in genPerm)
    {
        // A null bucket means the table is not delta-present; abort so the caller
        // can retry with a larger k.
        if (bucket == null)
            return null;

        var privateBucket = new Bucket();
        foreach (var tuple in bucket)
        {
            var flagIndex = tuple.GetNumberOfAttributes() - 1;
            if (Int32.Parse(tuple.GetValue(flagIndex)) == 1)
            {
                tuple.RemoveAttribute(flagIndex);
                privateBucket.Add(tuple);
            }
        }

        if (privateBucket.Count > 0)
        {
            // Fix: hoisted out of the tuple loop — assigning the node once after
            // filtering is equivalent to reassigning it on every iteration.
            privateBucket.node = bucket.node;

            foreach (int i in qid)
                privateBucket.PermuteValues(i);

            result.Add(privateBucket);
        }
    }

    return result;
}