Example #1
1
        /// <summary>
        /// Invokes the action method wrapped in the registered action filters.
        /// Continuations are composed inside-out, so the filter list is reversed to
        /// keep the first-declared filter outermost.
        /// </summary>
        protected override ActionExecutedContext InvokeActionMethodWithFilters(System.Reflection.MethodInfo methodInfo, IDictionary<string, object> parameters, IList<IActionFilter> filters)
        {
            if (methodInfo == null)
            {
                throw new ArgumentNullException("methodInfo");
            }
            if (parameters == null)
            {
                throw new ArgumentNullException("parameters");
            }
            if (filters == null)
            {
                throw new ArgumentNullException("filters");
            }

            ActionExecutingContext preContext = new ActionExecutingContext(ControllerContext, parameters);

            // Innermost continuation: actually run the action method.
            Func<ActionExecutedContext> pipeline = () =>
                new ActionExecutedContext(ControllerContext, false /* canceled */, null /* exception */)
                {
                    Result = InvokeActionMethod(methodInfo, parameters)
                };

            // Wrap each filter around the continuation built so far; reversing the
            // list keeps the first filter outermost because composition is inside-out.
            foreach (IActionFilter filter in filters.Reverse())
            {
                Func<ActionExecutedContext> next = pipeline;
                IActionFilter current = filter;
                pipeline = () => InvokeActionMethodFilter(current, preContext, next);
            }
            return pipeline();
        }
Example #2
1
 /// <summary>
 /// Builds the string form of a converted number. The calculated digits arrive
 /// least-significant first, so they are reversed before being joined, then the
 /// result is zero-padded to the fixed width configured for the target base.
 /// </summary>
 private static string ConvertCalculatedCharactersToBaseString(IList<char> characters, int baseValue)
 {
     var digits = characters.Reverse().ToArray();
     return new string(digits).PadLeft(Paddings[baseValue], '0');
 }
 /// <summary>
 /// Runs every exception filter (in reverse registration order) against the
 /// thrown exception, then records the resulting action on the controller context.
 /// </summary>
 protected override ExceptionContext InvokeExceptionFilters(ControllerContext controllerContext, IList<IExceptionFilter> filters, System.Exception exception)
 {
     var exceptionContext = new ExceptionContext(controllerContext, exception);
     foreach (IExceptionFilter exceptionFilter in filters.Reverse())
     {
         exceptionFilter.OnException(exceptionContext);
     }
     SetExceptionResult(controllerContext, exceptionContext);
     return exceptionContext;
 }
 /// <summary>
 /// Recursively reconstructs partitions of <paramref name="s"/> by walking the
 /// precomputed predecessor lists backwards from index <paramref name="i"/>.
 /// Pieces are accumulated back-to-front in <paramref name="temp"/>, so once the
 /// walk passes the start of the string they are reversed into a result.
 /// </summary>
 private void GenerateResults(List<int>[] prevs, string s, int i, IList<string> temp, IList<IList<string>> results)
 {
     if (i >= 0)
     {
         foreach (var prev in prevs[i])
         {
             // Take the segment s[prev+1 .. i], recurse on the prefix, then backtrack.
             temp.Add(s.Substring(prev + 1, i - prev));
             GenerateResults(prevs, s, prev, temp, results);
             temp.RemoveAt(temp.Count - 1);
         }
     }
     else
     {
         // Reached the start of the string: temp holds one complete partition,
         // collected in reverse order.
         results.Add(temp.Reverse().ToList());
     }
 }
Example #5
1
 /// <summary>
 /// Applies the rules to <paramref name="word"/> in reverse order and returns the
 /// first non-null transformation; if no rule matches, the original word is
 /// returned unchanged.
 /// </summary>
 private string ApplyRules(IList<Rule> rules, string word)
 {
     foreach (var rule in rules.Reverse())
     {
         // Each rule is tried against the original word, not a partial result.
         var transformed = rule.Apply(word);
         if (transformed != null)
             return transformed;
     }
     // Bug fix: the original assigned each rule's result into its fallback
     // variable, so when every rule returned null the method returned null
     // instead of the unmodified word it was initialized to return.
     return word;
 }
Example #6
1
 /// <summary>
 /// Releases and disposes each interceptor in reverse creation order. Failures
 /// are logged rather than rethrown so one faulty interceptor cannot prevent
 /// the remaining ones from being cleaned up.
 /// </summary>
 private void SaveDisposeInterceptors(IList<IRequestHandlerInterceptor> interceptors)
 {
     foreach (var handlerInterceptor in interceptors.Reverse())
     {
         try
         {
             // Release from the container first, then dispose the instance itself.
             IoC.Container.Release(handlerInterceptor);
             handlerInterceptor.Dispose();
         }
         catch (Exception disposeError)
         {
             _logger.Error("error disposing " + handlerInterceptor, disposeError);
         }
     }
 }
Example #7
0
        /// <summary>
        /// Initializes a new instance of the <see cref="CompilationEngine"/> class.
        /// </summary>
        /// <param name="classTokensList">The class tokens, in source order.</param>
        public CompilationEngine(IList<Pair<string, string>> classTokensList)
        {
            // The Stack<T>(IEnumerable<T>) constructor pushes items in enumeration
            // order, so seeding it with the reversed list makes Pop() yield the
            // tokens in their original source order.
            this.classTokens = new Stack<Pair<string, string>>(classTokensList.Reverse());
        }
Example #8
0
        /// <summary>
        /// Runs the action method inside the registered action filters. The filter
        /// continuations are composed inside-out, so the list is reversed to keep the
        /// first-declared filter outermost.
        /// </summary>
        protected override ActionExecutedContext InvokeActionMethodWithFilters(ControllerContext controllerContext, IList<IActionFilter> filters, ActionDescriptor actionDescriptor, IDictionary<string, object> parameters)
        {
            var preContext = new ActionExecutingContext(controllerContext, actionDescriptor, parameters);

            // Innermost continuation: invoke the action itself.
            Func<ActionExecutedContext> pipeline = () =>
                new ActionExecutedContext(controllerContext, actionDescriptor, false /* canceled */, null /* exception */)
                {
                    Result = InvokeActionMethod(controllerContext, actionDescriptor, parameters)
                };

            // Wrap each filter around the continuation built so far.
            foreach (var filter in filters.Reverse())
            {
                var next = pipeline;
                var current = filter;
                pipeline = () => InvokeActionMethodFilter(current, preContext, next);
            }
            return pipeline();
        }
 /// <summary>
 /// Appends sort expressions for the group descriptors (applied in reverse so the
 /// first group becomes the primary sort key) followed by the explicit sort
 /// descriptors, and returns the sorted query.
 /// </summary>
 public static IQueryable<UserModel> ApplySorting(this IQueryable<UserModel> data, IList<GroupDescriptor> groupDescriptors, IList<SortDescriptor> sortDescriptors)
 {
     if (groupDescriptors.Any())
     {
         foreach (var group in groupDescriptors.Reverse())
         {
             data = AddSortExpression(data, group.SortDirection, group.Member);
         }
     }

     if (sortDescriptors.Any())
     {
         // Sort descriptors are applied in declaration order, unlike the groups.
         foreach (var sort in sortDescriptors)
         {
             data = AddSortExpression(data, sort.SortDirection, sort.Member);
         }
     }

     return data;
 }
Example #10
0
 // Coroutine: after the next physics step, replays the recorded posture history
 // onto the objects following this one, then re-schedules itself on completion.
 IEnumerator HistoryToTail(IList<IList<Posture>> history)
 {
     yield return new WaitForFixedUpdate();
     // Pair each trailing object with one posture buffer. The history is reversed
     // before zipping — assumes the most recent buffers should drive the segments
     // nearest this object; TODO confirm against how postureHistory is filled.
     gameObject
         .AfterSelf()
         .Zip(history.Reverse(), (tail, buf) => new { tail, buf })
         .Select(t => Observable.FromCoroutine(() => BufferToTail(t.tail.transform, t.buf)))
         .WhenAll()
         .Subscribe(
             onNext: _ =>
             {
             },
             onCompleted: () =>
             {
                 // Loop: start the next replay pass once every segment coroutine finishes.
                 currentTailCoroutine = StartCoroutine(HistoryToTail(postureHistory));
             })
         .AddTo(GameMaster.Current)
         ;
     Debug.Log("yield return end");
 }
Example #11
0
        /// <summary>
        /// Restores saved mod buffs onto the player. Each tag is inserted at its saved
        /// index; iterating the list in reverse means every insertion shifts the
        /// previously restored buffs up one slot, reproducing the original order.
        /// </summary>
        internal static void LoadModBuffs(Player player, IList<TagCompound> list)
        {
            //buffs list is guaranteed to be compacted
            int buffCount = Player.maxBuffs;
            while (buffCount > 0 && player.buffType[buffCount - 1] == 0)
                buffCount--;

            //iterate the list in reverse, insert each buff at its index and push the buffs after it up a slot
            foreach (var tag in list.Reverse()) {
                var mod = ModLoader.GetMod(tag.GetString("mod"));
                int type = mod?.BuffType(tag.GetString("name")) ?? 0;
                // Skip buffs whose mod (or buff within the mod) is no longer loaded.
                if (type == 0)
                    continue;

                // Clamp the saved index to the current number of live buffs, then shift
                // the type/time arrays right by one to open the insertion slot.
                int index = Math.Min(tag.GetByte("index"), buffCount);
                Array.Copy(player.buffType, index, player.buffType, index+1, Player.maxBuffs-index-1);
                Array.Copy(player.buffTime, index, player.buffTime, index+1, Player.maxBuffs-index-1);
                player.buffType[index] = type;
                player.buffTime[index] = tag.GetInt("time");
            }
        }
Example #12
0
        /// <summary>
        /// Extracts the dotted module name being completed in an "import" statement
        /// from the classified tokens, storing its parts in _namespace.
        /// </summary>
        internal ImportCompletionAnalysis(IList<ClassificationSpan> tokens, ITrackingSpan span, ITextBuffer textBuffer, CompletionOptions options)
            : base(span, textBuffer, options)
        {
            Debug.Assert(tokens[0].Span.GetText() == "import");

            // Count of tokens up to and including the last comma; 0 when the statement
            // contains no comma (Reverse + SkipWhile then consumes every token).
            int beforeLastComma = tokens
                .Reverse()
                .SkipWhile(tok => !tok.ClassificationType.IsOfType(JPredefinedClassificationTypeNames.Comma))
                .Count();

            if (tokens.Count >= 2 && beforeLastComma < tokens.Count) {
                int spanEnd = Span.GetEndPoint(textBuffer.CurrentSnapshot).Position;
                var nameParts = new List<string>();
                bool removeLastPart = false, lastWasError = false;
                // Walk the dotted name after the last comma; when there is no comma,
                // skip just the leading "import" keyword.
                foreach(var tok in tokens.Skip(beforeLastComma > 0 ? beforeLastComma : 1)) {
                    if (tok.ClassificationType.IsOfType(PredefinedClassificationTypeNames.Identifier)) {
                        nameParts.Add(tok.Span.GetText());
                        // Only remove the last part if the trigger point is
                        // not right at the end of it.
                        removeLastPart = (tok.Span.End.Position != spanEnd);
                    } else if (tok.ClassificationType.IsOfType(JPredefinedClassificationTypeNames.Dot)) {
                        removeLastPart = false;
                    } else {
                        // Any token other than identifier/dot means the name is malformed.
                        lastWasError = true;
                        break;
                    }
                }

                if (!lastWasError) {
                    if (removeLastPart && nameParts.Count > 0) {
                        nameParts.RemoveAt(nameParts.Count - 1);
                    }
                    _namespace = nameParts.ToArray();
                }
            }
        }
		/// <summary>
		/// Lets the user pick an insertion point interactively and inserts the given
		/// AST nodes there. The returned task completes when the edit is applied (or
		/// never, if no insertion point exists or the user cancels — TODO confirm
		/// intended cancellation behavior; Rollback() runs but the task is not faulted).
		/// </summary>
		public override Task<Script> InsertWithCursor (string operation, InsertPosition defaultPosition, IList<AstNode> nodes)
		{
			var tcs = new TaskCompletionSource<Script> ();
			var editor = context.TextEditor;
			DocumentLocation loc = context.TextEditor.Caret.Location;
			var declaringType = context.ParsedDocument.GetInnermostTypeDefinition (loc);
			var mode = new InsertionCursorEditMode (
				editor.Parent,
				MonoDevelop.Ide.TypeSystem.CodeGenerationService.GetInsertionPoints (context.TextEditor, context.ParsedDocument, declaringType));
			if (mode.InsertionPoints.Count == 0) {
				MessageService.ShowError (
					GettextCatalog.GetString ("No valid insertion point can be found in type '{0}'.", declaringType.Name)
				);
				return tcs.Task;
			}
			var helpWindow = new Mono.TextEditor.PopupWindow.InsertionCursorLayoutModeHelpWindow ();
			helpWindow.TitleText = operation;
			mode.HelpWindow = helpWindow;
			
			// Pre-select the insertion point that best matches the requested position
			// relative to the caret.
			switch (defaultPosition) {
			case InsertPosition.Start:
				mode.CurIndex = 0;
				break;
			case InsertPosition.End:
				mode.CurIndex = mode.InsertionPoints.Count - 1;
				break;
			case InsertPosition.Before:
				// Last insertion point located before the caret.
				for (int i = 0; i < mode.InsertionPoints.Count; i++) {
					if (mode.InsertionPoints [i].Location < loc)
						mode.CurIndex = i;
				}
				break;
			case InsertPosition.After:
				// First insertion point located after the caret.
				for (int i = 0; i < mode.InsertionPoints.Count; i++) {
					if (mode.InsertionPoints [i].Location > loc) {
						mode.CurIndex = i;
						break;
					}
				}
				break;
			}
			operationsRunning++;
			mode.StartMode ();
			mode.Exited += delegate(object s, InsertionCursorEventArgs iCArgs) {
				if (iCArgs.Success) {
					// Force a blank line between nodes when the chosen point would
					// otherwise butt multiple nodes together.
					if (iCArgs.InsertionPoint.LineAfter == NewLineInsertion.None && 
					    iCArgs.InsertionPoint.LineBefore == NewLineInsertion.None && nodes.Count () > 1) {
						iCArgs.InsertionPoint.LineAfter = NewLineInsertion.BlankLine;
					}
					// Insert in reverse so each node ends up above the previously
					// inserted one, preserving the original node order in the document.
					foreach (var node in nodes.Reverse ()) {
						var output = OutputNode (MonoDevelop.Ide.TypeSystem.CodeGenerationService.CalculateBodyIndentLevel (declaringType), node);
						var offset = context.TextEditor.LocationToOffset (iCArgs.InsertionPoint.Location);
						var delta = iCArgs.InsertionPoint.Insert (editor, output.Text);
						output.RegisterTrackedSegments (this, delta + offset);
					}
					tcs.SetResult (this);
				} else {
					Rollback ();
				}
				DisposeOnClose (); 
			};
			return tcs.Task;
		}
Example #14
0
		/// <summary>
		/// Inserts the given AST nodes at the insertion point the user selects on the
		/// cursor layer, wiring the edit into the document's undo stack when the
		/// target is a TextDocument. Enum members get trailing-comma handling.
		/// </summary>
		void InsertWithCursorOnLayer(EditorScript currentScript, InsertionCursorLayer layer, TaskCompletionSource<Script> tcs, IList<AstNode> nodes, IDocument target)
		{
			var doc = target as TextDocument;
			var op = new UndoOperation(layer, tcs);
			if (doc != null) {
				doc.UndoStack.Push(op);
			}
			layer.ScrollToInsertionPoint();
			layer.Exited += delegate(object s, InsertionCursorEventArgs args) {
				// Bug fix: doc is null when target is not a TextDocument (it was
				// null-checked above for Push but dereferenced unconditionally here),
				// so guard the undo-group calls the same way.
				if (doc != null) {
					doc.UndoStack.StartContinuedUndoGroup();
				}
				try {
					if (args.Success) {
						// Force a blank line when multiple nodes would otherwise be
						// inserted with no separation at all.
						if (args.InsertionPoint.LineAfter == NewLineInsertion.None &&
						    args.InsertionPoint.LineBefore == NewLineInsertion.None && nodes.Count > 1) {
							args.InsertionPoint.LineAfter = NewLineInsertion.BlankLine;
						}
						
						var insertionPoint = args.InsertionPoint;
						// Enum members sit one per line with no surrounding blank lines.
						if (nodes.All(n => n is EnumMemberDeclaration)) {
							insertionPoint.LineAfter = NewLineInsertion.Eol;
							insertionPoint.LineBefore = NewLineInsertion.None;
						}

						int offset = currentScript.GetCurrentOffset(insertionPoint.Location);
						int indentLevel = currentScript.GetIndentLevelAt(Math.Max(0, offset - 1));
						
						// Insert in reverse so the nodes appear in their original order.
						foreach (var node in nodes.Reverse()) {
							var output = currentScript.OutputNode(indentLevel, node);
							var text = output.Text;
							if (node is EnumMemberDeclaration) {
								if (insertionPoint != layer.InsertionPoints.Last()) {
									// Not the last slot: the new member needs a trailing comma.
									text += ",";
								} else {
									// Appending at the end of the enum: add the comma to the
									// previous last member instead.
									var parentEnum = currentScript.context.RootNode.GetNodeAt(insertionPoint.Location, n => (n is TypeDeclaration) && ((TypeDeclaration)n).ClassType == ClassType.Enum) as TypeDeclaration;
									if (parentEnum != null) {
										var lastMember = parentEnum.Members.LastOrDefault();
										if (lastMember != null) {
											var segment = currentScript.GetSegment(lastMember);
											currentScript.InsertText(segment.EndOffset, ",");
										}
									}
								}
							}
							int delta = insertionPoint.Insert(target, text);
							output.RegisterTrackedSegments(currentScript, delta + offset);
						}
						currentScript.FormatText(nodes);
						tcs.SetResult(currentScript);
					}
					layer.Dispose();
					DisposeOnClose();
				} finally {
					if (doc != null) {
						doc.UndoStack.EndUndoGroup();
					}
				}
				op.Reset();
			};
		}
Example #15
0
        /// <summary>
        /// Best-effort disposal of the interceptors in reverse creation order. A null
        /// list means "nothing to do", and individual failures are logged rather than
        /// propagated so every interceptor gets a chance to be cleaned up.
        /// </summary>
        private void DisposeInterceptorsSafely(IList<IRequestHandlerInterceptor> interceptors)
        {
            if (interceptors == null)
            {
                return;
            }

            foreach (var current in interceptors.Reverse())
            {
                try
                {
                    // Release from the container first, then dispose the instance.
                    IoC.Container.Release(current);
                    current.Dispose();
                }
                catch (Exception error)
                {
                    logger.Error("error disposing " + current, error);
                }
            }
        }
Example #16
0
        /// <summary>
        /// Moves the given channels one position up or down in the sub-list, swapping
        /// with whatever channel currently occupies the destination slot. When moving
        /// down, the channels are processed last-first so earlier moves don't collide
        /// with channels still waiting to move.
        /// </summary>
        public void MoveChannels(IList<ChannelInfo> channels, bool up)
        {
            if (channels.Count == 0)
                return;
            // Can't move up past the list's first program number.
            if (up && channels[0].GetPosition(this.SubListIndex) <= this.ChannelList.FirstProgramNumber)
                return;

            int delta = up ? -1 : +1;
            var ordered = up ? channels : channels.Reverse();
            foreach (var channel in ordered)
            {
                int newProgramNr = channel.GetPosition(this.SubListIndex) + delta;
                var occupant = this.ChannelList.Channels.FirstOrDefault(ch => ch.GetPosition(this.SubListIndex) == newProgramNr);
                if (occupant != null)
                    occupant.ChangePosition(this.SubListIndex, -delta);
                channel.ChangePosition(this.SubListIndex, delta);
            }
            this.DataRoot.NeedsSaving = true;
        }
 /// <summary>
 /// Counts how many trailing nodes the two lists have in common by reversing both
 /// and delegating to the from-the-start comparison.
 /// </summary>
 private int CommonNodesLengthFromEnd(IList<SyntaxNode> nodes1, IList<SyntaxNode> nodes2)
 {
     var reversed1 = nodes1.Reverse().ToList();
     var reversed2 = nodes2.Reverse().ToList();
     return CommonNodesLengthFromStart(reversed1, reversed2);
 }
    /// <summary>
    /// Steers this object along a path toward the target: picks the furthest path
    /// node with a clear line of sight, rotates toward it, applies wall-repulsion
    /// steering, and translates forward.
    /// </summary>
    public void MoveTowards(Transform target)
    {
        path = GetPathTo(target);

        // Debug visualization of the path, alternating segment colors.
        var colors = new[] { Color.magenta, Color.yellow };

        for (int i = 1; i < path.Count; i++)
        {
            Debug.DrawLine(path[i - 1], path[i], colors[i%colors.Length]);
        }

        // Walk the path from its far end and aim at the first node we can see
        // without a wall in between (i.e. the furthest directly-reachable node).
        Vector2 targetPosition = transform.position;
        Vector2 position = transform.position;
        foreach (var nodePosition in path.Reverse())
        {
            var distance = nodePosition - position;
            var direction = distance.normalized;

            var hit = Physics2D.Raycast(position, direction, distance.magnitude, wallMask);
            if (!hit)
            {
                targetPosition = nodePosition;
                break;
            }
        }

        Debug.DrawLine(position, targetPosition, Color.cyan);

        var currentAngle = transform.eulerAngles.z;

        var targetDistance = targetPosition - position;
        var targetAngle = FastMath.Atan2(-targetDistance.x, targetDistance.y) * Mathf.Rad2Deg;

        // Smallest absolute angular difference between target and current heading.
        System.Func<float, float, float> mod = (a, n) => a - Mathf.Floor(a/n) * n;

        var da = Mathf.Abs(mod((targetAngle - currentAngle + 180), 360) - 180);

        //Debug.LogFormat("{0}, {1}, {2}", targetAngle, currentAngle, da);

        var targetRotation = Quaternion.Euler(0, 0, targetAngle);
        transform.rotation = Quaternion.RotateTowards(transform.rotation, targetRotation, turnRate  * 5 * Time.deltaTime);

        // Whisker raycasts from left/right offsets toward a point ahead; a hit on
        // either side pushes the sideways "repel" steering away from that wall.
        var frontOffset = transform.TransformPoint(Vector3.up * repelFocus);
        var rightOffset = transform.TransformPoint(repelOffset);
        var leftOffset = transform.TransformPoint(new Vector3(-repelOffset.x, repelOffset.y));

        Debug.DrawLine(rightOffset, rightOffset + (frontOffset - rightOffset).normalized * repelCastDistance);
        if (Physics2D.Raycast(rightOffset, frontOffset - rightOffset, repelCastDistance, wallMask))
        {
            repel = Mathf.Max(repel - repelIncrement, -maxRepel);
        }

        Debug.DrawLine(leftOffset, leftOffset + (frontOffset - leftOffset).normalized * repelCastDistance);
        if (Physics2D.Raycast(leftOffset, frontOffset - leftOffset, repelCastDistance, wallMask))
        {
            repel = Mathf.Min(repel + repelIncrement, maxRepel);
        }

        repel = repel * (1 - rapelDecay); // Mathf.Abs(repel) < .01f ? 0 : repel * .8f;

        // A target more than 90 degrees behind triggers a brief reversing period.
        if (da > 90)
        {
            reverseTime = .25f;
        }

        // NOTE(review): while reverseTime > 0 the method returns here, so the
        // `reverse` flag below is always false where moveDirection is computed —
        // the reverse movement branch looks unreachable. Confirm intent.
        var reverse = false;
        if (reverseTime > 0)
        {
            reverse = true;
            reverseTime -= Time.deltaTime;
            return;
        }

        //var reverse = da > 60;
        //if (reverse)
        //    return;

        // Blend sideways repel with forward motion (sign flipped when reversing).
        var moveDirection = new Vector3(repel, (1 - Mathf.Abs(repel)) * (reverse ? -1 : 1));

        // "forward"
        Debug.DrawLine(transform.position, transform.position + transform.up);
        Debug.DrawLine(transform.position, transform.position + transform.TransformDirection(moveDirection).normalized, Color.yellow);

        if (GetComponent<Animator>())
        {
            GetComponent<Animator>().SetFloat("Slide", repel);
        }

        transform.Translate(moveDirection * moveSpeed * .1f * Time.deltaTime, Space.Self);
    }
Example #19
0
		/// <summary>
		/// Inserts the given AST nodes at the insertion point the user selects on the
		/// cursor layer, completing the task with the script once the edit is applied.
		/// </summary>
		void InsertWithCursorOnLayer(EditorScript currentScript, InsertionCursorLayer layer, TaskCompletionSource<Script> tcs, IList<AstNode> nodes, IDocument target)
		{
			layer.Exited += delegate(object s, InsertionCursorEventArgs args) {
				if (args.Success) {
					// Force a blank line when multiple nodes would otherwise be
					// inserted with no separation at all.
					if (args.InsertionPoint.LineAfter == NewLineInsertion.None &&
					    args.InsertionPoint.LineBefore == NewLineInsertion.None && nodes.Count > 1) {
						args.InsertionPoint.LineAfter = NewLineInsertion.BlankLine;
					}
					// Insert in reverse so each node lands above the previous one and
					// the original node order is preserved in the document.
					foreach (var node in nodes.Reverse ()) {
						int indentLevel = currentScript.GetIndentLevelAt(target.GetOffset(args.InsertionPoint.Location));
						var output = currentScript.OutputNode(indentLevel, node);
						var offset = target.GetOffset(args.InsertionPoint.Location);
						var delta = args.InsertionPoint.Insert(target, output.Text);
						output.RegisterTrackedSegments(currentScript, delta + offset);
					}
					tcs.SetResult(currentScript);
				}
				layer.Dispose();
				DisposeOnClose();
			};
		}
Example #20
0
        /// <summary>
        /// Calls AfterHandlingRequest on each previously-invoked interceptor in
        /// reverse order, collecting (rather than propagating) anything thrown so
        /// every interceptor gets its post-processing chance.
        /// </summary>
        private IEnumerable<Exception> RunInvokedInterceptorsSafely(RequestProcessingContext requestProcessingState, IList<IRequestHandlerInterceptor> invokedInterceptors)
        {
            var errors = new List<Exception>();
            foreach (var invoked in invokedInterceptors.Reverse())
            {
                try
                {
                    invoked.AfterHandlingRequest(requestProcessingState);
                }
                catch (Exception error)
                {
                    errors.Add(error);
                }
            }

            return errors;
        }
Example #21
0
        /// <summary>
        /// Called by the <see cref="BlockChain"/> when the best chain (representing total work done) has changed. In this case,
        /// we need to go through our transactions and find out if any have become invalid. It's possible for our balance
        /// to go down in this case: money we thought we had can suddenly vanish if the rest of the network agrees it
        /// should be so.
        /// </summary>
        /// <remarks>
        /// The oldBlocks/newBlocks lists are ordered height-wise from top first to bottom last.
        /// </remarks>
        /// <exception cref="VerificationException"/>
        internal void Reorganize(IList<StoredBlock> oldBlocks, IList<StoredBlock> newBlocks)
        {
            // NOTE(review): lock (this) is visible to external callers; a private lock
            // object would be safer — confirm before changing, other code may rely on it.
            lock (this)
            {
                // This runs on any peer thread with the block chain synchronized.
                //
                // The reorganize functionality of the wallet is tested in ChainSplitTests.
                //
                // For each transaction we track which blocks they appeared in. Once a re-org takes place we have to find all
                // transactions in the old branch, all transactions in the new branch and find the difference of those sets.
                //
                // receive() has been called on the block that is triggering the re-org before this is called.

                _log.Info("  Old part of chain (top to bottom):");
                foreach (var b in oldBlocks) _log.InfoFormat("    {0}", b.Header.HashAsString);
                _log.InfoFormat("  New part of chain (top to bottom):");
                foreach (var b in newBlocks) _log.InfoFormat("    {0}", b.Header.HashAsString);

                // Transactions that appear in the old chain segment.
                IDictionary<Sha256Hash, Transaction> oldChainTransactions = new Dictionary<Sha256Hash, Transaction>();
                // Transactions that appear in the old chain segment and NOT the new chain segment.
                IDictionary<Sha256Hash, Transaction> onlyOldChainTransactions = new Dictionary<Sha256Hash, Transaction>();
                // Transactions that appear in the new chain segment.
                IDictionary<Sha256Hash, Transaction> newChainTransactions = new Dictionary<Sha256Hash, Transaction>();
                // Transactions that don't appear in either the new or the old section, ie, the shared trunk.
                IDictionary<Sha256Hash, Transaction> commonChainTransactions = new Dictionary<Sha256Hash, Transaction>();

                // Gather every transaction we know about (unspent, spent, inactive) into one map.
                IDictionary<Sha256Hash, Transaction> all = new Dictionary<Sha256Hash, Transaction>();
                foreach (var pair in Unspent.Concat(Spent).Concat(_inactive))
                {
                    all[pair.Key] = pair.Value;
                }
                // Classify each transaction as old-segment, new-segment, or common trunk.
                foreach (var tx in all.Values)
                {
                    var appearsIn = tx.AppearsIn;
                    Debug.Assert(appearsIn != null);
                    // If the set of blocks this transaction appears in is disjoint with one of the chain segments it means
                    // the transaction was never incorporated by a miner into that side of the chain.
                    var inOldSection = appearsIn.Any(oldBlocks.Contains) || oldBlocks.Any(appearsIn.Contains);
                    var inNewSection = appearsIn.Any(newBlocks.Contains) || newBlocks.Any(appearsIn.Contains);
                    var inCommonSection = !inNewSection && !inOldSection;

                    if (inCommonSection)
                    {
                        Debug.Assert(!commonChainTransactions.ContainsKey(tx.Hash), "Transaction appears twice in common chain segment");
                        commonChainTransactions[tx.Hash] = tx;
                    }
                    else
                    {
                        if (inOldSection)
                        {
                            Debug.Assert(!oldChainTransactions.ContainsKey(tx.Hash), "Transaction appears twice in old chain segment");
                            oldChainTransactions[tx.Hash] = tx;
                            if (!inNewSection)
                            {
                                Debug.Assert(!onlyOldChainTransactions.ContainsKey(tx.Hash), "Transaction appears twice in only-old map");
                                onlyOldChainTransactions[tx.Hash] = tx;
                            }
                        }
                        if (inNewSection)
                        {
                            Debug.Assert(!newChainTransactions.ContainsKey(tx.Hash), "Transaction appears twice in new chain segment");
                            newChainTransactions[tx.Hash] = tx;
                        }
                    }
                }

                // If there is no difference it means we have nothing we need to do and the user does not care.
                var affectedUs = oldChainTransactions.Count != newChainTransactions.Count ||
                                 !oldChainTransactions.All(
                                     item =>
                                     {
                                         Transaction rightValue;
                                         return newChainTransactions.TryGetValue(item.Key, out rightValue) && Equals(item.Value, rightValue);
                                     });
                _log.Info(affectedUs ? "Re-org affected our transactions" : "Re-org had no effect on our transactions");
                if (!affectedUs) return;

                // For simplicity we will reprocess every transaction to ensure it's in the right bucket and has the right
                // connections. Attempting to update each one with minimal work is possible but complex and was leading to
                // edge cases that were hard to fix. As re-orgs are rare the amount of work this implies should be manageable
                // unless the user has an enormous wallet. As an optimization fully spent transactions buried deeper than
                // 1000 blocks could be put into yet another bucket which we never touch and assume re-orgs cannot affect.

                foreach (var tx in onlyOldChainTransactions.Values) _log.InfoFormat("  Only Old: {0}", tx.HashAsString);
                foreach (var tx in oldChainTransactions.Values) _log.InfoFormat("  Old: {0}", tx.HashAsString);
                foreach (var tx in newChainTransactions.Values) _log.InfoFormat("  New: {0}", tx.HashAsString);

                // Break all the existing connections.
                foreach (var tx in all.Values)
                    tx.DisconnectInputs();
                foreach (var tx in Pending.Values)
                    tx.DisconnectInputs();
                // Reconnect the transactions in the common part of the chain.
                foreach (var tx in commonChainTransactions.Values)
                {
                    var badInput = tx.ConnectForReorganize(all);
                    Debug.Assert(badInput == null, "Failed to connect " + tx.HashAsString + ", " + badInput);
                }
                // Recalculate the unspent/spent buckets for the transactions the re-org did not affect.
                Unspent.Clear();
                Spent.Clear();
                _inactive.Clear();
                foreach (var tx in commonChainTransactions.Values)
                {
                    var unspentOutputs = 0;
                    foreach (var output in tx.Outputs)
                    {
                        if (output.IsAvailableForSpending) unspentOutputs++;
                    }
                    // Any available output keeps the transaction in the unspent bucket.
                    if (unspentOutputs > 0)
                    {
                        _log.InfoFormat("  TX {0}: ->unspent", tx.HashAsString);
                        Unspent[tx.Hash] = tx;
                    }
                    else
                    {
                        _log.InfoFormat("  TX {0}: ->spent", tx.HashAsString);
                        Spent[tx.Hash] = tx;
                    }
                }
                // Now replay the act of receiving the blocks that were previously in a side chain. This will:
                //   - Move any transactions that were pending and are now accepted into the right bucket.
                //   - Connect the newly active transactions.
                foreach (var b in newBlocks.Reverse()) // Need bottom-to-top but we get top-to-bottom.
                {
                    _log.InfoFormat("Replaying block {0}", b.Header.HashAsString);
                    ICollection<Transaction> txns = new HashSet<Transaction>();
                    foreach (var tx in newChainTransactions.Values)
                    {
                        if (tx.AppearsIn.Contains(b))
                        {
                            txns.Add(tx);
                            _log.InfoFormat("  containing tx {0}", tx.HashAsString);
                        }
                    }
                    foreach (var t in txns)
                    {
                        Receive(t, b, BlockChain.NewBlockType.BestChain, true);
                    }
                }

                // Find the transactions that didn't make it into the new chain yet. For each input, try to connect it to the
                // transactions that are in {spent,unspent,pending}. Check the status of each input. For inactive
                // transactions that only send us money, we put them into the inactive pool where they sit around waiting for
                // another re-org or re-inclusion into the main chain. For inactive transactions where we spent money we must
                // put them back into the pending pool if we can reconnect them, so we don't create a double spend whilst the
                // network heals itself.
                IDictionary<Sha256Hash, Transaction> pool = new Dictionary<Sha256Hash, Transaction>();
                foreach (var pair in Unspent.Concat(Spent).Concat(Pending))
                {
                    pool[pair.Key] = pair.Value;
                }
                IDictionary<Sha256Hash, Transaction> toReprocess = new Dictionary<Sha256Hash, Transaction>();
                foreach (var pair in onlyOldChainTransactions.Concat(Pending))
                {
                    toReprocess[pair.Key] = pair.Value;
                }
                _log.Info("Reprocessing:");
                // Note, we must reprocess dead transactions first. The reason is that if there is a double spend across
                // chains from our own coins we get a complicated situation:
                //
                // 1) We switch to a new chain (B) that contains a double spend overriding a pending transaction. It goes dead.
                // 2) We switch BACK to the first chain (A). The dead transaction must go pending again.
                // 3) We resurrect the transactions that were in chain (B) and assume the miners will start work on putting them
                //    in to the chain, but it's not possible because it's a double spend. So now that transaction must become
                //    dead instead of pending.
                //
                // This only occurs when we are double spending our own coins.
                foreach (var tx in _dead.Values.ToList())
                {
                    ReprocessTxAfterReorg(pool, tx);
                }
                foreach (var tx in toReprocess.Values)
                {
                    ReprocessTxAfterReorg(pool, tx);
                }

                _log.InfoFormat("post-reorg balance is {0}", Utils.BitcoinValueToFriendlyString(GetBalance()));

                // Inform event listeners that a re-org took place.
                if (Reorganized != null)
                {
                    // Synchronize on the event listener as well. This allows a single listener to handle events from
                    // multiple wallets without needing to worry about being thread safe.
                    lock (Reorganized)
                    {
                        Reorganized(this, EventArgs.Empty);
                    }
                }
            }
        }
Example #22
0
        /// <summary>
        /// Replaces every file-based reference (a &lt;Reference&gt; element that
        /// carries a &lt;HintPath&gt; child) in the project XML with the supplied
        /// reference list, then persists the document.
        /// </summary>
        /// <param name="xmlDoc">Loaded MSBuild project document to rewrite.</param>
        /// <param name="references">References to append in place of the removed ones.</param>
        private void WriteReferences(XmlDocument xmlDoc, IList<Reference> references)
        {
            XmlNode refItemGroup = null;
            var refNodes = xmlDoc.GetElementsByTagName("Reference");
            var removeNodes = new List<XmlNode>();
            foreach (var refNode in refNodes) {
                var node = refNode as XmlNode;
                // remember the containing ItemGroup so new references land in the same group
                refItemGroup = node.ParentNode;
                var hintNode = node.GetChildNamed("HintPath");
                if (hintNode != null) {
                    removeNodes.Add(node);
                    // also remove the preceding whitespace node so we don't add loads of
                    // empty lines on every rewrite.
                    // BUG FIX: PreviousSibling is null when the Reference element is the
                    // first child of its parent — guard before dereferencing.
                    if (node.PreviousSibling != null && node.PreviousSibling.NodeType == XmlNodeType.Whitespace) {
                        removeNodes.Add(node.PreviousSibling);
                    }
                }
            }

            // Removal is deferred until after enumeration so the live node list
            // returned by GetElementsByTagName isn't mutated while we iterate it.
            foreach (var node in removeNodes) {
                if (node.ParentNode != null) {
                    node.ParentNode.RemoveChild(node);
                }
            }

            // NOTE(review): refItemGroup stays null when the project contained no
            // <Reference> elements at all — confirm AddReferenceTo tolerates that.
            var appendReferences = references.Reverse();
            foreach (var reference in appendReferences) {
                AddReferenceTo(refItemGroup, reference);
            }

            //TODO:if we have any embedded resources, then update the resources section
            WriteXml(xmlDoc);
        }
        /// <summary>
        /// Begin-half of the filtered asynchronous action invocation: wraps the action
        /// call in the IActionFilter pipeline so that the first filter in the list ends
        /// up outermost. The matching End-half replays the post-action continuation.
        /// </summary>
        protected internal virtual IAsyncResult BeginInvokeActionMethodWithFilters(ControllerContext controllerContext, IList<IActionFilter> filters, ActionDescriptor actionDescriptor, IDictionary<string, object> parameters, AsyncCallback callback, object state)
        {
            // Produced inside beginDelegate, consumed by endDelegate; running it executes
            // the OnActionExecuted side of every filter and yields the final context.
            Func<ActionExecutedContext> endContinuation = null;

            BeginInvokeDelegate beginDelegate = delegate(AsyncCallback asyncCallback, object asyncState)
            {
                ActionExecutingContext preContext = new ActionExecutingContext(controllerContext, actionDescriptor, parameters);
                // Stays null unless the innermost continuation actually ran, i.e. no
                // filter short-circuited the pipeline before reaching the action.
                IAsyncResult innerAsyncResult = null;

                // Innermost continuation: start the real action method and hand back a
                // function that completes it and wraps the result in a context.
                Func<Func<ActionExecutedContext>> beginContinuation = () =>
                {
                    innerAsyncResult = BeginInvokeActionMethod(controllerContext, actionDescriptor, parameters, asyncCallback, asyncState);
                    return () =>
                           new ActionExecutedContext(controllerContext, actionDescriptor, false /* canceled */, null /* exception */)
                           {
                               Result = EndInvokeActionMethod(innerAsyncResult)
                           };
                };

                // need to reverse the filter list because the continuations are built up backward
                Func<Func<ActionExecutedContext>> thunk = filters.Reverse().Aggregate(beginContinuation,
                                                                                      (next, filter) => () => InvokeActionMethodFilterAsynchronously(filter, preContext, next));
                endContinuation = thunk();

                if (innerAsyncResult != null)
                {
                    // we're just waiting for the inner result to complete
                    return innerAsyncResult;
                }
                else
                {
                    // something was short-circuited and the action was not called, so this was a synchronous operation
                    SimpleAsyncResult newAsyncResult = new SimpleAsyncResult(asyncState);
                    newAsyncResult.MarkCompleted(true /* completedSynchronously */, asyncCallback);
                    return newAsyncResult;
                }
            };

            EndInvokeDelegate<ActionExecutedContext> endDelegate = delegate(IAsyncResult asyncResult)
            {
                // Run the post-action half of the filter chain captured above.
                return endContinuation();
            };

            return AsyncResultWrapper.Begin(callback, state, beginDelegate, endDelegate, _invokeActionMethodWithFiltersTag);
        }
        /// <summary>
        /// Executes the action result wrapped in the IResultFilter pipeline; the
        /// first filter in the list ends up outermost.
        /// </summary>
        protected virtual ResultExecutedContext InvokeActionResultWithFilters(ControllerContext controllerContext, IList<IResultFilter> filters, ActionResult actionResult)
        {
            ResultExecutingContext preContext = new ResultExecutingContext(controllerContext, actionResult);

            // Innermost continuation: run the result itself and report completion.
            Func<ResultExecutedContext> pipeline = delegate
            {
                InvokeActionResult(controllerContext, actionResult);
                return new ResultExecutedContext(controllerContext, actionResult, false /* canceled */, null /* exception */);
            };

            // Wrap back-to-front: each filter captures the continuation built so far,
            // so the first registered filter becomes the outermost layer.
            foreach (IResultFilter filter in filters.Reverse())
            {
                Func<ResultExecutedContext> inner = pipeline;
                IResultFilter current = filter;
                pipeline = () => InvokeActionResultFilter(current, preContext, inner);
            }

            return pipeline();
        }
Example #25
0
		/// <summary>
		/// Hooks the insertion-cursor layer so that, when the user confirms a
		/// location, the given nodes are inserted there as one undoable group and
		/// the task source is completed with the script.
		/// </summary>
		void InsertWithCursorOnLayer(EditorScript currentScript, InsertionCursorLayer layer, TaskCompletionSource<Script> tcs, IList<AstNode> nodes, IDocument target)
		{
			var doc = target as TextDocument;
			var op = new UndoOperation(layer, tcs);
			if (doc != null) {
				doc.UndoStack.Push(op);
			}
			layer.Exited += delegate(object s, InsertionCursorEventArgs args) {
				// BUG FIX: doc is null whenever target is not a TextDocument (the
				// guard above proves that's possible); the original dereferenced it
				// unconditionally here and in the finally block, crashing on any
				// non-TextDocument target.
				if (doc != null) {
					doc.UndoStack.StartContinuedUndoGroup();
				}
				try {
					if (args.Success) {
						// Force a blank line when inserting several nodes at a point
						// that would otherwise add no separating newlines at all.
						if (args.InsertionPoint.LineAfter == NewLineInsertion.None &&
						    args.InsertionPoint.LineBefore == NewLineInsertion.None && nodes.Count > 1) {
							args.InsertionPoint.LineAfter = NewLineInsertion.BlankLine;
						}
						// Insert back-to-front at the same insertion point so the nodes
						// end up in their original list order in the document.
						foreach (var node in nodes.Reverse ()) {
							int indentLevel = currentScript.GetIndentLevelAt(target.GetOffset(args.InsertionPoint.Location));
							var output = currentScript.OutputNode(indentLevel, node);
							var offset = target.GetOffset(args.InsertionPoint.Location);
							var delta = args.InsertionPoint.Insert(target, output.Text);
							output.RegisterTrackedSegments(currentScript, delta + offset);
						}
						tcs.SetResult(currentScript);
					}
					layer.Dispose();
					DisposeOnClose();
				} finally {
					if (doc != null) {
						doc.UndoStack.EndUndoGroup();
					}
				}
				op.Reset();
			};
		}
Example #26
0
        /// <summary>
        /// Creates a new instance of a complex lens using a definition of
        /// elements.
        /// </summary>
        /// <remarks>
        /// The first and last surfaces have to be spherical. TODO: this is
        /// needed only for simpler sampling. In general planar surfaces or
        /// stops could be sampled too.
        /// </remarks>
        /// <param name="surfaceDefs">List of definitions of spherical or
        /// planar element surfaces or stops. Ordered from front to back.
        /// Must not be empty or null.
        /// </param>
        /// <param name="mediumRefractiveIndex">Index of refraction of medium
        /// outside the lens. It is assumed there is one medium on the scene
        /// side, senzor side and inside the lens.</param>
        /// <param name="scale">Uniform scale factor applied to each surface
        /// definition; values within epsilon of 1.0 leave the lens unscaled.</param>
        /// <returns>The created complex lens instance.</returns>
        public static ComplexLens Create(
            IList<SphericalElementSurfaceDefinition> surfaceDefs,
            double mediumRefractiveIndex,
            double scale)
        {
            var surfaces = new List<ElementSurface>();

            // Work in back-to-front order (the definition list is front-to-back).
            var surfaceDefsReverse = surfaceDefs.Reverse().ToList();
            // scale the lens if needed
            if (Math.Abs(scale - 1.0) > epsilon)
            {
                surfaceDefsReverse = surfaceDefsReverse.Select(surface => surface.Scale(scale)).ToList();
            }
            // thickness of the whole lens (from front to back apex)
            // (without the distance to the senzor - backmost surface def.)
            double lensThickness = surfaceDefsReverse.Skip(1).Sum(def => def.Thickness);
            // Z position of the base plane of the surface currently being built;
            // starts at the front apex and walks backward by each def's thickness.
            double elementBasePlaneShiftZ = lensThickness;

            // Cap heights of the previous and current spherical surface; used to
            // correct the base-plane shift between consecutive surfaces.
            double lastCapHeight = 0;
            double capHeight = 0;

            // definition list is ordered from front to back, working list
            // must be ordered from back to front, so a conversion has to be
            // performed
            int defIndex = 0;
            foreach (var definition in surfaceDefsReverse)
            {
                if (defIndex > 0)
                {
                    // the first (backmost) def's thickness is the senzor distance
                    // and was excluded from lensThickness above
                    elementBasePlaneShiftZ -= definition.Thickness;
                }

                ElementSurface surface = new ElementSurface();
                surface.ApertureRadius = 0.5 * definition.ApertureDiameter;
                if (defIndex + 1 < surfaceDefsReverse.Count)
                {
                    // medium behind this surface is the next def's medium
                    surface.NextRefractiveIndex = surfaceDefsReverse[defIndex + 1].NextRefractiveIndex;
                }
                else
                {
                    // frontmost surface borders the outside medium
                    surface.NextRefractiveIndex = mediumRefractiveIndex;
                }
                if (definition.CurvatureRadius.HasValue)
                {
                    // spherical surface
                    double radius = definition.CurvatureRadius.Value;
                    // convexity reverses when converting from front-to-back
                    // back-to-front ordering
                    surface.Convex = radius < 0;
                    Sphere sphere = new Sphere()
                    {
                        Radius = Math.Abs(radius)
                    };
                    sphere.Center = Math.Sign(radius) *
                        sphere.GetCapCenter(surface.ApertureRadius, Vector3d.UnitZ);
                    // signed cap height; the sign carries the surface orientation
                    capHeight = Math.Sign(radius) * sphere.GetCapHeight(sphere.Radius, surface.ApertureRadius);
                    // adjust the base plane by the difference of adjacent cap heights
                    // so apex-to-apex thicknesses are respected
                    elementBasePlaneShiftZ -= lastCapHeight - capHeight;
                    sphere.Center += new Vector3d(0, 0, elementBasePlaneShiftZ);
                    surface.Surface = sphere;
                    surface.SurfaceNormalField = sphere;
                }
                else
                {
                    // planar surface
                    // both media are the same -> circular stop
                    // else -> planar element surface
                    surface.NextRefractiveIndex = definition.NextRefractiveIndex;
                    surface.Convex = true;
                    capHeight = 0;
                    elementBasePlaneShiftZ -= lastCapHeight - capHeight;
                    Circle circle = new Circle()
                    {
                        Radius = 0.5 * definition.ApertureDiameter,
                        Z = elementBasePlaneShiftZ,
                    };

                    surface.Surface = circle;
                    surface.SurfaceNormalField = circle;
                }
                lastCapHeight = capHeight;
                surfaces.Add(surface);
                defIndex++;
            }

            //DEBUG
            //foreach (var surface in surfaces)
            //{
            //    Console.WriteLine("{0}, {1}, {2}", surface.ApertureRadius,
            //        surface.Convex, surface.NextRefractiveIndex);
            //}

            ComplexLens lens = new ComplexLens(surfaces)
            {
                MediumRefractiveIndex = mediumRefractiveIndex
            };
            return lens;
        }
        /// <summary>
        /// Begin-half of the filtered asynchronous action invocation. Builds the
        /// filter pipeline as paired Begin/End delegates so the async call can be
        /// split across the IAsyncResult boundary.
        /// </summary>
        protected internal virtual IAsyncResult BeginInvokeActionMethodWithFilters(ControllerContext controllerContext, IList<IActionFilter> filters, ActionDescriptor actionDescriptor, IDictionary<string, object> parameters, AsyncCallback callback, object state) {
            ActionExecutingContext preContext = new ActionExecutingContext(controllerContext, actionDescriptor, parameters);

            // what makes this different from the synchronous version of this method is that we have to
            // aggregate both the begin + end delegates together. overall, though, it's the same logic.
            // The anonymous type pairs a Begin delegate (starts the action) with its matching
            // End delegate (completes it and wraps the result in a context).
            var continuation = new {
                Begin = (BeginInvokeCallback)((innerCallback, innerState) => BeginInvokeActionMethod(controllerContext, actionDescriptor, parameters, innerCallback, innerState)),
                End = (AsyncCallback<ActionExecutedContext>)(ar => new ActionExecutedContext(controllerContext, actionDescriptor, false /* canceled */, null /* exception */) {
                    Result = EndInvokeActionMethod(ar)
                })
            };

            // need to reverse the filter list because the continuations are built up backward
            // (each filter wraps the pair built so far, so the first filter ends up outermost)
            var invocation = filters.Reverse().Aggregate(continuation,
                (next, filter) => new {
                    Begin = (BeginInvokeCallback)((innerCallback, innerState) => BeginInvokeActionMethodFilter(filter, preContext, next.Begin, next.End, innerCallback, innerState)),
                    End = (AsyncCallback<ActionExecutedContext>)EndInvokeActionMethodFilter
                });

            return AsyncResultWrapper.Wrap(callback, state, invocation.Begin, invocation.End, _invokeActionMethodWithFiltersTag);
        }
 /// <summary>
 /// Handles completion of the packages service run: re-enables the UI,
 /// updates the status label and binds the package list to the grid.
 /// </summary>
 private void PackagesServiceRunFinished(IList<Package> packages)
 {
     // Marshal onto the UI thread if we were called from a worker thread.
     if (this.InvokeRequired)
     {
         Invoke(new PackageSServiceHandler(PackagesServiceRunFinished), new object[] { packages });
         return;
     }

     EnableUserInteraction();
     ClearStatus();

     // On the "installed" tab, collapse duplicates while keeping the LAST
     // occurrence of each package and preserving the overall order.
     var displayed = packages;
     if (packageTabControl.SelectedTab == tabInstalled)
     {
         displayed = packages.Reverse().Distinct().Reverse().ToList();
     }

     lblStatus.Text = string.Format(strings.num_installed_packages, displayed.Count());
     this.Activate();
     PackageGrid.DataSource = displayed;
 }
        /// <summary>
        /// Runs every exception filter against the thrown exception, in reverse
        /// list order, and returns the resulting context.
        /// </summary>
        protected virtual ExceptionContext InvokeExceptionFilters(ControllerContext controllerContext, IList<IExceptionFilter> filters, Exception exception)
        {
            ExceptionContext context = new ExceptionContext(controllerContext, exception);

            // Iterate from the end of the list — same order as filters.Reverse(),
            // without allocating an intermediate sequence.
            for (int i = filters.Count - 1; i >= 0; i--)
            {
                filters[i].OnException(context);
            }

            return context;
        }
Example #30
0
        /// <summary>
        /// Renders the combat log as one string, newest entry first, with a blank
        /// line between entries. Returns null once the player is dead.
        /// </summary>
        private string GetCombatLog(IList<CombatLogEntry> logEntries)
        {
            // No log is shown after the player has died.
            if (_combatSteps.Current == CombatStep.PlayerDead)
            {
                return null;
            }

            var separator = Environment.NewLine + Environment.NewLine;
            var texts = logEntries.Reverse().Select(entry => entry.Text);
            return string.Join(separator, texts);
        }