public NodeList<ScEvent> ProcessGames(List<ScGame> games)
{
    NodeList<ScEvent> roots = new NodeList<ScEvent>();
    NodeList<ScEvent> allgames = new NodeList<ScEvent>();

    foreach (ScGame game in games)
    {
        Node<ScEvent> node = new Node<ScEvent>(1, game.Events[0], buildTree(0, game, games));
        allgames.Add(node);

        long counter = 0;
        foreach (Node<ScEvent> root in roots)
        {
            if (root.Value.Unit == node.Value.Unit)
            {
                counter++;
                foreach (Node<ScEvent> n in node.Neighbors)
                {
                    List<Node<ScEvent>> q = root.Neighbors.Where(e => e.Value.Unit == n.Value.Unit).ToList();
                    if (q.Count == 0)
                        root.Neighbors.Add(n);
                }
            }
        }

        if (roots.Count == 0 || counter == 0)
            roots.Add(node);
    }

    CountOccurances(roots, allgames);
    return roots;
}
public CartoSelector(IEnumerable<Element> elements, Env env) : base(elements)
{
    m_filters = new CartoFilterSet();
    m_zooms = new NodeList<CartoZoomElement>();
    m_elements = new NodeList<CartoElement>();
    m_conditions = 0;

    if (env == null)
        env = new Env(); // TODO

    foreach (Element elem in elements)
    {
        if (elem is CartoFilterElement)
        {
            m_filters.Add(elem as CartoFilterElement, env);
            m_conditions++;
        }
        else if (elem is CartoZoomElement)
        {
            m_zooms.Add(elem as CartoZoomElement);
            m_conditions++;
        }
        else if (elem is CartoAttachmentElement)
            m_attachment = (elem as CartoAttachmentElement).Value;
        else
            m_elements.Add((CartoElement)elem);
    }
}
public void Build()
{
    NodeList<BuildAction> roots = new NodeList<BuildAction>();
    NodeList<BuildAction> allgames = new NodeList<BuildAction>();

    foreach (var replay in m_replays)
    {
        foreach (var player in replay.Players)
        {
            var actions = replay.Actions.Where(x => x.Player == player && x.ActionType == Entities.ActionType.Build)
                                        .OrderBy(y => y.Sequence)
                                        .Cast<BuildAction>();

            if (actions.Count() > 0)
            {
                BuildAction action = actions.ElementAt(0);
                Node<BuildAction> node = new Node<BuildAction>(1, action, buildTree(actions));
                allgames.Add(node);

                if (roots.Where(x => x.Value.ObjectType == action.ObjectType).Count() == 0)
                {
                    roots.Add(node);
                }
            }
        }
    }

    countOccurances(roots, allgames);
    m_roots = roots;
    m_allGames = allgames;
}
public Ruleset Evaluate(List<NamedArgument> args, Env env, List<Ruleset> closureContext)
{
    var frame = EvaluateParams(env, args);
    var frames = new[] { this, frame }.Concat(env.Frames).Concat(closureContext).Reverse();
    var context = env.CreateChildEnv(new Stack<Ruleset>(frames));

    var newRules = new NodeList();

    foreach (var rule in Rules)
    {
        if (rule is MixinDefinition)
        {
            var mixin = rule as MixinDefinition;
            var parameters = Enumerable.Concat(mixin.Params, frame.Rules.Cast<Rule>());
            newRules.Add(new MixinDefinition(mixin.Name, new NodeList<Rule>(parameters), mixin.Rules, mixin.Condition));
        }
        else if (rule is Directive)
        {
            newRules.Add(rule);
        }
        else if (rule is Ruleset)
        {
            var ruleset = (rule as Ruleset);

            context.Frames.Push(ruleset);

            var rules = new NodeList(NodeHelper.NonDestructiveExpandNodes<MixinCall>(context, ruleset.Rules)
                .Select(r => r.Evaluate(context)));

            context.Frames.Pop();

            newRules.Add(new Ruleset(ruleset.Selectors, rules));
        }
        else if (rule is MixinCall)
        {
            newRules.AddRange((NodeList)rule.Evaluate(context));
        }
        else
        {
            newRules.Add(rule.Evaluate(context));
        }
    }

    return new Ruleset(null, newRules);
}
public static List<Node> PossibleNode = new List<Node>(); // The candidate nodes (tiles adjacent to the whole path)

#endregion Fields

#region Methods

public static MyLinkedList<Tile> CalculatePathWithAStar(Map map, Tile startTile, Tile endTile)
{
    PossibleNode.Clear();
    NodeList<Node> openList = new NodeList<Node>();   // Contains all candidate nodes (still to be examined)
    NodeList<Node> closedList = new NodeList<Node>(); // Contains the best nodes (the resulting shortest path)
    List<Node> possibleNodes;                         // Tiles adjacent to the current node

    // The start node
    Node startNode = new Node(startTile, null, endTile); // FIXME: build the start node

    /**********************************/
    /* Processing of candidate nodes  */
    /**********************************/
    openList.Add(startNode);

    while (openList.Count > 0) // While the open list still has elements
    {
        Node current = openList[0];
        openList.RemoveAt(0);
        closedList.Add(current);

        if (current.Tile == endTile) // If the current node is the destination tile
        {
            MyLinkedList<Tile> solution = new MyLinkedList<Tile>();
            // Walk back up the parents so the path comes out in the right order
            while (current.Parent != null)
            {
                solution.AddFirst(current.Tile);
                current = current.Parent;
            }
            return solution;
        }

        possibleNodes = current.GetPossibleNode(map, endTile); // FIXME: get the list of adjacent tiles
        // Add this list to the static field that gathers every adjacency list (used for display)
        PossibleNode.AddRange(possibleNodes);

        /***************************************/
        /* Adding the adjacent candidate nodes */
        /***************************************/
        for (int i = 0; i < possibleNodes.Count; i++) // Check each adjacent node (possibleNodes)
        {
            if (!closedList.Contains(possibleNodes[i])) // that is not already in the closed list (avoid redundancy)
            {
                if (openList.Contains(possibleNodes[i])) // FIXME: if it is already in the open list, check
                {
                    // whether its estimated cost is lower than the one computed before;
                    // if so, re-parent the node already stored in the open list
                    if (possibleNodes[i].EstimatedMovement < openList[possibleNodes[i]].EstimatedMovement)
                        openList[possibleNodes[i]].Parent = current;
                }
                else
                    openList.DichotomicInsertion(possibleNodes[i]);
            }
        }
    }

    return null;
}
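// The open list above relies on a DichotomicInsertion helper that is not shown here.
// A minimal sketch of what such a binary insertion might look like, assuming the list is kept
// sorted by the node's estimated cost. Names below are illustrative, not the project's API
// (assumes System and System.Collections.Generic):
public static class SortedListExtensions
{
    // Inserts item into a list that is already sorted by keySelector, using binary search,
    // so that the cheapest element stays at index 0.
    public static void DichotomicInsert<T>(this List<T> list, T item, Func<T, double> keySelector)
    {
        int low = 0, high = list.Count;
        while (low < high)
        {
            int mid = (low + high) / 2;
            if (keySelector(list[mid]) < keySelector(item))
                low = mid + 1;   // item goes after mid
            else
                high = mid;      // item goes at or before mid
        }
        list.Insert(low, item);
    }
}
// Usage in an A* loop would look like: openList.DichotomicInsert(node, n => n.EstimatedMovement);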
public override Node Evaluate(Env env)
{
    NodeList<Element> evaldElements = new NodeList<Element>();
    foreach (Element element in Elements)
    {
        evaldElements.Add(element.Evaluate(env) as Element);
    }

    return new Selector(evaldElements).ReducedFrom<Selector>(this);
}
public NodeList<ScEvent> buildTree(int counter, ScGame game, List<ScGame> games)
{
    NodeList<ScEvent> result = new NodeList<ScEvent>();
    if (++counter < game.Events.Count)
    {
        ScEvent r = game.Events[counter];
        result.Add(new Node<ScEvent>(1, r, buildTree(counter, game, games)));
    }
    return result;
}
public NodeList GetGraphNodes()
{
    var list = new NodeList();
    var inList = new Hashtable();
    //Stack<Node> stack = new Stack<Node>();
    for (var node = FirstEntry; node != null; node = node.NextEntry)
    {
        list.Add(node);
        inList[node] = node;
        for (var node2 = node.First; node2 != null; node2 = node2.Next)
        {
            if (inList[node2] == null)
            {
                inList[node2] = node2;
                list.Add(node2);
            }
        }
        //inList[node] = node;
        //stack.Push(node);
        //while (stack.Count > 0)
        //{
        //    Node top = stack.Pop();
        //    foreach (Node suc in top.Successors)
        //    {
        //        if (inList[suc] == null)
        //        {
        //            list.Add(suc);
        //            inList[suc] = true;
        //            stack.Push(suc);
        //        }
        //    }
        //}
    }
    return list;
}
/// <summary>
/// <c>
///     ['('] %expr {',' %expr} [')']
/// </c>
/// Helper for parsing multiple comma-separated
/// expressions with optional parentheses
/// (e.g. tuples).
/// </summary>
internal static Expression ParseMultiple(
    AstNode parent,
    Func<AstNode, Expression> parserFunc = null,
    bool parens = false,
    params Type[] expectedTypes
)
{
    parserFunc = parserFunc ?? ParseVarExpr;
    if (expectedTypes.Length == 0 || parserFunc == ParseVarExpr)
    {
        expectedTypes = Spec.GlobalExprs;
    }

    var list = new NodeList<Expression>(parent)
    {
        parserFunc(parent)
    };

    if (parens && parent.Peek.Is(CloseParenthesis))
    {
        return list[0];
    }

    // tuple
    if (parent.MaybeEat(Comma))
    {
        do
        {
            list.Add(parserFunc(parent));
        } while (parent.MaybeEat(Comma));
    }
    // generator | comprehension
    else if (parent.Peek.Is(KeywordFor) && parent.Token.Type != Newline)
    {
        list[0] = new ForComprehension(parent, list[0]);
        if (parens)
        {
            list[0] = new GeneratorExpression(parent, (ForComprehension)list[0]);
        }
    }

    CheckType(list, expectedTypes);

    if (parens && list.Count == 1)
    {
        return new ParenthesizedExpression(list[0]);
    }
    return MaybeTuple(parent, list);
}
public static NodeList<CartoSelector> GetCartoSelectors(this Ruleset ruleset)
{
    NodeList<CartoSelector> selectors = new NodeList<CartoSelector>();
    foreach (Selector selector in ruleset.Selectors)
    {
        CartoSelector cs = selector as CartoSelector;
        if (cs)
        {
            selectors.Add(cs);
        }
    }
    return selectors;
}
public void Test() { LineInfo line = new LineInfo(1, string.Empty); line.Set("%input{ type=\"checkbox\", value=testCase.Id, name=\"case\", class=lastCat.Replace(' ', '-')}", 0, 0); TagNode tagNode = new TagNode(null); NodeList nodes = new NodeList(); AttributeNode node = new AttributeNode(tagNode); nodes.Add(node); nodes.Add(tagNode); int offset = 6; AttributeNode myNode = (AttributeNode)node.Parse(nodes, tagNode, line, ref offset); bool t = false; string temp = myNode.ToCode(ref t, false); Assert.Equal("\"checkbox\"", myNode.GetAttribute("type").Value); Assert.Equal("testCase.Id", myNode.GetAttribute("value").Value); Assert.Equal("\"case\"", myNode.GetAttribute("name").Value); Assert.Equal("lastCat.Replace(' ', '-')", myNode.GetAttribute("class").Value); }
public NodeList<Node> Arguments(Parser parser)
{
    var args = new NodeList<Node>();
    Node arg;

    while ((arg = Assignment(parser)) || (arg = Expression(parser)))
    {
        args.Add(arg);
        if (!parser.Tokenizer.Match(','))
        {
            break;
        }
    }
    return args;
}
public NodeList<Expression> Arguments(Parser parser)
{
    var args = new NodeList<Expression>();
    Expression arg;

    while (arg = Expression(parser))
    {
        args.Add(arg);
        if (!parser.Tokenizer.Match(','))
        {
            break;
        }
    }
    return args;
}
private SwitchStatement ParseSwitchStatement() { _scanResult.Next().Is(TokenType.Switch); var result = new SwitchStatement() { Variable = ParseExpression() }; _scanResult.Next().Is(TokenType.AccoladeOpen); var switchCases = new NodeList <SwitchCase>(); while (true) { var next = _scanResult.Next(); if (next.tokenType == TokenType.AccoladeClose) { break; } if (next.tokenType != TokenType.Case) { throw new ParsingException(); } var switchCase = new SwitchCase() { CaseExpression = ParseExpression().As <LiteralExpression>() }; _scanResult.Next().Is(TokenType.Colon); var statements = new NodeList <Statement>(); while (true) { var statement = ParseStatement(); if (statement == null) { break; } statements.Add(statement); } switchCase.Statements = statements; switchCases.Add(switchCase); } result.Cases = switchCases; return(result); }
/// <summary>
/// Add brother node.
/// </summary>
/// <param name="brotherNode"></param>
/// <param name="MerkleProofDict"></param>
public void AddBrotherNode(Node brotherNode, Dictionary<int, MerkleNode> MerkleProofDict)
{
    if (NodeList == null)
    {
        NodeList = new List<Node>();
    }
    NodeList.Add(brotherNode);
    Distance++;
    if (ChildrenList != null && ChildrenList.Count > 0)
    {
        foreach (int code in ChildrenList)
        {
            MerkleProofDict[code].AddBrotherNode(brotherNode, MerkleProofDict);
        }
    }
}
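// For context: the "brother" nodes collected above are the sibling hashes a Merkle proof walks
// through. A minimal, self-contained sketch of how such a proof is typically verified; this is
// illustrative only and not part of the class above (assumes System.Linq,
// System.Collections.Generic and System.Security.Cryptography):
public static class MerkleProofSketch
{
    // Recomputes the root from a leaf hash and the ordered list of sibling hashes,
    // then compares it to the expected root.
    public static bool Verify(byte[] leafHash, IEnumerable<(byte[] Hash, bool SiblingIsLeft)> siblings, byte[] expectedRoot)
    {
        using (var sha = SHA256.Create())
        {
            byte[] current = leafHash;
            foreach (var (siblingHash, siblingIsLeft) in siblings)
            {
                // Concatenate in the order dictated by the sibling's position, then hash.
                byte[] pair = siblingIsLeft
                    ? siblingHash.Concat(current).ToArray()
                    : current.Concat(siblingHash).ToArray();
                current = sha.ComputeHash(pair);
            }
            return current.SequenceEqual(expectedRoot);
        }
    }
}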
public override Node Evaluate(Env env)
{
    NodeList<Element> evaldElements = new NodeList<Element>();

    foreach (Element element in Elements)
    {
        if (element.NodeValue is Extend)
        {
            if (env.MediaPath.Any())
            {
                env.MediaPath.Peek().AddExtension(this, (Extend)(((Extend)element.NodeValue).Evaluate(env)), env);
            }
            else //Global extend
            {
                env.AddExtension(this, (Extend)(((Extend)element.NodeValue).Evaluate(env)), env);
            }
        }
        else
        {
            evaldElements.Add(element.Evaluate(env) as Element);
        }
    }

    var evaluatedSelector = new Selector(evaldElements).ReducedFrom<Selector>(this);

    if (evaluatedSelector.Elements.All(e => e.NodeValue == null))
    {
        return evaluatedSelector;
    }

    Parser.Tokenizer.SetupInput(evaluatedSelector.ToCSS(env), "");

    var result = new NodeList<Selector>();
    Selector selector;
    while (selector = Parsers.Selector(Parser))
    {
        selector.IsReference = IsReference;
        result.Add(selector.Evaluate(env) as Selector);
        if (!Parser.Tokenizer.Match(','))
        {
            break;
        }
    }

    return result;
}
private Node SetState(CursorState state, string word) { Node node = null; if (state != currentState) { if (state == CursorState.FieldName) { node = new FieldNode(word); } if (state == CursorState.Condition) { node = new ConditionNode(word); } if (state == CursorState.Parameter) { node = new ParameterNode(word); } if (state == CursorState.Constant) { node = new ConstantNode(word); } if (state == CursorState.ArrayOfValues) { node = new ArrayOfValues(word); } if (state == CursorState.OpenBracket) { node = new OpenBracketNode(); } if (state == CursorState.CloseBracket) { node = new CloseBracketNode(); } if (state == CursorState.Operator) { node = new OperatorNode(word); } if (node != null) { nodes.Add(node); } currentState = state; } return(node); }
private Node Expand(ComponentList children) { var childNodes = new NodeList(); if (children != null) { foreach (var child in children) { var childNode = child.Expand(true); childNodes.Add(childNode); } } var node = s_configuration.NodeFactory.Create(this, childNodes); return(node); }
public override Node Evaluate(Env env)
{
    NodeList<Element> evaldElements = new NodeList<Element>();
    foreach (Element element in Elements)
    {
        if (element.NodeValue is Extend)
        {
            env.AddExtension(this, (Extend)(((Extend)element.NodeValue).Evaluate(env)), env);
        }
        else
        {
            evaldElements.Add(element.Evaluate(env) as Element);
        }
    }

    return new Selector(evaldElements).ReducedFrom<Selector>(this);
}
public WfContent this[string fieldName]
{
    get
    {
        var content = Repo.Content.Load(_path);
        if (content == null)
        {
            throw new ContentNotFoundException(_path);
        }

        Repo.Field field;
        if (content.Fields.TryGetValue(fieldName, out field))
        {
            var value = content[fieldName];

            var nodeValue = value as Node;
            if (nodeValue != null)
            {
                return new WfContent(nodeValue);
            }

            var enumerableValue = value as System.Collections.IEnumerable;
            if (enumerableValue != null)
            {
                var iter = enumerableValue.GetEnumerator();
                if (iter.MoveNext())
                {
                    nodeValue = (Node)iter.Current;
                    return new WfContent(nodeValue);
                }
            }
            return null;
        }
        throw new ApplicationException(String.Format("Field '{0}' not found in a {1} content: {2} ", fieldName, content.ContentType.Name, content.Path));
    }
    set
    {
        var nodes = new NodeList<Node>();
        var node = Node.LoadNode(value.Path);
        nodes.Add(node);

        var cNode = ContentNode;
        cNode[fieldName] = nodes;
        cNode.Save();

        //TODO: WF: Write back the timestamp (if the content is the relatedContent)
    }
}
// Retrieve the required elements from the diagram and build the lists
internal void LoadDeployDiagramObjectsInformation(EA.Repository Repository)
{
    // Build the model for node placement; the necessary information is obtained by walking up the parents.
    // Store the deployment diagram elements into the execution environment, device, node and component lists.
    EA.Diagram diagram = Repository.GetCurrentDiagram();
    var TmpCommunicationList = new List<Communication>();

    for (short i = 0; i < diagram.DiagramObjects.Count; i++)
    {
        DiagramObject diagramObject = diagram.DiagramObjects.GetAt(i);
        Element element = Repository.GetElementByID(diagramObject.ElementID);
        var ConnectorList = new List<Connector>();
        var xdoc = element.Notes.ToXDocument();

        switch (element.MetaType)
        {
            case "ExecutionEnvironment":
                ExecutionEnvironmentList.Add(new ExecutionEnvironment(element.Name, element.ElementID, element.ParentID, xdoc));
                break;

            case "Device":
                // Get the connectors held by the device
                for (short j = 0; j < element.Connectors.Count; j++)
                {
                    Connector connector = element.Connectors.GetAt(j);
                    ConnectorList.Add(connector);
                    TmpCommunicationList.Add(new Communication(connector.Name, connector.ConnectorID, connector.DiagramID, connector.Notes.ToXDocument()));
                }
                DeviceList.Add(new Device(element.Name, element.ElementID, element.ParentID, xdoc, element.Stereotype, ConnectorList));
                break;

            case "Node":
                NodeList.Add(new Node(element.Name, element.ElementID, element.ParentID, xdoc));
                break;

            case "Component":
                ComponentList.Add(new Component(element.Name, element.ElementID, element.ParentID, xdoc));
                break;
        }
    }

    TmpCommunicationList.GroupBy(communication => communication.Name)
        .Select(x => x.FirstOrDefault())
        .ToList()
        .ForEach(Communication => CommunicationList.Add(Communication));
}
public static List<Case> CalculChemin(Carte carte, Case depart, Case arrivee) { List<Case> resultat = new List<Case>(); NodeList<Noeud> listeOuverte = new NodeList<Noeud>(); NodeList<Noeud> listeFermee = new NodeList<Noeud>(); List<Noeud> noeudsPossibles; int nombreNoeudsPossibles; listeOuverte.Add(new Noeud(depart, null, arrivee)); while (listeOuverte.Count > 0) { Noeud current = listeOuverte[0]; listeOuverte.RemoveAt(0); listeFermee.Add(current); if (current.Case == arrivee) { List<Case> solution = new List<Case>(); while (current.Parent != null) { solution.Add(current.Case); current = current.Parent; } return solution; } noeudsPossibles = current.NoeudsPossibles(carte, arrivee); nombreNoeudsPossibles = noeudsPossibles.Count; foreach (Noeud voisin in current.NoeudsPossibles(carte, arrivee)) if (!listeFermee.Contains(voisin)) { if (listeOuverte.Contains(voisin)) { if (voisin.Manhattan < listeOuverte[voisin].Manhattan) listeOuverte[voisin].Parent = current; } else listeOuverte.DichotomicInsertion(voisin); } } return null; }
/// <summary>
/// Flattens a list of nodes separated by comma so that all the conditions are on the bottom.
/// e.g.
///  (A) and (B) and (C) => A and B and C
///  (A, B) and (D) and (C) => A and D and C, B and D and C
///  (A) and (B) and (C, D) => A and B and C, A and B and D
///
/// It does this by generating a list of permutations for the last n-1, then n-2,
/// and with each call it multiplies out the OR'd elements.
/// </summary>
private NodeList Permute(NodeList<NodeList> arr)
{
    // in simple cases return
    if (arr.Count == 0)
    {
        return new NodeList();
    }
    if (arr.Count == 1)
    {
        return arr[0];
    }

    NodeList returner = new NodeList();

    // run permute on the next n-1
    NodeList<NodeList> sliced = new NodeList<NodeList>(arr.Skip(1));
    NodeList rest = Permute(sliced);

    // now multiply
    for (int i = 0; i < rest.Count; i++)
    {
        NodeList inner = arr[0];
        for (int j = 0; j < inner.Count; j++)
        {
            NodeList newl = new NodeList();
            newl.Add(inner[j]);

            NodeList addition = rest[i] as NodeList;
            if (addition)
            {
                newl.AddRange(addition);
            }
            else
            {
                newl.Add(rest[i]);
            }

            // add an Expression so the result is separated by spaces
            returner.Add(new Expression(newl));
        }
    }

    return returner;
}
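// The same "multiply out the OR'd branches" idea, sketched with plain framework collections so the
// recursion is easier to follow. This only illustrates the shape of the algorithm, not the dotless
// API (assumes System.Collections.Generic and System.Linq):
public static List<List<string>> PermuteSketch(List<List<string>> groups)
{
    if (groups.Count == 0) return new List<List<string>>();
    if (groups.Count == 1) return groups[0].Select(x => new List<string> { x }).ToList();

    // Permute everything after the first group, then prefix each branch of the first group.
    var rest = PermuteSketch(groups.Skip(1).ToList());
    var result = new List<List<string>>();
    foreach (var tail in rest)
    {
        foreach (var head in groups[0])
        {
            var combined = new List<string> { head };
            combined.AddRange(tail);
            result.Add(combined);
        }
    }
    return result;
}
// For example, groups [["A","B"], ["D"], ["C"]] yield the branches A D C and B D C,
// matching the second example in the summary above.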
/// <summary>
/// Load listbox items or droplist items from Document.
/// </summary>
private void LoadList()
{
    // IListBox and IDroplist share the same base interface, IListBase
    IListBase baselist = widgetsList.ElementAt(0) as IListBase;

    // There is only one widget, or they are all the same, so load the first one.
    foreach (IListItem item in baselist.Items)
    {
        NodeViewModel newItem = new NodeViewModel(item.TextValue, item.IsSelected);
        NodeList.Add(newItem);
    }

    // Only IListBox has the AllowMultiple property.
    if (baselist.WidgetType == WidgetType.ListBox)
    {
        isMultiple = (baselist as IListBox).AllowMultiple;
    }
}
public override Node Evaluate(Env env)
{
    NodeList<Element> evaldElements = new NodeList<Element>();
    foreach (Element element in Elements)
    {
        if (element.NodeValue is Extend)
        {
            env.AddExtension(this, ((Extend)element.NodeValue).Selectors);
        }
        else
        {
            evaldElements.Add(element.Evaluate(env) as Element);
        }
    }
    return new Selector(evaldElements).ReducedFrom<Selector>(this);
}
/// <summary>
/// Load the BOQ (bill of quantities) data.
/// </summary>
public void Load()
{
    RootList.Clear();
    NodeList.Clear();
    updateList.Clear();

    List<ContractBoiChangeNode> lstNode = new List<ContractBoiChangeNode>();
    Boq = contractBoqService.GetByProjectNo(ProjectNo);
    lstNode = Convert(Boq.BoiList);

    //// Initialize the approved values
    //RootList.ForEach(m =>
    //{
    //    InitReply(lstNode, m);
    //});

    lstNode.ForEach(m => NodeList.Add(m));

    NodeList.ListChanged += OnNodeList_ListChanged;
    RootList.ForEach(m => m.PropertyChanged += M_PropertyChanged);

    InitChanged();
}
/// <summary>
///
/// </summary>
/// <param name="tag"></param>
public override void VisitTag(ITag tag)
{
    if (IsTable(tag))
    {
        tables.Add(tag);
    }
    else if (IsImageTag(tag))
    {
        m_Images.Add(tag);
    }
    else if (IsBodyTag(tag))
    {
        nodesInBody = tag.Children;
    }
    else if (IsTitleTag(tag))
    {
        title = ((TitleTag)tag).Title;
    }
}
//
// A CSS Selector
//
//     .class > div + h1
//     li a:hover
//
// Selectors are made out of one or more Elements, see above.
//
public Selector Selector(Parser parser)
{
    Element e;
    var elements = new NodeList<Element>();
    var index = parser.Tokenizer.Location.Index;

    while (e = Element(parser))
    {
        elements.Add(e);
    }

    if (elements.Count > 0)
    {
        return NodeProvider.Selector(elements, index);
    }
    return null;
}
public void InitializeSystemNode() { RootNode.Name = "Root"; RootNode.NameE = "Root"; RootNode.SystemNode = true; RootNode.ElementList.Clear(); RootNode.ElementList.Add(new PmxNode.NodeElement { ElementType = PmxNode.ElementType.Bone, Index = 0 }); ExpNode.Name = "表情"; ExpNode.NameE = "Exp"; ExpNode.SystemNode = true; ExpNode.ElementList.Clear(); NodeList.Clear(); NodeList.Add(RootNode); NodeList.Add(ExpNode); }
//
// Expressions either represent mathematical operations,
// or white-space delimited Entities.
//
//     1px solid black
//     @var * 2
//
public Expression Expression(Parser parser)
{
    Node e;
    var entities = new NodeList();
    var index = parser.Tokenizer.Location.Index;

    while (e = Addition(parser) || Entity(parser))
    {
        entities.Add(e);
    }

    if (entities.Count > 0)
    {
        return NodeProvider.Expression(entities, index);
    }
    return null;
}
public WfContent this[string fieldName]
{
    get
    {
        var content = Repo.Content.Load(_path);
        if (content == null)
            throw new ApplicationException(String.Concat("Content not found: ", _path));

        Repo.Field field;
        if (content.Fields.TryGetValue(fieldName, out field))
        {
            var value = content[fieldName];

            var nodeValue = value as Node;
            if (nodeValue != null)
                return new WfContent(nodeValue);

            var enumerableValue = value as System.Collections.IEnumerable;
            if (enumerableValue != null)
            {
                var iter = enumerableValue.GetEnumerator();
                if (iter.MoveNext())
                {
                    nodeValue = (Node)iter.Current;
                    return new WfContent(nodeValue);
                }
            }
            return null;
        }
        throw new ApplicationException(String.Format("Field '{0}' not found in a {1} content: {2} ", fieldName, content.ContentType.Name, content.Path));
    }
    set
    {
        var nodes = new NodeList<Node>();
        var node = Node.LoadNode(value.Path);
        nodes.Add(node);

        var cNode = ContentNode;
        cNode[fieldName] = nodes;
        cNode.Save();

        //TODO: WF: Write back the timestamp (if the content is the relatedContent)
    }
}
public bool checkIfPath(Transform startObj, Transform endObj) { bool done = false; bool isPath = false; NodeList <string> neighborList = geomanager.neighborsOfNode(startObj.GetComponent <AbstractGeoObj>().figName); NodeList <string> rmNeighborList = new NodeList <string>(); Node <string> target = geomanager.findGraphNode(endObj.GetComponent <AbstractGeoObj>().figName); while (!done) { if (neighborList.Contains(target)) { isPath = true; done = true; } else { int idx = 0; foreach (Node <string> node in neighborList) { foreach (Node <string> node2 in geomanager.neighborsOfNode(node.Value)) { if (!neighborList.Contains(node2) && !rmNeighborList.Contains(node)) { neighborList.Add(node2); neighborList.Remove(node); rmNeighborList.Add(node); idx++; } } } if (idx == 0) { done = true; isPath = neighborList.Contains(target); } } } return(isPath); }
/// <summary>
/// Interpret MarkupExpressionStatement
/// </summary>
/// <param name="statement">MarkupExpressionStatement to interpret</param>
public override void Visit(MarkupExpressionStatement statement)
{
    // Iterate through Markup+
    ISyntaxNode[] MarkupArray = statement.GetMarkups().ToArray();
    for (int i = 0; i <= (MarkupArray.Length - 1); i++)
    {
        if (IsMarkupCall((Markup)MarkupArray[i]))
        {
            // Check if the called function contains a yield; if so, add the remaining markups/expression to the yield stack
            String functionIdentifier = ((Markup)MarkupArray[i]).GetDesignator().GetIdentifier();
            if (NodeContainsYield(SymbolTable.GetFunctionDefinition(functionIdentifier)))
            {
                // Get remaining markups
                NodeList nonInterpretedMarkups = new NodeList();
                for (int j = i + 1; j <= (MarkupArray.Length - 1); j++)
                {
                    nonInterpretedMarkups.Add(MarkupArray[j]);
                }

                // Create a new MarkupExpressionStatement and push it to the stack
                MarkupExpressionStatement markupExpressionStatement = new MarkupExpressionStatement();
                markupExpressionStatement.SetMarkups(nonInterpretedMarkups);
                markupExpressionStatement.SetExpression(statement.GetExpression());
                PushYieldNode(markupExpressionStatement);
            }
            // Interpret markup
            ((Markup)MarkupArray[i]).AcceptVisitor(this);
            return;
        }
        else
        {
            // Interpret Tag
            ((Markup)MarkupArray[i]).AcceptVisitor(this);
        }
    }

    // Interpret expression
    statement.GetExpression().AcceptVisitor(this);
    XHTMLElement element = new XHTMLElement(TextValue, Current);
    element.SetTagState(false);
    AddElement(element);
}
/// <summary>
/// Adds an OPC UA Node to this OPC UA server topology node using the OPC UA node description.
/// </summary>
public void AddOpcServerNode(ContosoOpcNodeDescription opcUaNodeDescription, List<ContosoPerformanceRelevance> opcUaNodeRelevance)
{
    foreach (var node in NodeList)
    {
        if (OpCodeRequiresOpcUaNode(opcUaNodeDescription.OpCode) &&
            node.NodeId == opcUaNodeDescription.NodeId
            )
        {
            throw new Exception(string.Format("The OPC UA node with NodeId '{0}' and SymbolicName '{1}' does already exist. Please change.",
                opcUaNodeDescription.NodeId, opcUaNodeDescription.SymbolicName));
        }
    }
    ContosoOpcUaNode opcUaNodeObject = new ContosoOpcUaNode(
        opcUaNodeDescription.NodeId,
        opcUaNodeDescription.SymbolicName,
        opcUaNodeRelevance,
        opcUaNodeDescription);
    NodeList.Add(opcUaNodeObject);
}
private NodeList <NodeList <Expression> > ParseArgumentList() { var expressionLists = new NodeList <NodeList <Expression> > (); while (true) { expressionLists.Add(ParseExpressionList()); var nextToken = _scanResult.Peek(); if (nextToken.tokenType == TokenType.Colon) { _scanResult.Next(); continue; } break; } return(expressionLists); }
private static NodeList ApplySelection(NodeList nodeList, string xpath)
{
    Guard.ArgumentNotNullOrEmpty(xpath, "xpath");

    if (xpath[0] == '/')
    {
        throw new ArgumentException("XPath expressions starting with '/' are not supported", "xpath");
    }
    if (xpath.IndexOf("//") >= 0)
    {
        throw new ArgumentException("XPath expressions with '//' are not supported", "xpath");
    }

    string head = xpath;
    string tail = null;
    int slash = xpath.IndexOf('/');
    if (slash >= 0)
    {
        head = xpath.Substring(0, slash);
        tail = xpath.Substring(slash + 1);
    }

    NodeList resultNodes = new NodeList();
    NodeFilter filter = new NodeFilter(head);

    foreach (XmlNode node in nodeList)
    {
        foreach (XmlNode childNode in node.ChildNodes)
        {
            if (filter.Pass(childNode))
            {
                resultNodes.Add(childNode);
            }
        }
    }

    return tail != null ? ApplySelection(resultNodes, tail) : resultNodes;
}
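// The same head/tail walk, sketched against plain System.Xml types to show what a path like
// "settings/add" selects: the first segment filters the children of each input node, and the
// remainder is applied recursively. This mirrors the logic above but is not the project's API
// (assumes System.Collections.Generic, System.Linq and System.Xml):
public static List<XmlNode> SelectByPathSketch(IEnumerable<XmlNode> nodes, string path)
{
    int slash = path.IndexOf('/');
    string head = slash >= 0 ? path.Substring(0, slash) : path;
    string tail = slash >= 0 ? path.Substring(slash + 1) : null;

    // Keep the children of each input node whose element name matches the head segment.
    var matches = nodes
        .SelectMany(n => n.ChildNodes.Cast<XmlNode>())
        .Where(c => c.NodeType == XmlNodeType.Element && c.Name == head)
        .ToList();

    // Recurse on the remainder of the path, one segment at a time.
    return tail != null ? SelectByPathSketch(matches, tail) : matches;
}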
public override Node Evaluate(Env env)
{
    NodeList<Element> evaldElements = new NodeList<Element>();

    foreach (Element element in Elements)
    {
        if (element.NodeValue is Extend)
        {
            if (env.MediaPath.Any())
            {
                env.MediaPath.Peek().AddExtension(this, (Extend)(((Extend)element.NodeValue).Evaluate(env)), env);
            }
            else //Global extend
            {
                env.AddExtension(this, (Extend)(((Extend)element.NodeValue).Evaluate(env)), env);
            }
        }
        else
        {
            evaldElements.Add(element.Evaluate(env) as Element);
        }
    }

    var evaluatedSelector = new Selector(evaldElements).ReducedFrom<Selector>(this);

    if (evaluatedSelector.Elements.All(e => e.NodeValue == null))
    {
        return evaluatedSelector;
    }

    parser.Tokenizer.SetupInput(evaluatedSelector.ToCSS(env), "");

    var result = new NodeList<Selector>();
    Selector selector;
    while (selector = parsers.Selector(parser))
    {
        selector.IsReference = IsReference;
        result.Add(selector.Evaluate(env) as Selector);
        if (!parser.Tokenizer.Match(','))
        {
            break;
        }
    }

    return result;
}
/// <summary>
/// Adds an OPC UA Node to this OPC UA server topology node.
/// </summary>
public void AddOpcServerNode(
    string opcUaNodeId,
    string opcUaSymbolicName,
    List<ContosoPerformanceRelevance> opcUaNodeRelevance,
    ContosoOpcNodeOpCode opCode,
    string units,
    bool visible,
    double? constValue,
    double? minimum,
    double? maximum,
    List<ContosoAlertActionDefinition> minimumAlertActionDefinitions,
    List<ContosoAlertActionDefinition> maximumAlertActionDefinitions,
    ContosoPushPinCoordinates imagePushpin,
    string warning)
{
    foreach (var node in NodeList)
    {
        if (OpCodeRequiresOpcUaNode(opCode) &&
            node.NodeId == opcUaNodeId
            )
        {
            throw new Exception(string.Format("The OPC UA node with NodeId '{0}' and SymbolicName '{1}' does already exist. Please change.", opcUaNodeId, opcUaSymbolicName));
        }
    }
    ContosoOpcUaNode opcUaNodeObject = new ContosoOpcUaNode(
        opcUaNodeId,
        opcUaSymbolicName,
        opcUaNodeRelevance,
        opCode,
        units,
        visible,
        constValue,
        minimum,
        maximum,
        minimumAlertActionDefinitions,
        maximumAlertActionDefinitions,
        imagePushpin,
        warning);
    NodeList.Add(opcUaNodeObject);
}
/// <summary>
/// Set a value in the configuration file.
/// </summary>
/// <param name="key">The key.</param>
/// <param name="Value">The value to store.</param>
public void SetKeyValue(string key, string Value)
{
    var exit = false;
    for (var i = 0; i < NodeList.Count; i++)
    {
        if (NodeList[i].Key == key)
        {
            NodeList[i].Value = Value;
            exit = true;
            break;
        }
    }

    if (!exit)
    {
        var node = new ClientConfigNode();
        node.Key = key;
        node.Value = Value;
        NodeList.Add(node);
    }
}
public bool addNode(char city)
{
    bool exist = false;
    foreach (Node node in NodeList)
    {
        if (node.City == city)
        {
            exist = true;
            break;
        }
    }
    if (!exist)
    {
        Node n = new Node(city);
        NodeList.Add(n);
    }
    return !exist;
}
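// A quick illustration of the return contract above: addNode answers "was the city newly added?",
// so inserting a duplicate returns false. The Graph class name here is hypothetical; only addNode
// and its behaviour come from the code above:
var graph = new Graph();
bool added = graph.addNode('A');      // true: 'A' was not in NodeList, so it is added
bool addedAgain = graph.addNode('A'); // false: 'A' already exists, nothing is added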
/// <summary>
/// Checks if any of the items in the second list need to react to items in the first list.
/// Recursively adds items from the second list to the first list if they need to react.
/// </summary>
/// <param name="nodesmovinglist"></param>
/// <param name="potentiallyaffectednodes"></param>
private static void CheckIfNeighborsAreAffected(NodeList<string> nodesmovinglist, NodeList<string> potentiallyaffectednodes,
    NodeList<string> newItems, NodeList<string> falseItems, NodeList<string> updateNodeList)
{
    if (potentiallyaffectednodes != null)
    {
        foreach (Node<string> neighbor in potentiallyaffectednodes.Where(p => !(newItems.Contains(p) || falseItems.Contains(p))))
        {
            UpdatableFigure neighborUF = (UpdatableFigure)neighbor.mytransform.GetComponent<AbstractGeoObj>();
            if (neighborUF.reactMotion(nodesmovinglist))
            {
                updateNodeList.Add(neighbor);
                nodesmovinglist.Add(neighbor);
                newItems.Add(neighbor);
            }
            else
            {
                falseItems.Add(neighbor);
            }
        }
    }
}
private NodeList<CartoRule> CreateCopy(NodeList<CartoRule> rules)
{
    NodeList<CartoRule> list = new NodeList<CartoRule>(rules.Count);
    for (int j = 0; j < rules.Count; j++)
        list.Add((CartoRule)rules[j].Clone());
    return list;
}
public Expression MediaFeature(Parser parser)
{
    NodeList features = new NodeList();
    var outerIndex = parser.Tokenizer.Location.Index;

    while (true)
    {
        GatherComments(parser);

        var keyword = Keyword(parser);
        if (keyword)
        {
            keyword.PreComments = PullComments();
            keyword.PostComments = GatherAndPullComments(parser);
            features.Add(keyword);
        }
        else if (parser.Tokenizer.Match('('))
        {
            GatherComments(parser);

            var memo = Remember(parser);
            var index = parser.Tokenizer.Location.Index;
            var property = Property(parser);

            var preComments = GatherAndPullComments(parser);

            // in order to support (color) and have rule/*comment*/: we need to keep :
            // out of property
            if (!string.IsNullOrEmpty(property) && !parser.Tokenizer.Match(':'))
            {
                Recall(parser, memo);
                property = null;
            }

            GatherComments(parser);

            var entity = Entity(parser);

            if (parser.Tokenizer.Match(')'))
            {
                if (!entity)
                {
                    return null;
                }

                entity.PreComments = PullComments();
                entity.PostComments = GatherAndPullComments(parser);

                if (!string.IsNullOrEmpty(property))
                {
                    var rule = NodeProvider.Rule(property, entity, index);
                    rule.IsSemiColonRequired = false;
                    features.Add(NodeProvider.Paren(rule, index));
                }
                else
                {
                    features.Add(NodeProvider.Paren(entity, index));
                }
            }
            else
                return null;
        }
        else
        {
            break;
        }
    }

    if (features.Count == 0)
        return null;

    return NodeProvider.Expression(features, outerIndex);
}
public Directive KeyFrameBlock(Parser parser, string name, string identifier, int index) { if (!parser.Tokenizer.Match('{')) return null; NodeList keyFrames = new NodeList(); const string identifierRegEx = "from|to|([0-9\\.]+%)"; while (true) { GatherComments(parser); string keyFrameIdentifier; var keyFrameIdentifier1 = parser.Tokenizer.Match(identifierRegEx); if (!keyFrameIdentifier1) break; keyFrameIdentifier = keyFrameIdentifier1.Value; if (parser.Tokenizer.Match(",")) { var keyFrameIdentifier2 = parser.Tokenizer.Match(identifierRegEx); if (!keyFrameIdentifier2) throw new ParsingException("Comma in @keyframe followed by unknown identifier", parser.Tokenizer.Location.Index); keyFrameIdentifier += "," + keyFrameIdentifier2; } var preComments = GatherAndPullComments(parser); var block = Block(parser); if (block == null) throw new ParsingException("Expected css block after key frame identifier", parser.Tokenizer.Location.Index); block.PreComments = preComments; block.PostComments = GatherAndPullComments(parser); keyFrames.Add(NodeProvider.KeyFrame(keyFrameIdentifier, block, parser.Tokenizer.Location.Index)); } if (!parser.Tokenizer.Match('}')) throw new ParsingException("Expected start, finish, % or '}'", parser.Tokenizer.Location.Index); return NodeProvider.Directive(name, identifier, keyFrames, index); }
public override Node Evaluate(Env env) { var found = false; var closures = env.FindRulesets(Selector); if(closures == null) throw new ParsingException(Selector.ToCSS(env).Trim() + " is undefined", Index); env.Rule = this; var rules = new NodeList(); if (PreComments) rules.Add(PreComments); foreach (var closure in closures) { var ruleset = closure.Ruleset; if (!ruleset.MatchArguments(Arguments, env)) continue; found = true; if (ruleset is MixinDefinition) { try { var mixin = ruleset as MixinDefinition; rules.AddRange(mixin.Evaluate(Arguments, env, closure.Context).Rules); } catch (ParsingException e) { throw new ParsingException(e.Message, e.Index, Index); } } else { if (ruleset.Rules != null) { var nodes = new NodeList(ruleset.Rules); NodeHelper.ExpandNodes<MixinCall>(env, nodes); rules.AddRange(nodes); } } } if (PostComments) rules.Add(PostComments); env.Rule = null; if (!found) { var message = String.Format("No matching definition was found for `{0}({1})`", Selector.ToCSS(env).Trim(), StringExtensions.JoinStrings(Arguments.Select(a => a.Value.ToCSS(env)), ", ")); throw new ParsingException(message, Index); } return rules; }
public Value Font(Parser parser)
{
    var value = new NodeList();
    var expression = new NodeList();
    Node e;
    var index = parser.Tokenizer.Location.Index;

    while (e = Shorthand(parser) || Entity(parser))
    {
        expression.Add(e);
    }
    value.Add(NodeProvider.Expression(expression, index));

    if (parser.Tokenizer.Match(','))
    {
        while (e = Expression(parser))
        {
            value.Add(e);
            if (!parser.Tokenizer.Match(','))
                break;
        }
    }
    return NodeProvider.Value(value, Important(parser), index);
}
//
// Expressions either represent mathematical operations,
// or white-space delimited Entities.
//
//     1px solid black
//     @var * 2
//
public Expression Expression(Parser parser)
{
    Node e;
    var entities = new NodeList();
    var index = parser.Tokenizer.Location.Index;

    while (e = Addition(parser) || Entity(parser))
    {
        e.PostComments = PullComments();
        entities.Add(e);
    }

    if (entities.Count > 0)
        return NodeProvider.Expression(entities, index);

    return null;
}
public virtual NodeList VisitNodeList(NodeList nodeList, NodeList changes, NodeList deletions, NodeList insertions){ if (changes == null || deletions == null || insertions == null) return nodeList; int n = nodeList == null ? 0 : nodeList.Count; if (n > changes.Count){Debug.Assert(false); n = changes.Count;} if (n > deletions.Count){Debug.Assert(false); n = deletions.Count;} if (n > insertions.Count){Debug.Assert(false); n = insertions.Count;} if (nodeList != null) for (int i = 0; i < n; i++) nodeList[i] = this.Visit(nodeList[i], changes[i], deletions[i], insertions[i]); NodeList result = new NodeList(insertions.Count-n); for (int i = n, m = insertions.Count; i < m; i++) result.Add(insertions[i]); return result; }
public override Node Evaluate(Env env) { var found = false; var closures = env.FindRulesets(Selector); if(closures == null) throw new ParsingException(Selector.ToCSS(env).Trim() + " is undefined", Location); env.Rule = this; var rules = new NodeList(); if (PreComments) rules.AddRange(PreComments); foreach (var closure in closures) { var ruleset = closure.Ruleset; var matchType = ruleset.MatchArguments(Arguments, env); if (matchType == MixinMatch.ArgumentMismatch) continue; found = true; if (matchType == MixinMatch.GuardFail) continue; if (ruleset is MixinDefinition) { try { var mixin = ruleset as MixinDefinition; rules.AddRange(mixin.Evaluate(Arguments, env, closure.Context).Rules); } catch (ParsingException e) { throw new ParsingException(e.Message, e.Location, Location); } } else { if (ruleset.Rules != null) { var nodes = new NodeList(ruleset.Rules); NodeHelper.ExpandNodes<MixinCall>(env, nodes); rules.AddRange(nodes); } } } if (PostComments) rules.AddRange(PostComments); env.Rule = null; if (!found) { var message = String.Format("No matching definition was found for `{0}({1})`", Selector.ToCSS(env).Trim(), Arguments.Select(a => a.Value.ToCSS(env)).JoinStrings(env.Compress ? "," : ", ")); throw new ParsingException(message, Location); } if (Important) { var importantRules = new NodeList(); foreach (Node node in rules) { Rule r = node as Rule; if (r != null) { var valueNode = r.Value; var value = valueNode as Value; value = value != null ? new Value(value.Values, "!important").ReducedFrom<Value>(value) : new Value(new NodeList {valueNode}, "!important"); importantRules.Add((new Rule(r.Name, value)).ReducedFrom<Rule>(r)); } else { importantRules.Add(node); } } return importantRules; } return rules; }
/// <summary>
/// Reads a network definition from XML.
/// An activation function library is required to decode the function ID at each node, typically the
/// library is stored alongside the network definition XML and will have already been read elsewhere and
/// passed in here.
/// </summary>
/// <param name="xr">The XmlReader to read from.</param>
/// <param name="activationFnLib">The activation function library used to decode node activation function IDs.</param>
/// <param name="nodeFnIds">Indicates if node activation function IDs should be read. They are required
/// for HyperNEAT genomes but not NEAT.</param>
public static NetworkDefinition ReadNetworkDefinition(XmlReader xr, IActivationFunctionLibrary activationFnLib, bool nodeFnIds)
{
    // Find <Network>.
    XmlIoUtils.MoveToElement(xr, false, __ElemNetwork);
    int initialDepth = xr.Depth;

    // Find <Nodes>.
    XmlIoUtils.MoveToElement(xr, true, __ElemNodes);

    // Create a reader over the <Nodes> sub-tree.
    int inputNodeCount = 0;
    int outputNodeCount = 0;
    NodeList nodeList = new NodeList();
    using(XmlReader xrSubtree = xr.ReadSubtree())
    {
        // Re-scan for the root <Nodes> element.
        XmlIoUtils.MoveToElement(xrSubtree, false);

        // Move to first node elem.
        XmlIoUtils.MoveToElement(xrSubtree, true, __ElemNode);

        // Read node elements.
        do
        {
            NodeType nodeType = ReadAttributeAsNodeType(xrSubtree, __AttrType);
            uint id = XmlIoUtils.ReadAttributeAsUInt(xrSubtree, __AttrId);

            int fnId = 0;
            double[] auxState = null;
            if(nodeFnIds)
            {   // Read activation fn ID.
                fnId = XmlIoUtils.ReadAttributeAsInt(xrSubtree, __AttrActivationFunctionId);

                // Read aux state as comma separated list of real values.
                auxState = XmlIoUtils.ReadAttributeAsDoubleArray(xrSubtree, __AttrAuxState);
            }

            // TODO: Read node aux state data.
            NetworkNode node = new NetworkNode(id, nodeType, fnId, auxState);
            nodeList.Add(node);

            // Track the number of input and output nodes.
            switch(nodeType)
            {
                case NodeType.Input:
                    inputNodeCount++;
                    break;
                case NodeType.Output:
                    outputNodeCount++;
                    break;
            }
        }
        while(xrSubtree.ReadToNextSibling(__ElemNode));
    }

    // Find <Connections>.
    XmlIoUtils.MoveToElement(xr, false, __ElemConnections);

    // Create a reader over the <Connections> sub-tree.
    ConnectionList connList = new ConnectionList();
    using(XmlReader xrSubtree = xr.ReadSubtree())
    {
        // Re-scan for the root <Connections> element.
        XmlIoUtils.MoveToElement(xrSubtree, false);

        // Move to first connection elem.
        string localName = XmlIoUtils.MoveToElement(xrSubtree, true);
        if(localName == __ElemConnection)
        {   // We have at least one connection.
            // Read connection elements.
            do
            {
                uint srcId = XmlIoUtils.ReadAttributeAsUInt(xrSubtree, __AttrSourceId);
                uint tgtId = XmlIoUtils.ReadAttributeAsUInt(xrSubtree, __AttrTargetId);
                double weight = XmlIoUtils.ReadAttributeAsDouble(xrSubtree, __AttrWeight);
                NetworkConnection conn = new NetworkConnection(srcId, tgtId, weight);
                connList.Add(conn);
            }
            while(xrSubtree.ReadToNextSibling(__ElemConnection));
        }
    }

    // Move the reader beyond the closing tags </Connections> and </Network>.
    do
    {
        if (xr.Depth <= initialDepth) {
            break;
        }
    }
    while(xr.Read());

    // Construct and return loaded network definition.
    return new NetworkDefinition(inputNodeCount, outputNodeCount, activationFnLib, nodeList, connList);
}
//
// A Mixin definition, with a list of parameters
//
//     .rounded (@radius: 2px, @color) {
//        ...
//     }
//
// Until we have a finer grained state-machine, we have to
// do a look-ahead, to make sure we don't have a mixin call.
// See the `rule` function for more information.
//
// We start by matching `.rounded (`, and then proceed on to
// the argument list, which has optional default values.
// We store the parameters in `params`, with a `value` key,
// if there is a value, such as in the case of `@radius`.
//
// Once we've got our params list, and a closing `)`, we parse
// the `{...}` block.
//
public MixinDefinition MixinDefinition(Parser parser)
{
    if ((parser.Tokenizer.CurrentChar != '.' && parser.Tokenizer.CurrentChar != '#') ||
        parser.Tokenizer.Peek(@"[^{]*(;|})"))
        return null;

    var index = parser.Tokenizer.Location.Index;

    var memo = Remember(parser);

    var match = parser.Tokenizer.Match(@"([#.](?:[\w-]|\\(?:[a-fA-F0-9]{1,6} ?|[^a-fA-F0-9]))+)\s*\(");
    if (!match)
        return null;

    // mixin definition ignores comments before it - a css hack can't be part of a mixin definition,
    // so it may as well be a rule before the definition
    PushComments();
    GatherAndPullComments(parser); // no store as mixin definition not output

    var name = match[1];

    var parameters = new NodeList<Rule>();
    RegexMatchResult param = null;
    Node param2 = null;
    Func<bool> matchParam = () => (param = parser.Tokenizer.Match(@"@[\w-]+")) ||
                                  (param2 = Literal(parser) || Keyword(parser));
    for (var i = parser.Tokenizer.Location.Index; matchParam(); i = parser.Tokenizer.Location.Index)
    {
        if (param != null)
        {
            GatherAndPullComments(parser);
            if (parser.Tokenizer.Match(':'))
            {
                GatherComments(parser);
                var value = Expression(parser);
                if (value)
                    parameters.Add(NodeProvider.Rule(param.Value, value, i));
                else
                    throw new ParsingException("Expected value", i);
            }
            else
                parameters.Add(NodeProvider.Rule(param.Value, null, i));
        }
        else
        {
            parameters.Add(NodeProvider.Rule(null, param2, i));
        }

        GatherAndPullComments(parser);

        if (!parser.Tokenizer.Match(','))
            break;

        GatherAndPullComments(parser);
    }

    if (!parser.Tokenizer.Match(')'))
        throw new ParsingException("Expected ')'", parser.Tokenizer.Location.Index);

    GatherAndPullComments(parser);

    var rules = Block(parser);

    PopComments();

    if (rules != null)
        return NodeProvider.MixinDefinition(name, parameters, rules, index);

    Recall(parser, memo);

    return null;
}
//
// Mixins
//

//
// A Mixin call, with an optional argument list
//
//     #mixins > .square(#fff);
//     .rounded(4px, black);
//     .button;
//
// The `while` loop is there because mixins can be
// namespaced, but we only support the child and descendant
// selector for now.
//
public MixinCall MixinCall(Parser parser)
{
    var elements = new NodeList<Element>();
    var index = parser.Tokenizer.Location.Index;

    RegexMatchResult e;
    Combinator c = null;

    PushComments();

    for (var i = parser.Tokenizer.Location.Index;
         e = parser.Tokenizer.Match(@"[#.][a-zA-Z0-9_-]+");
         i = parser.Tokenizer.Location.Index)
    {
        elements.Add(NodeProvider.Element(c, e.Value, i));

        i = parser.Tokenizer.Location.Index;
        var match = parser.Tokenizer.Match('>');
        c = match != null ? NodeProvider.Combinator(match.Value, i) : null;
    }

    var args = new List<NamedArgument>();
    if (parser.Tokenizer.Match('('))
    {
        Expression arg;
        while (arg = Expression(parser))
        {
            var value = arg;
            string name = null;

            if (arg.Value.Count == 1 && arg.Value[0] is Variable)
            {
                if (parser.Tokenizer.Match(':'))
                {
                    if (value = Expression(parser))
                        name = (arg.Value[0] as Variable).Name;
                    else
                        throw new ParsingException("Expected value", parser.Tokenizer.Location.Index);
                }
            }

            args.Add(new NamedArgument { Name = name, Value = value });

            if (!parser.Tokenizer.Match(','))
                break;
        }
        if (!parser.Tokenizer.Match(')'))
            throw new ParsingException("Expected ')'", parser.Tokenizer.Location.Index);
    }

    if (elements.Count > 0)
    {
        // if elements then we've picked up chars so don't need to worry about remembering
        var postComments = GatherAndPullComments(parser);

        if (End(parser))
        {
            var mixinCall = NodeProvider.MixinCall(elements, args, index);
            mixinCall.PostComments = postComments;
            PopComments();
            return mixinCall;
        }
    }

    PopComments();

    return null;
}
public NodeList<Node> Arguments(Parser parser)
{
    var args = new NodeList<Node>();
    Node arg;

    while ((arg = Assignment(parser)) || (arg = Expression(parser)))
    {
        args.Add(arg);
        if (!parser.Tokenizer.Match(','))
            break;
    }
    return args;
}
public virtual Differences VisitNodeList(NodeList list1, NodeList list2, out NodeList changes, out NodeList deletions, out NodeList insertions){
  changes = list1 == null ? null : list1.Clone();
  deletions = list1 == null ? null : list1.Clone();
  insertions = list1 == null ? new NodeList() : list1.Clone();
  //^ assert insertions != null;
  Differences differences = new Differences();
  for (int j = 0, n = list2 == null ? 0 : list2.Count; j < n; j++){
    //^ assert list2 != null;
    Node nd2 = list2[j];
    if (nd2 == null) continue;
    insertions.Add(null);
  }
  TrivialHashtable savedDifferencesMapFor = this.differencesMapFor;
  this.differencesMapFor = null;
  TrivialHashtable matchedNodes = new TrivialHashtable();
  for (int i = 0, k = 0, n = list1 == null ? 0 : list1.Count; i < n; i++){
    //^ assert list1 != null && changes != null && deletions != null;
    Node nd1 = list1[i];
    if (nd1 == null) continue;
    Differences diff;
    int j;
    Node nd2 = this.GetClosestMatch(nd1, list1, list2, i, ref k, matchedNodes, out diff, out j);
    if (nd2 == null || diff == null){Debug.Assert(nd2 == null && diff == null); continue;}
    matchedNodes[nd1.UniqueKey] = nd1;
    matchedNodes[nd2.UniqueKey] = nd2;
    changes[i] = diff.Changes as Node;
    deletions[i] = diff.Deletions as Node;
    insertions[i] = diff.Insertions as Node;
    insertions[n+j] = nd1; //Records the position of nd2 in list2 in case the change involved a permutation
    Debug.Assert(diff.Changes == changes[i] && diff.Deletions == deletions[i] && diff.Insertions == insertions[i]);
    differences.NumberOfDifferences += diff.NumberOfDifferences;
    differences.NumberOfSimilarities += diff.NumberOfSimilarities;
  }
  //Find deletions
  for (int i = 0, n = list1 == null ? 0 : list1.Count; i < n; i++){
    //^ assert list1 != null && changes != null && deletions != null;
    Node nd1 = list1[i];
    if (nd1 == null) continue;
    if (matchedNodes[nd1.UniqueKey] != null) continue;
    changes[i] = null;
    deletions[i] = nd1;
    insertions[i] = null;
    differences.NumberOfDifferences += 1;
  }
  //Find insertions
  for (int j = 0, n = list1 == null ? 0 : list1.Count, m = list2 == null ? 0 : list2.Count; j < m; j++){
    //^ assert list2 != null;
    Node nd2 = list2[j];
    if (nd2 == null) continue;
    if (matchedNodes[nd2.UniqueKey] != null) continue;
    insertions[n+j] = nd2; //Records nd2 as an insertion into list1, along with its position in list2
    differences.NumberOfDifferences += 1; //REVIEW: put the size of the tree here?
  }
  if (differences.NumberOfDifferences == 0){
    changes = null;
    deletions = null;
    insertions = null;
  }
  this.differencesMapFor = savedDifferencesMapFor;
  return differences;
}
public Ruleset EvaluateParams(Env env, List<NamedArgument> args)
{
    var arguments = new Dictionary<string, Node>();
    args = args ?? new List<NamedArgument>();
    var hasNamedArgs = false;

    foreach (var arg in args)
    {
        if (!string.IsNullOrEmpty(arg.Name))
        {
            hasNamedArgs = true;
            arguments[arg.Name] = new Rule(arg.Name, arg.Value.Evaluate(env)) { Location = arg.Value.Location };
        }
        else if (hasNamedArgs)
            throw new ParsingException("Positional arguments must appear before all named arguments.", arg.Value.Location);
    }

    for (var i = 0; i < Params.Count; i++)
    {
        if (String.IsNullOrEmpty(Params[i].Name))
            continue;

        if (arguments.ContainsKey(Params[i].Name))
            continue;

        Node val;
        if (i < args.Count && string.IsNullOrEmpty(args[i].Name))
            val = args[i].Value;
        else
        {
            //evaluate in scope of mixin definition?
            val = Params[i].Value;
        }

        if (val)
        {
            Node argRuleValue;
            if (Params[i].Variadic)
            {
                NodeList varArgs = new NodeList();
                for (int j = i; j < args.Count; j++)
                {
                    varArgs.Add(args[j].Value.Evaluate(env));
                }
                argRuleValue = (new Expression(varArgs)).Evaluate(env);
            }
            else
            {
                argRuleValue = val.Evaluate(env);
            }

            arguments[Params[i].Name] = new Rule(Params[i].Name, argRuleValue) { Location = val.Location };
        }
        else
            throw new ParsingException(
                String.Format("wrong number of arguments for {0} ({1} for {2})", Name, args != null ? args.Count : 0, _arity), Location);
    }

    var argumentNodes = new List<Node>();
    for (var i = 0; i < Math.Max(Params.Count, args.Count); i++)
    {
        argumentNodes.Add(i < args.Count ? args[i].Value : Params[i].Value);
    }

    var frame = new Ruleset(new NodeList<Selector>(), new NodeList());

    frame.Rules.Insert(0, new Rule("@arguments", new Expression(argumentNodes.Where(a => a != null)).Evaluate(env)));

    foreach (var arg in arguments)
    {
        frame.Rules.Add(arg.Value);
    }

    return frame;
}
public Expression MediaFeature(Parser parser)
{
    NodeList features = new NodeList();
    var outerIndex = parser.Tokenizer.Location.Index;

    while (true)
    {
        GatherComments(parser);

        var keyword = Keyword(parser);
        if (keyword)
        {
            keyword.PreComments = PullComments();
            keyword.PostComments = GatherAndPullComments(parser);
            features.Add(keyword);
        }
        else if (parser.Tokenizer.Match('('))
        {
            GatherComments(parser);

            var memo = Remember(parser);
            var index = parser.Tokenizer.Location.Index;
            var property = Property(parser);

            var preComments = GatherAndPullComments(parser);

            // in order to support (color) and have rule/*comment*/: we need to keep :
            // out of property
            if (!string.IsNullOrEmpty(property) && !parser.Tokenizer.Match(':'))
            {
                Recall(parser, memo);
                property = null;
            }

            GatherComments(parser);

            memo = Remember(parser);
            var entity = Entity(parser);

            if (!entity || !parser.Tokenizer.Match(')'))
            {
                Recall(parser, memo);

                // match "3/2" for instance
                var unrecognised = parser.Tokenizer.Match(@"[^\){]+");
                if (unrecognised)
                {
                    entity = NodeProvider.TextNode(unrecognised.Value, parser.Tokenizer.Location.Index);
                }

                if (!unrecognised || !parser.Tokenizer.Match(')'))
                    throw new ParsingException("missing closing bracket for media feature", index);
            }

            if (!entity)
            {
                return null;
            }

            entity.PreComments = PullComments();
            entity.PostComments = GatherAndPullComments(parser);

            if (!string.IsNullOrEmpty(property))
            {
                var rule = NodeProvider.Rule(property, entity, index);
                rule.IsSemiColonRequired = false;
                features.Add(NodeProvider.Paren(rule, index));
            }
            else
            {
                features.Add(NodeProvider.Paren(entity, index));
            }
        }
        else
        {
            break;
        }
    }

    if (features.Count == 0)
        return null;

    return NodeProvider.Expression(features, outerIndex);
}
//
// The `primary` rule is the *entry* and *exit* point of the parser.
// The rules here can appear at any level of the parse tree.
//
// The recursive nature of the grammar is an interplay between the `block`
// rule, which represents `{ ... }`, the `ruleset` rule, and this `primary` rule,
// as represented by this simplified grammar:
//
//     primary  →  (ruleset | rule)+
//     ruleset  →  selector+ block
//     block    →  '{' primary '}'
//
// Only at one point is the primary rule not called from the
// block rule: at the root level.
//
public NodeList Primary(Parser parser)
{
    Node node;
    var root = new NodeList();
    NodeList comments = null;

    GatherComments(parser);

    while (node = MixinDefinition(parser) || Rule(parser) || PullComments() ||
                  Ruleset(parser) || MixinCall(parser) || Directive(parser))
    {
        if (comments = PullComments())
        {
            root.AddRange(comments);
        }

        comments = node as NodeList;
        if (comments)
        {
            foreach (Comment c in comments)
            {
                c.IsPreSelectorComment = true;
            }
            root.AddRange(comments);
        }
        else
            root.Add(node);

        GatherComments(parser);
    }

    return root;
}
//
// A Value is a comma-delimited list of Expressions
//
//     font-family: Baskerville, Georgia, serif;
//
// In a Rule, a Value represents everything after the `:`,
// and before the `;`.
//
public Value Value(Parser parser)
{
    var expressions = new NodeList();
    var index = parser.Tokenizer.Location.Index;

    Node e;
    while (e = Expression(parser))
    {
        expressions.Add(e);
        if (!parser.Tokenizer.Match(','))
            break;
    }

    GatherComments(parser);

    var important = Important(parser);

    if (expressions.Count > 0)
    {
        var value = NodeProvider.Value(expressions, important, index);

        if (!string.IsNullOrEmpty(important))
        {
            value.PreImportantComments = PullComments();
        }

        return value;
    }

    return null;
}
//
// A CSS Selector
//
//     .class > div + h1
//     li a:hover
//
// Selectors are made out of one or more Elements, see above.
//
public Selector Selector(Parser parser)
{
    Element e;
    int realElements = 0;

    var elements = new NodeList<Element>();
    var index = parser.Tokenizer.Location.Index;

    GatherComments(parser);
    PushComments();

    while (true)
    {
        e = Element(parser);
        if (!e)
            break;

        realElements++;
        elements.Add(e);
    }

    if (realElements > 0)
    {
        var selector = NodeProvider.Selector(elements, index);
        selector.PostComments = GatherAndPullComments(parser);

        PopComments();
        selector.PreComments = PullComments();

        return selector;
    }

    PopComments();

    // We have lost comments we have absorbed here.
    // But comments should be absorbed before selectors...
    return null;
}
public NodeList<Expression> Arguments(Parser parser)
{
    var args = new NodeList<Expression>();
    Expression arg;

    while (arg = Expression(parser))
    {
        args.Add(arg);
        if (!parser.Tokenizer.Match(','))
            break;
    }
    return args;
}