private void HandleAstTreeViewSelectedItemChanged(object sender, RoutedPropertyChangedEventArgs<object> e)
{
    CommonTree node = AstTreeView.SelectedItem as CommonTree;
    if (node == null)
    {
        return;
    }

    CommonToken a = (CommonToken)currentTemplate.impl.tokens.Get(node.TokenStartIndex);
    CommonToken b = (CommonToken)currentTemplate.impl.tokens.Get(node.TokenStopIndex);
    if (a == null || b == null)
    {
        return;
    }

    Highlight(TemplateTextBox.Document, Interval.FromBounds(a.StartIndex, b.StopIndex + 1));
}
public static void Main(string[] args) { Syms.init("calc.tokens"); string gdata; using (var r = new StreamReader("terminals.txt")) { gdata = r.ReadToEnd(); } var tokenizer = new Tokenizer(gdata); string idata = Console.ReadLine(); tokenizer.setInput(idata); IList <IToken> tokens = new List <IToken>(); while (true) { Token t = tokenizer.next(); if (t.Symbol == "$") { break; //at end } //CommonToken is defined in the ANTLR runtime CommonToken T = new CommonToken(Syms.stringToInt[t.Symbol], t.Lexeme); T.Line = t.Line; tokens.Add(T); } var antlrtokenizer = new BufferedTokenStream(new ListTokenSource(tokens)); var parser = new calcParser(antlrtokenizer); parser.BuildParseTree = true; //optional: parser.ErrorHandler = new BailErrorStrategy (); //'start' should be the name of the grammar's start symbol var listener = new MyListener(); var walker = new ParseTreeWalker(); var antlrroot = parser.start(); walker.Walk(listener, antlrroot); double v = Annotations.ptp.Get(antlrroot); Console.WriteLine($"{v}"); }
public virtual IToken NextToken()
{
    if (grammar.type != GrammarType.Lexer)
    {
        return null;
    }

    if (input.LA(1) == CharStreamConstants.EndOfFile)
    {
        return new CommonToken((ICharStream)input, CharStreamConstants.EndOfFile, TokenChannels.Default, input.Index, input.Index);
    }

    int start = input.Index;
    int charPos = ((ICharStream)input).CharPositionInLine;
    CommonToken token = null;
    while (input.LA(1) != CharStreamConstants.EndOfFile)
    {
        try
        {
            token = Scan(Grammar.ArtificialTokensRuleName, null);
            break;
        }
        catch (RecognitionException re)
        {
            // report a problem and try for another
            ReportScanError(re);
            continue;
        }
    }

    // the scan can only set the type; we must set the line and the
    // other details here to make a complete token
    int stop = input.Index - 1;
    if (token == null)
    {
        return new CommonToken((ICharStream)input, CharStreamConstants.EndOfFile, TokenChannels.Default, start, start);
    }

    token.Line = ((ICharStream)input).Line;
    token.StartIndex = start;
    token.StopIndex = stop;
    token.CharPositionInLine = charPos;
    return token;
}
protected override object GetMissingSymbol(IIntStream input,
                                           RecognitionException e,
                                           int expectedTokenType,
                                           BitSet follow)
{
    string tokenText;
    if (expectedTokenType == TokenConstants.EOF)
        tokenText = "<missing EOF>";
    else
        tokenText = "<missing " + GetTokenNames()[expectedTokenType] + ">";

    CommonToken t = new CommonToken(expectedTokenType, tokenText);
    IToken current = ((ITokenStream)input).LT(1);
    if (current.Type == TokenConstants.EOF)
    {
        current = ((ITokenStream)input).LT(-1);
    }

    t.Line = current.Line;
    t.CharPositionInLine = current.CharPositionInLine;
    t.Channel = DEFAULT_TOKEN_CHANNEL;
    return t;
}
/// <summary>
/// Replace any subtree siblings of root that are completely to the left
/// or right of the lookahead range with a CommonToken(Token.INVALID_TYPE, "...")
/// node.
/// </summary>
/// <remarks>
/// Replace any subtree siblings of root that are completely to the left
/// or right of the lookahead range with a CommonToken(Token.INVALID_TYPE, "...")
/// node. The source interval for t is not altered to suit the smaller range!
/// WARNING: destructive to t.
/// </remarks>
/// <since>4.5.1</since>
public static void StripChildrenOutOfRange(ParserRuleContext t, ParserRuleContext root, int startIndex, int stopIndex)
{
    if (t == null)
    {
        return;
    }

    for (int i = 0; i < t.ChildCount; i++)
    {
        IParseTree child = t.GetChild(i);
        Interval range = child.SourceInterval;
        if (child is ParserRuleContext && (range.b < startIndex || range.a > stopIndex))
        {
            if (IsAncestorOf(child, root))
            {
                // replace only if subtree doesn't have displayed root
                CommonToken abbrev = new CommonToken(TokenConstants.InvalidType, "...");
                t.children.Set(i, new TerminalNodeImpl(abbrev));
            }
        }
    }
}
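The replacement node built inside the loop is just an ordinary CommonToken wrapped in a TerminalNodeImpl. A minimal sketch of that construction on its own (the helper name is illustrative, not from the original):

using Antlr4.Runtime;
using Antlr4.Runtime.Tree;

// Build the "..." placeholder leaf used above when eliding out-of-range
// subtrees; InvalidType marks the token as carrying no real grammar type.
static ITerminalNode MakeEllipsisNode()
{
    var abbrev = new CommonToken(TokenConstants.InvalidType, "...");
    return new TerminalNodeImpl(abbrev);
}

// A pruned subtree is then swapped for this placeholder in the parent's
// child list, so tree renderers print "..." instead of the whole subtree.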
protected override object GetMissingSymbol(IIntStream input,
                                           RecognitionException e,
                                           int expectedTokenType,
                                           BitSet follow)
{
    string tokenText;
    if (expectedTokenType == TokenTypes.EndOfFile)
        tokenText = "<missing EOF>";
    else
        tokenText = "<missing " + TokenNames[expectedTokenType] + ">";

    CommonToken t = new CommonToken(expectedTokenType, tokenText);
    IToken current = ((ITokenStream)input).LT(1);
    if (current.Type == TokenTypes.EndOfFile)
    {
        current = ((ITokenStream)input).LT(-1);
    }

    t.Line = current.Line;
    t.CharPositionInLine = current.CharPositionInLine;
    t.Channel = DefaultTokenChannel;
    return t;
}
public bool AcctOaStatus(AcctOAStatusParam pars)
{
    var token = CommonToken.GetToken();
    var http = new ArrowInterface.ArrowInterface();
    pars.acctCode = ConfigurationManager.AppSettings["dealerCode"];
    var result = http.AcctOaStatus(token.Token, pars);
    var Helper = new OracleDBHelper();
    if (result.Success)
    {
        foreach (var row in result.Rows.AsParallel())
        {
            var conn = Helper.GetNewConnection();
            conn.Open();
            var tran = conn.BeginTransaction();
            try
            {
                Helper.DeleteWithTran(row, tran);
                Helper.DeleteWithTran<AcctOAStatusDetailed>($" AND ORDERNO='{row.orderNo}'", tran);
                Helper.InsertWithTransation(row, tran);
            }
            catch (Exception e)
            {
                tran.Rollback();
                conn.Close();
                var message = $"OA同步结果插入失败:{JsonConvert.SerializeObject(row)}";
                LogHelper.Info(message);
                LogHelper.Error(e);
                return false;
            }
            tran.Commit();
        }
    }
    return result.Success;
}
/// <summary>
/// This reads the next token from the input.
/// </summary>
/// <remarks>
/// If there is a lexing error, this method inserts an error token
/// containing the bad input, and continues lexing.
/// </remarks>
/// <returns>The next token in the input.</returns>
public override IToken NextToken()
{
    try
    {
        return base.NextToken();
    }
    catch (RecognitionException re)
    {
        if (re is NoViableAltException)
        {
            Recover(re);
        }

        IToken tknErrorToken = new CommonToken(input, 0, TokenChannels.Default, state.tokenStartCharIndex, input.Index - 1);
        tknErrorToken.Line = state.tokenStartLine;
        tknErrorToken.CharPositionInLine = state.tokenStartCharPositionInLine;
        Emit(tknErrorToken);
        return state.token;
    }
}
/**
 * Given an alternative associated with a DFA state, return the list of
 * tokens (from the grammar) associated with the path through the NFA
 * following the label sequence. nfaStates gives the set of NFA states
 * associated with alt that take us from start to stop. One of the NFA
 * states in nfaStates[i] will have an edge intersecting with labels[i].
 */
public List<IToken> GetGrammarLocationsForInputSequence(List<HashSet<NFAState>> nfaStates, List<IIntSet> labels)
{
    List<IToken> tokens = new List<IToken>();
    for (int i = 0; i < nfaStates.Count - 1; i++)
    {
        HashSet<NFAState> cur = nfaStates[i];
        HashSet<NFAState> next = nfaStates[i + 1];
        IIntSet label = labels[i];

        // find NFA state with edge whose label matches labels[i]
        foreach (NFAState p in cur)
        {
            // walk p's transitions, looking for label
            for (int j = 0; j < p.NumberOfTransitions; j++)
            {
                Transition t = p.transition[j];
                if (!t.IsEpsilon && !t.Label.Set.And(label).IsNil && next.Contains((NFAState)t.Target))
                {
                    if (p.associatedASTNode != null)
                    {
                        IToken oldtoken = p.associatedASTNode.Token;
                        CommonToken token = new CommonToken(oldtoken.Type, oldtoken.Text);
                        token.Line = oldtoken.Line;
                        token.CharPositionInLine = oldtoken.CharPositionInLine;
                        tokens.Add(token);

                        // found path, move to next NFAState set
                        goto endNfaConfigLoop;
                    }
                }
            }
        }

    endNfaConfigLoop:
        continue;
    }

    return tokens;
}
void ReplaceNode(ITree parent, int index, object val, SimpleTypeEnum type)
{
    CommonTree node = null;
    var token = new CommonToken(-1, val.ToString());
    switch (type)
    {
        case SimpleTypeEnum.Bool:
            node = new BoolNode(token);
            break;
        case SimpleTypeEnum.Byte:
            node = new ByteNode(token);
            break;
        case SimpleTypeEnum.Char:
            node = new CharNode(token);
            break;
        case SimpleTypeEnum.Int:
            node = new IntegerNode(token);
            break;
        case SimpleTypeEnum.Float:
            node = new FloatNode(token);
            break;
        case SimpleTypeEnum.Double:
            node = new RealNode(token);
            break;
        default:
            throw new ArgumentOutOfRangeException("type", type, null);
    }
    parent.ReplaceChildren(index, index, node);
}
/** <summary>Return a token from this source; i.e., match a token on the char stream.</summary> */
public virtual IToken NextToken()
{
    for (;;)
    {
        state.token = null;
        state.channel = TokenChannels.Default;
        state.tokenStartCharIndex = input.Index;
        state.tokenStartCharPositionInLine = input.CharPositionInLine;
        state.tokenStartLine = input.Line;
        state.text = null;
        if (input.LA(1) == CharStreamConstants.EndOfFile)
        {
            IToken eof = new CommonToken((ICharStream)input, CharStreamConstants.EndOfFile, TokenChannels.Default, input.Index, input.Index);
            eof.Line = Line;
            eof.CharPositionInLine = CharPositionInLine;
            return eof;
        }

        try
        {
            mTokens();
            if (state.token == null)
            {
                Emit();
            }
            else if (state.token == Tokens.Skip)
            {
                continue;
            }
            return state.token;
        }
        catch (NoViableAltException nva)
        {
            ReportError(nva);
            Recover(nva); // throw out current char and try again
        }
        catch (RecognitionException re)
        {
            ReportError(re); // match() routine has already called recover()
        }
    }
}
/// <summary>
/// Given an expression subtree, concatenate the tokens it spans and return
/// a single token representing the whole expression.
/// </summary>
/// <param name="rootToken"></param>
/// <param name="input"></param>
/// <returns></returns>
public static IToken GetExpressionToken(CommonTree rootToken, ITreeNodeStream input)
{
    try
    {
        ITokenStream stream = ((Antlr.Runtime.Tree.CommonTreeNodeStream)input).TokenStream;
        int start = rootToken.TokenStartIndex;
        int end = rootToken.TokenStopIndex;
        IToken token1 = new CommonToken();
        token1.CharPositionInLine = stream.Get(start).CharPositionInLine;
        token1.Line = stream.Get(start).Line;
        for (int i = start; i <= end; i++)
        {
            token1.Text += stream.Get(i).Text;
        }
        return token1;
    }
    catch (Exception)
    {
    }
    return rootToken.Token;
}
public override object VisitHtmlElement([NotNull] HtmlElementContext context)
{
    var content = context.htmlContent();
    var name = context.htmlTagName(0)?.GetText();
    if (name == null || name.ToUpper() != "HEAD")
    {
        return base.VisitHtmlElement(context);
    }

    HtmlElemFactory factory = new HtmlElemFactory();
    var space = new CommonToken(SEA_WS, " ");
    foreach (var item in files)
    {
        HtmlElementContext elem = new HtmlElementContext(content, 0);
        elem.AddChild(new CommonToken(TAG_OPEN, "<"));
        elem.AddChild(factory.CreateTagName(elem, "link"));
        elem.AddChild(space);

        var type = factory.CreateAttribute(elem, "type", "text/css");
        elem.AddChild(type);
        elem.AddChild(space);

        var rel = factory.CreateAttribute(elem, "rel", "styleSheet");
        elem.AddChild(rel);
        elem.AddChild(space);

        var href = factory.CreateAttribute(elem, "href", item.FileName);
        elem.AddChild(href);
        elem.AddChild(new CommonToken(TAG_SLASH_CLOSE, "/>"));
        content.AddChild(elem);
    }
    return null;
}
/** <summary>
 *  The standard method called to automatically emit a token at the
 *  outermost lexical rule. The token object should point into the
 *  char buffer start..stop. If there is a text override in 'text',
 *  use that to set the token's text. Override this method to emit
 *  custom Token objects.
 *  </summary>
 *
 *  <remarks>
 *  If you are building trees, then you should also override
 *  Parser or TreeParser.getMissingSymbol().
 *  </remarks>
 */
public virtual IToken Emit()
{
    IToken t = new CommonToken(input, state.type, state.channel, state.tokenStartCharIndex, GetCharIndex() - 1);
    t.Line = state.tokenStartLine;
    t.Text = state.text;
    t.CharPositionInLine = state.tokenStartCharPositionInLine;
    Emit(t);
    return t;
}
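The remarks above invite overriding Emit to produce custom token objects. A hypothetical sketch under that reading: an ANTLR3 lexer subclass that stamps every token with its source name. FileAwareToken and MyLexer are made-up names; the state and input members mirror the runtime code shown above.

using Antlr.Runtime;

// A custom token carrying the name of the file it came from.
public class FileAwareToken : CommonToken
{
    public string FileName { get; set; }

    public FileAwareToken(ICharStream input, int type, int channel, int start, int stop)
        : base(input, type, channel, start, stop)
    {
    }
}

// Generated ANTLR3 lexers are partial classes, so the override can live here.
public partial class MyLexer
{
    public override IToken Emit()
    {
        var t = new FileAwareToken(input, state.type, state.channel,
                                   state.tokenStartCharIndex, GetCharIndex() - 1);
        t.Line = state.tokenStartLine;
        t.Text = state.text;
        t.CharPositionInLine = state.tokenStartCharPositionInLine;
        t.FileName = input.SourceName; // assumes the char stream exposes SourceName
        Emit(t);
        return t;
    }
}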
public virtual object ParseNode()
{
    // "%label:" prefix
    string label = null;
    if (ttype == TreePatternLexer.PERCENT)
    {
        ttype = tokenizer.NextToken();
        if (ttype != TreePatternLexer.ID)
        {
            return null;
        }
        label = tokenizer.sval.ToString();
        ttype = tokenizer.NextToken();
        if (ttype != TreePatternLexer.COLON)
        {
            return null;
        }
        ttype = tokenizer.NextToken(); // move to ID following colon
    }

    // Wildcard?
    if (ttype == TreePatternLexer.DOT)
    {
        ttype = tokenizer.NextToken();
        IToken wildcardPayload = new CommonToken(0, ".");
        TreeWizard.TreePattern node = new TreeWizard.WildcardTreePattern(wildcardPayload);
        if (label != null)
        {
            node.label = label;
        }
        return node;
    }

    // "ID" or "ID[arg]"
    if (ttype != TreePatternLexer.ID)
    {
        return null;
    }
    string tokenName = tokenizer.sval.ToString();
    ttype = tokenizer.NextToken();
    if (tokenName.Equals("nil"))
    {
        return adaptor.Nil();
    }

    string text = tokenName;

    // check for arg
    string arg = null;
    if (ttype == TreePatternLexer.ARG)
    {
        arg = tokenizer.sval.ToString();
        text = arg;
        ttype = tokenizer.NextToken();
    }

    // create node
    int treeNodeType = wizard.GetTokenType(tokenName);
    if (treeNodeType == TokenTypes.Invalid)
    {
        return null;
    }

    object node2 = adaptor.Create(treeNodeType, text);
    if (label != null && node2.GetType() == typeof(TreeWizard.TreePattern))
    {
        ((TreeWizard.TreePattern)node2).label = label;
    }
    if (arg != null && node2.GetType() == typeof(TreeWizard.TreePattern))
    {
        ((TreeWizard.TreePattern)node2).hasTextArg = true;
    }
    return node2;
}
public override IToken NextToken()
{
    CommonToken token = (CommonToken)base.NextToken();

    if (token.Type == PHPEnd || token.Type == PHPEndSingleLineComment)
    {
        if (_mode == SingleLineCommentMode)
        {
            // SingleLineCommentMode for such allowed syntax:
            // <?php echo "Hello world"; // comment ?>
            PopMode(); // exit from SingleLineComment mode.
        }
        PopMode(); // exit from PHP mode.

        if (string.Equals(token.Text, "</script>", System.StringComparison.Ordinal))
        {
            _phpScript = false;
            token.Type = HtmlScriptClose;
        }
        else
        {
            // Add a semicolon to the end of the statement if it is absent.
            // For example: <?php echo "Hello world" ?>
            if (_prevTokenType == SemiColon || _prevTokenType == Colon ||
                _prevTokenType == OpenCurlyBracket || _prevTokenType == CloseCurlyBracket)
            {
                token.Channel = SkipChannel;
            }
            else
            {
                token.Type = SemiColon;
            }
        }
    }
    else if (token.Type == HtmlName)
    {
        _htmlNameText = token.Text;
    }
    else if (token.Type == HtmlDoubleQuoteString)
    {
        if (string.Equals(token.Text, "php", System.StringComparison.OrdinalIgnoreCase) &&
            string.Equals(_htmlNameText, "language"))
        {
            _phpScript = true;
        }
    }
    else if (_mode == HereDoc)
    {
        // Heredoc and Nowdoc syntax support: http://php.net/manual/en/language.types.string.php#language.types.string.syntax.heredoc
        switch (token.Type)
        {
            case StartHereDoc:
            case StartNowDoc:
                _heredocIdentifier = token.Text.Substring(3).Trim().Trim('\'');
                break;

            case HereDocText:
                if (CheckHeredocEnd(token.Text))
                {
                    PopMode();
                    var heredocIdentifier = GetHeredocIdentifier(token.Text);
                    if (token.Text.Trim().EndsWith(";"))
                    {
                        token.Text = heredocIdentifier + ";\n";
                        token.Type = SemiColon;
                    }
                    else
                    {
                        token = (CommonToken)base.NextToken();
                        token.Text = heredocIdentifier + "\n;";
                    }
                }
                break;
        }
    }
    else if (_mode == PHP)
    {
        if (_channel != Hidden)
        {
            _prevTokenType = token.Type;
        }
    }

    return token;
}
static string PreProcess(IList<IToken> tokens)
{
    int NUMBER = antlrDict[nameof(NUMBER)];
    int ID = antlrDict[nameof(ID)];
    int PARENTHESIS_OPEN = antlrDict["'('"];
    int PARENTHESIS_CLOSE = antlrDict["')'"];
    int MULTIPLY = antlrDict["'*'"];
    int POWER = antlrDict["'^'"];
    const int FUNCTION = -0xFF;
    const int VARIABLE = -0xEE;

    tokens = tokens.Where(token => token.Channel == 0).ToList();

    bool IsTypeEqual(IToken token, int type)
    {
        if (token.Type == ID)
        {
            if (SyntaxInfo.goodStringsForFunctions.ContainsKey(token.Text))
            {
                return type == FUNCTION;
            }
            else
            {
                return type == VARIABLE;
            }
        }
        else
        {
            return token.Type == type;
        }
    }

    /// <summary>
    /// Given two token types, returns the position of the first token of
    /// the pair if found, -1 otherwise.
    /// </summary>
    int FindSubPair(int type1, int type2)
    {
        for (int i = 0; i < tokens.Count - 1; i++)
        {
            if (IsTypeEqual(tokens[i], type1) && IsTypeEqual(tokens[i + 1], type2))
            {
                return i;
            }
        }
        return -1;
    }

    /// <summary>
    /// Finds all occurrences of [type1, type2] and inserts a token between each of them
    /// </summary>
    void InsertIntoPair(int type1, int type2, IToken token)
    {
        int pos;
        while ((pos = FindSubPair(type1, type2)) != -1)
        {
            tokens.Insert(pos + 1 /* we need to keep the first one behind */, token);
        }
    }

    IToken multiplyer = new CommonToken(MULTIPLY, "*");
    IToken power = new CommonToken(POWER, "^");

    InsertIntoPair(NUMBER, VARIABLE, multiplyer);                    // 2x -> 2 * x
    InsertIntoPair(VARIABLE, VARIABLE, multiplyer);                  // x y -> x * y
    InsertIntoPair(NUMBER, PARENTHESIS_OPEN, multiplyer);            // 2( -> 2 * (
    InsertIntoPair(PARENTHESIS_CLOSE, NUMBER, power);                // )2 -> ) ^ 2
    InsertIntoPair(VARIABLE, PARENTHESIS_OPEN, multiplyer);          // x( -> x * (
    InsertIntoPair(PARENTHESIS_CLOSE, VARIABLE, multiplyer);         // )x -> ) * x
    InsertIntoPair(VARIABLE, NUMBER, power);                         // x2 -> x ^ 2
    InsertIntoPair(NUMBER, NUMBER, power);                           // 3 2 -> 3 ^ 2
    InsertIntoPair(NUMBER, FUNCTION, multiplyer);                    // 2sqrt -> 2 * sqrt
    InsertIntoPair(VARIABLE, FUNCTION, multiplyer);                  // x sqrt -> x * sqrt
    InsertIntoPair(PARENTHESIS_CLOSE, FUNCTION, multiplyer);         // )sqrt -> ) * sqrt
    InsertIntoPair(PARENTHESIS_CLOSE, PARENTHESIS_OPEN, multiplyer); // )( -> ) * (

    var builder = new StringBuilder();
    tokens.RemoveAt(tokens.Count - 1); // remove <EOF> token
    foreach (var token in tokens)
    {
        builder.Append(token.Text);
    }
    return builder.ToString();
}
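The pair-insertion technique in PreProcess does not depend on ANTLR types; a self-contained sketch of the same idea over plain strings (all names here are illustrative, not from the original):

using System;
using System.Collections.Generic;

// Insert 'sep' between every adjacent pair matching (first, second),
// mirroring InsertIntoPair(NUMBER, VARIABLE, multiplyer) above.
static void InsertBetween(List<string> toks,
                          Func<string, bool> first,
                          Func<string, bool> second,
                          string sep)
{
    for (int i = 0; i + 1 < toks.Count; i++)
    {
        if (first(toks[i]) && second(toks[i + 1]))
        {
            toks.Insert(i + 1, sep);
            i++; // skip over the separator we just added
        }
    }
}

// Usage: ["2", "x"] becomes ["2", "*", "x"], i.e. "2x" -> "2*x".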
public virtual void processToken(int indexIntoRealTokens, int tokenIndexInStream, bool collectAnalysis)
{
    CommonToken curToken = (CommonToken)testDoc.tokens.Get(tokenIndexInStream);
    string tokText = curToken.Text;
    TerminalNode node = tokenToNodeMap[curToken];

    int[] features = getFeatures(testDoc, tokenIndexInStream);
    int[] featuresForAlign = new int[features.Length];
    Array.Copy(features, 0, featuresForAlign, 0, features.Length);

    int injectNL_WS = wsClassifier.classify(k, features, Trainer.MAX_WS_CONTEXT_DIFF_THRESHOLD);
    injectNL_WS = emitCommentsToTheLeft(tokenIndexInStream, injectNL_WS);

    int newlines = 0;
    int ws = 0;
    if ((injectNL_WS & 0xFF) == Trainer.CAT_INJECT_NL)
    {
        newlines = Trainer.unnlcat(injectNL_WS);
    }
    else if ((injectNL_WS & 0xFF) == Trainer.CAT_INJECT_WS)
    {
        ws = Trainer.unwscat(injectNL_WS);
    }

    if (newlines == 0 && ws == 0 && cannotJoin(realTokens[indexIntoRealTokens - 1], curToken))
    {
        // failsafe!
        ws = 1;
    }

    int alignOrIndent = Trainer.CAT_ALIGN;
    if (newlines > 0)
    {
        output.Append(Tool.newlines(newlines));
        line += newlines;
        charPosInLine = 0;

        // getFeatures() doesn't know what line curToken is on. If \n, we need to find exemplars that start a line
        featuresForAlign[Trainer.INDEX_FIRST_ON_LINE] = 1; // use \n prediction to match exemplars for alignment

        alignOrIndent = hposClassifier.classify(k, featuresForAlign, Trainer.MAX_ALIGN_CONTEXT_DIFF_THRESHOLD);
        if ((alignOrIndent & 0xFF) == Trainer.CAT_ALIGN_WITH_ANCESTOR_CHILD)
        {
            align(alignOrIndent, node);
        }
        else if ((alignOrIndent & 0xFF) == Trainer.CAT_INDENT_FROM_ANCESTOR_CHILD)
        {
            indent(alignOrIndent, node);
        }
        else if ((alignOrIndent & 0xFF) == Trainer.CAT_ALIGN)
        {
            IList<Token> tokensOnPreviousLine = Trainer.getTokensOnPreviousLine(testDoc.tokens, tokenIndexInStream, line);
            if (tokensOnPreviousLine.Count > 0)
            {
                Token firstTokenOnPrevLine = tokensOnPreviousLine[0];
                int indentCol = firstTokenOnPrevLine.Column;
                charPosInLine = indentCol;
                output.Append(Tool.spaces(indentCol));
            }
        }
        else if ((alignOrIndent & 0xFF) == Trainer.CAT_INDENT)
        {
            indent(alignOrIndent, node);
        }
    }
    else
    {
        // inject whitespace instead of \n?
        output.Append(Tool.spaces(ws));
        charPosInLine += ws;
    }

    // update Token object with position information now that we are about to emit it
    curToken.Line = line;
    curToken.Column = charPosInLine;

    TokenPositionAnalysis tokenPositionAnalysis =
        getTokenAnalysis(features, featuresForAlign, tokenIndexInStream, injectNL_WS, alignOrIndent, collectAnalysis);
    analysis[tokenIndexInStream] = tokenPositionAnalysis;

    int n = tokText.Length;
    tokenPositionAnalysis.charIndexStart = output.Length;
    tokenPositionAnalysis.charIndexStop = tokenPositionAnalysis.charIndexStart + n - 1;

    // emit
    output.Append(tokText);
    charPosInLine += n;
}
protected virtual void ResolveSqsEscape(CommonToken token, StringBuilder sb)
{
    char c = ResolveSqsEscapeChar(token);
    sb.Append(c);
}
public bool SaleOrderUpload(List<string> billNos)
{
    var auditType = AuditEnums.提交同步;
    ICPOBILLAuditor auditor = new ICPOBILLAuditor("System", auditType);
    var err = auditor.CheckOption(billNos, auditType);
    if (!err.Equals(""))
    {
        throw new ArgumentException(err);
    }

    var token = CommonToken.GetToken();
    var http = new ArrowInterface.ArrowInterface();
    var Helper = new OracleDBHelper();

    string where = $" AND LHOUTSYSTEMOD IN ('{string.Join("','", billNos)}')";
    var bills = Helper.GetWithWhereStr<SaleOrderUploadParam>(where);
    bills.ForEach(b =>
    {
        var details = Helper.GetWhere(new SaleOrderUploadDetailedParam() { lHOutSystemID = b.lHOutSystemID }).ToArray();
        b.saleOrderItemList = details;
    });

    List<string> errors = new List<string>();
    bills.ForEach(b =>
    {
        var conn = Helper.GetNewConnection();
        conn.Open();
        var tran = conn.BeginTransaction();
        try
        {
            var result = http.SaleOrderUpload(token.Token, b);
            if (result.Success)
            {
                var saleorderRepository = new DefaultRepository<Order>(DBTypeEnums.ORACLE);

                // Write the Arrow sale order number back to the FDesBillNo field
                // of the local purchase order table ICPOBILL.
                var whereStr = $" AND FBILLNO='{b.lHOutSystemOd}'";
                var icpobill = Helper.GetWithTranWithWhereStr<ICPOBILLMODEL>(whereStr, tran).SingleOrDefault();
                saleorderRepository.Delete(new { lHOutSystemID = b.lHOutSystemID }, tran);
                foreach (var row in result.item.AsParallel())
                {
                    Helper.InsertWithTransation(row, tran);
                    icpobill.FDesBillNo = row.orderNo;
                }

                // Update the local purchase order table ICPOBILL.
                icpobill.FSTATUS = (int)ICPOBILLStatus.关闭;
                icpobill.FSYNCSTATUS = (int)ICPOBILLSyneStatus.已同步;
                Helper.UpdateWithTransation(icpobill, tran);
            }
            else
            {
                errors.Add($"单据【{b.lHOutSystemOd}】上传失败:{result.Message}");
            }
            tran.Commit();
            conn.Close();
        }
        catch (Exception e)
        {
            tran.Rollback();
            conn.Close();
            var message = $"销售订单【{b.lHOutSystemOd}】上传失败:{e.Message}";
            LogHelper.Info(message);
            LogHelper.Error(e);
            throw;
        }
    });

    if (errors.Count > 0)
    {
        throw new Exception(string.Join("\r\n", errors));
    }
    return true;
}
public void Initialize()
{
    var commonToken = new CommonToken();
    var commonTree = new CommonTree();
    var listStack = new ListStack<string>();
}
public override void ConsumeHiddenToken(IToken token)
{
    //System.out.println("consume hidden token "+token);
    lastTokenConsumed = (CommonToken)token;
}
/** <summary>Return a token from this source; i.e., match a token on the char stream.</summary> */
public virtual IToken NextToken()
{
    for (;;)
    {
        state.token = null;
        state.channel = TokenChannels.Default;
        state.tokenStartCharIndex = input.Index;
        state.tokenStartCharPositionInLine = input.CharPositionInLine;
        state.tokenStartLine = input.Line;
        state.text = null;
        if (input.LA(1) == CharStreamConstants.EndOfFile)
        {
            IToken eof = new CommonToken((ICharStream)input, CharStreamConstants.EndOfFile, TokenChannels.Default, input.Index, input.Index);
            eof.Line = Line;
            eof.CharPositionInLine = CharPositionInLine;
            return eof;
        }

        try
        {
            mTokens();
            if (state.token == null)
            {
                Emit();
            }
            else if (state.token == Tokens.Skip)
            {
                continue;
            }
            return state.token;
        }
        catch (NoViableAltException nva)
        {
            ReportError(nva);
            Recover(nva); // throw out current char and try again
        }
        catch (RecognitionException re)
        {
            ReportError(re); // match() routine has already called recover()
        }
    }
}
public AuthorizationToken GetToken()
{
    // Request a token
    return CommonToken.GetToken();
}
public Token(IParseTree node, CommonToken token)
    : base(node)
{
    _token = token;
}
public bool CheckQuote(string what_rule)
{
    var lexer_state = this.State;
    var parser_state = Parser.State;
    var ts = this.RealTokenStream;

    //System.Console.WriteLine("Rule " + what_rule);
    //System.Console.WriteLine("lexer state " + lexer_state);
    //System.Console.WriteLine("parser state " + parser_state);
    //System.Console.Write("prior tokens: ");
    //for (int i = magic; i > 0; --i)
    //{
    //    try
    //    {
    //        IToken cur = ts.LT(-i);
    //        if (cur != null)
    //            System.Console.WriteLine(cur.ToString());
    //    }
    //    catch
    //    {
    //    }
    //}

    IToken last = ts.LT(-1);
    var stop = last.StopIndex;
    CommonToken last_ct = last as CommonToken;
    var s2 = last_ct.InputStream.Index;
    IIntStream istr = this.InputStream;
    var num_chars = s2 - stop - 1;

    //System.Console.Write("prior characters to last token: ");
    //for (int i = num_chars; i > 0; --i)
    //{
    //    try
    //    {
    //        var cur = istr.LA(-i);
    //        if (cur > 0)
    //            System.Console.Write((char)cur);
    //    }
    //    catch
    //    {
    //    }
    //}
    //System.Console.WriteLine();
    //System.Console.WriteLine("Parser context:");
    //var ctx = Parser.Context;
    //while (ctx != null)
    //{
    //    System.Console.WriteLine("c = " + ctx.GetType().Name);
    //    ctx = ctx.Parent as ParserRuleContext;
    //}

    // Heuristic decision making.
    bool result = false;
    int quote_count = 0;
    int last_quote = 0;
    for (int i = 0; i < num_chars; ++i)
    {
        try
        {
            var cur = istr.LA(-i);
            if (cur > 0)
            {
                if (cur == (int)'\'')
                {
                    quote_count++;
                    last_quote = i;
                }
            }
        }
        catch
        {
        }
    }

    bool is_operator = true;
    for (int i = last_quote; i < num_chars; ++i)
    {
        try
        {
            var cur = istr.LA(-i);
            if (cur > 0)
            {
                var c = (char)cur;
                if (i == last_quote && c == '\'')
                {
                    ;
                }
                else if (c == ' ' || c == '\t')
                {
                    ;
                }
                else if (Char.IsLetterOrDigit(c))
                {
                    is_operator = true;
                    break;
                }
                else
                {
                    is_operator = false;
                    break;
                }
            }
        }
        catch
        {
        }
    }

    if (what_rule == "SQ")
    {
        result = quote_count == 1 && is_operator;
        //System.Console.WriteLine(result ? ("yes, it's a " + what_rule + " operator.") : ("no, it's not a " + what_rule + " operator."));
    }
    else
    {
        result = (quote_count > 1 && !is_operator);
        //System.Console.WriteLine(result ? ("yes, it's a " + what_rule) : ("no, it's not a " + what_rule));
    }
    //System.Console.WriteLine();
    return result;
}
public static void Main(string[] args)
{
    Syms.init("ssupl.tokens");

    string gdata;
    using (var r = new StreamReader("terminals.txt"))
    {
        gdata = r.ReadToEnd();
    }
    var tokenizer = new Tokenizer(gdata);

    string idata;
    using (var r = new StreamReader("input.txt"))
    {
        idata = r.ReadToEnd();
    }

    var rex = new Regex(@"\n[ \t]+([^\n]+)");
    idata = rex.Replace(idata, " $1");
    idata += "\n";
    tokenizer.setInput(idata);

    IList<IToken> tokens = new List<IToken>();
    while (true)
    {
        Token t = tokenizer.next();
        if (t.Symbol == "$")
        {
            break;
        }
        CommonToken T = new CommonToken(Syms.stringToInt[t.Symbol], t.Lexeme);
        T.Line = t.Line;
        tokens.Add(T);
    }

    var antlrtokenizer = new BufferedTokenStream(new ListTokenSource(tokens));
    var parser = new ssuplParser(antlrtokenizer);
    parser.BuildParseTree = true;
    parser.ErrorHandler = new BailErrorStrategy();
    var antlrroot = parser.start();

    var listener = new CodeGenerator();
    var walker = new ParseTreeWalker();
    walker.Walk(listener, antlrroot);

    var allcode = new ASM(
        "default rel",
        "section .text",
        "global main",
        "main:",
        listener.code.Get(antlrroot),
        "section .data");

    using (var w = new StreamWriter("out.asm"))
    {
        w.Write(allcode.ToString());
    }
}
public ArgumentBag(OclExpression expr, CommonToken start, CommonToken stop)
{
    this.Expression = expr;
    this.Start = start;
    this.Stop = stop;
}
public static void Main(string[] args)
{
    Syms.init("ssupl.tokens");

    string gdata;
    using (var r = new StreamReader("terminals.txt"))
    {
        gdata = r.ReadToEnd();
    }
    var tokenizer = new Tokenizer(gdata);

    string idata;
    using (var r = new StreamReader("input.txt"))
    {
        idata = r.ReadToEnd();
    }
    tokenizer.setInput(idata);

    IList<IToken> tokens = new List<IToken>();
    while (true)
    {
        Token t = tokenizer.next();
        //Console.WriteLine("token: " + t);
        if (t.Symbol == "$")
        {
            break;
        }
        CommonToken T = new CommonToken(Syms.stringToInt[t.Symbol], t.Lexeme);
        T.Line = t.Line;
        tokens.Add(T);
    }

    var antlrtokenizer = new BufferedTokenStream(new ListTokenSource(tokens));
    var parser = new ssuplParser(antlrtokenizer);
    parser.BuildParseTree = true;
    parser.ErrorHandler = new BailErrorStrategy();
    var antlrroot = parser.start();

    var listener = new CodeGenerator();
    var walker = new ParseTreeWalker();
    walker.Walk(listener, antlrroot);

    var allcode = new ASM(
        "default rel",
        "section .text",
        "global main",
        "main:",
        listener.code.Get(antlrroot),
        "section .data");

    // This writes the functions and the actual program.
    using (var w = new StreamWriter("out.asm"))
    {
        //Console.WriteLine("----------------------------");
        //Console.WriteLine(allcode);
        //Console.WriteLine("----------------------------");
        w.Write(allcode.ToString());

        foreach (var literal in listener.stringPool.Keys)
        {
            w.WriteLine("; " + literal.Replace("\n", "\\n"));
            w.WriteLine(listener.stringPool[literal].address + ":");
            w.Write("db ");
            byte[] b = Encoding.ASCII.GetBytes(literal);
            for (int i = 0; i < literal.Length; ++i)
            {
                w.Write(b[i]);
                w.Write(",");
            }
            w.WriteLine("0");
        }
    }

    // This makes global variables possible in the data section after the program.
    // It is literally appended to the bottom of the out.asm file.
    var symtable = listener.symtable.table;
    foreach (var sym in symtable)
    {
        // A vec4 needs dq 0 four times.
        if (sym.Value.type == VarType.VEC4)
        {
            var globaldata = new ASM(
                $"{sym.Value.location}:",
                "dq 0",
                "dq 0",
                "dq 0",
                "dq 0");
            using (var appendglobals = File.AppendText("out.asm"))
            {
                appendglobals.Write(globaldata.ToString());
            }
        }
        else
        {
            var globaldata = new ASM(
                $"{sym.Value.location}:",
                "dq 0");
            using (var appendglobals = File.AppendText("out.asm"))
            {
                appendglobals.Write(globaldata.ToString());
            }
        }
    }
}
public override void ConsumeHiddenToken(IToken token)
{
    //System.out.println("consume hidden token "+token);
    _lastTokenConsumed = (CommonToken)token;
}
public static IParseTree ReconstructTreeAux(Parser grammar, Lexer lexer, IParseTree ast_tree, ParserRuleContext parent)
{
    if (ast_tree == null)
    {
        return null;
    }

    // Pre-order visit.
    if (ast_tree as AstParserParser.NodeContext != null)
    {
        // Convert.
        var v = ast_tree as AstParserParser.NodeContext;
        var id = v.GetChild(1).GetText();
        if (id == "TOKEN")
        {
            var type_attr = v.GetChild(2);
            var type_str = type_attr.GetChild(2).GetText();
            type_str = type_str.Substring(1, type_str.Length - 2);
            var type = Int32.Parse(type_str);

            var text_attr = v.GetChild(3);
            var text_str = text_attr.GetChild(2).GetText();
            text_str = text_str.Substring(1, text_str.Length - 2);
            var text = text_str;

            var sym = new CommonToken(type, text);
            var x = new TerminalNodeImpl(sym);
            if (parent != null)
            {
                parent.AddChild(x);
            }
            return x;
        }
        else
        {
            // Look up "<id>Context" in the grammar.
            id = id + "Context";
            var u = grammar.GetType().GetNestedTypes().Where(t =>
            {
                if (t.IsClass && t.Name.ToLower() == id.ToLower())
                {
                    return true;
                }
                return false;
            });
            var w = u.FirstOrDefault();
            object[] parms = new object[2];
            parms[0] = parent;
            parms[1] = 0;
            var x = (ParserRuleContext)Activator.CreateInstance(w, parms);
            if (parent != null)
            {
                parent.AddChild(x);
            }
            for (int i = 0; i < ast_tree.ChildCount; ++i)
            {
                var c = ast_tree.GetChild(i);
                var eq = ReconstructTreeAux(grammar, lexer, c, x);
            }
            return x;
        }
    }
    else if (ast_tree as AstParserParser.AttrContext != null)
    {
        return null;
    }
    else
    {
        var tni = ast_tree as TerminalNodeImpl;
        var sym = tni.Symbol;
        var pp = sym.GetType().FullName;
        return null;
    }
}
public TokenChangeBuilder setNewToken(CommonToken newToken)
{
    this.newToken = newToken;
    return this;
}
public virtual object ParseNode()
{
    // "%label:" prefix
    string label = null;
    if (ttype == TreePatternLexer.Percent)
    {
        ttype = tokenizer.NextToken();
        if (ttype != TreePatternLexer.Id)
        {
            return null;
        }
        label = tokenizer.sval.ToString();
        ttype = tokenizer.NextToken();
        if (ttype != TreePatternLexer.Colon)
        {
            return null;
        }
        ttype = tokenizer.NextToken(); // move to ID following colon
    }

    // Wildcard?
    if (ttype == TreePatternLexer.Dot)
    {
        ttype = tokenizer.NextToken();
        IToken wildcardPayload = new CommonToken(0, ".");
        TreeWizard.TreePattern node = new TreeWizard.WildcardTreePattern(wildcardPayload);
        if (label != null)
        {
            node.label = label;
        }
        return node;
    }

    // "ID" or "ID[arg]"
    if (ttype != TreePatternLexer.Id)
    {
        return null;
    }
    string tokenName = tokenizer.sval.ToString();
    ttype = tokenizer.NextToken();
    if (tokenName.Equals("nil"))
    {
        return adaptor.Nil();
    }

    string text = tokenName;

    // check for arg
    string arg = null;
    if (ttype == TreePatternLexer.Arg)
    {
        arg = tokenizer.sval.ToString();
        text = arg;
        ttype = tokenizer.NextToken();
    }

    // create node
    int treeNodeType = wizard.GetTokenType(tokenName);
    if (treeNodeType == TokenTypes.Invalid)
    {
        return null;
    }

    object node2 = adaptor.Create(treeNodeType, text);
    if (label != null && node2.GetType() == typeof(TreeWizard.TreePattern))
    {
        ((TreeWizard.TreePattern)node2).label = label;
    }
    if (arg != null && node2.GetType() == typeof(TreeWizard.TreePattern))
    {
        ((TreeWizard.TreePattern)node2).hasTextArg = true;
    }
    return node2;
}
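For context, ParseNode above is the workhorse behind TreeWizard's pattern strings. A hedged usage sketch, assuming tokenNames comes from a generated ANTLR3 parser (e.g. MyParser.tokenNames) and that the tree uses the default CommonTree adaptor:

using Antlr.Runtime.Tree;
using System.Collections.Generic;

// Build and match TreeWizard patterns; ParseNode turns the pattern
// strings below into TreePattern nodes behind these calls.
var wizard = new TreeWizard(new CommonTreeAdaptor(), tokenNames);

// Build a tree from a pattern: (PLUS INT INT) with the given leaf texts.
object tree = wizard.Create("(PLUS INT[3] INT[4])");

// Match against a labeled pattern and pull out the bound subtrees.
var labels = new Dictionary<string, object>();
if (wizard.Parse(tree, "(PLUS %a:INT %b:INT)", labels))
{
    var left = (CommonTree)labels["a"];  // INT "3"
    var right = (CommonTree)labels["b"]; // INT "4"
}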
public TokenChange(TokenChangeType changeType, CommonToken oldToken, CommonToken newToken)
{
    this.changeType = changeType;
    this.oldToken = oldToken;
    this.newToken = newToken;
}
public TokenChangeBuilder setOldToken(CommonToken oldToken)
{
    this.oldToken = oldToken;
    return this;
}
static void Main(string[] args)
{
    Syms.init("ssupl.tokens");

    string gdata;
    using (var r = new StreamReader("terminals.txt"))
    {
        gdata = r.ReadToEnd();
    }
    gdata = gdata.Replace("\r", "\n");
    var tokenizer = new Tokenizer(gdata);

    // existing code
    string idata;
    using (var r = new StreamReader("input.txt"))
    {
        idata = r.ReadToEnd();
    }

    // new code
    //var rex = new Regex(@"\r");
    idata = idata.Replace("\r", "");
    var rex = new Regex(@"\n[ \t]+([^\n]+)");
    // need one leading space in the replacement
    idata = rex.Replace(idata, " $1");
    idata += "\n"; // make sure the file ends with a newline
    tokenizer.setInput(idata);

    IList<IToken> tokens = new List<IToken>();
    while (true)
    {
        Token t = tokenizer.next();
        if (t.Symbol == "$")
        {
            break; // at end
        }
        // CommonToken is defined in the ANTLR runtime
        CommonToken T = new CommonToken(Syms.stringToInt[t.Symbol], t.Lexeme);
        T.Line = t.Line;
        tokens.Add(T);
    }

    var antlrtokenizer = new BufferedTokenStream(new ListTokenSource(tokens));
    var parser = new ssuplParser(antlrtokenizer);
    parser.BuildParseTree = true;
    // optional: parser.ErrorHandler = new BailErrorStrategy();

    // 'start' should be the name of the grammar's start symbol
    var antlrroot = parser.start();
    var listener = new CodeGenerator();
    var walker = new ParseTreeWalker();
    walker.Walk(listener, antlrroot);
    //Console.WriteLine(listener.code.Get(antlrroot).ToString());

    var allcode = new ASM(
        "default rel",
        "section .text",
        "global main",
        "main:",
        listener.code.Get(antlrroot).ToString(),
        "ret",
        "section .data");

    Console.WriteLine(allcode.ToString());
    using (var w = new StreamWriter("out.asm"))
    {
        w.Write(allcode.ToString());
    }
    //Console.ReadLine();
}
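The continuation-line regex used above is easy to sanity-check in isolation; a small sketch with a made-up input:

using System.Text.RegularExpressions;

// Join indented continuation lines onto the previous line,
// keeping a single space where the line break was.
var rex = new Regex(@"\n[ \t]+([^\n]+)");
string src = "var x:int\n    = 3\nprint x\n";
string joined = rex.Replace(src, " $1");
// joined == "var x:int = 3\nprint x\n"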