/// <summary>
/// Initializes a new instance of the <see cref="Parser"/> class and registers
/// it with the supplied template engine.
/// </summary>
/// <param name="tokens">The tokens to be processed.</param>
/// <param name="engine">The engine used to process the template.</param>
public Parser(TokenList tokens, ITemplateEngine engine)
{
    this.tokens = tokens;
    this.templateEngine = engine;
    // Pre-size the output buffer: one character per token as a rough estimate.
    this.output = new StringBuilder(tokens.Count);
    // Let the engine call back into this parser during processing.
    engine.Parser = this;
}
/// <summary>
/// Collects every opening-brace token in <paramref name="tokens"/> that has a
/// matching closing brace.
/// </summary>
public static TokenList GetOpeningBraces(TokenList tokens)
{
    TokenList result = new TokenList();
    foreach (Token candidate in tokens)
    {
        bool isOpenBrace = candidate.Term.IsSet(TermOptions.IsOpenBrace);
        if (isOpenBrace && candidate.OtherBrace != null)
        {
            result.Add(candidate);
        }
    }
    return result;
}
/// <summary>
/// Scans the stream into a token list, bracketed by StartOfFile/EndOfFile
/// sentinel tokens, and records how long tokenization took.
/// </summary>
public TokenList Tokenize(ITextStream stream, ILexingContext context)
{
    var stopwatch = Stopwatch.StartNew();
    var result = new TokenList();
    result.Add(Token.CreateEmpty(TokenType.StartOfFile, stream.Position));
    // Cancellation is checked before the position, matching the original order.
    while (!context.IsCancellationRequested && stream.Position < stream.Length)
    {
        // Each Consume* helper advances the stream when it recognizes its construct.
        if (ConsumeComment(stream, result)) continue;
        if (ConsumeNewLine(stream, result)) continue;
        if (ConsumeWhitespace(stream)) continue;
        if (ConsumeInterpolation(stream, result)) continue;
        Token token;
        if (TryCreateToken(stream, out token))
            result.Add(token);
    }
    // Close the stream with an end-of-file token.
    result.Add(Token.CreateEmpty(TokenType.EndOfFile, stream.Length));
    stopwatch.Stop();
    LastTokenizationDuration = stopwatch.Elapsed;
    return result;
}
/// <summary>
/// Parses a SPICE netlist line into an Element: the first character of the
/// first token selects the element type, the first token is the element name,
/// and the remaining tokens are passed through as the element's arguments.
/// Commented-out cases mark SPICE element letters defined by the format but
/// not yet supported here.
/// </summary>
/// <exception cref="NotSupportedException">The leading letter is not a supported element type.</exception>
public static Element Parse(TokenList Tokens)
{
    // Parse element type from the first character of the first token.
    ElementType type;
    switch (Tokens[0][0])
    {
        //case 'B': type = ElementType.GaAsMESFET; break;
        case 'C': type = ElementType.Capacitor; break;
        case 'D': type = ElementType.Diode; break;
        case 'E': type = ElementType.VoltageControlledVoltageSource; break;
        case 'F': type = ElementType.CurrentControlledCurrentSource; break;
        case 'G': type = ElementType.VoltageControlledCurrentSource; break;
        case 'H': type = ElementType.CurrentControlledVoltageSource; break;
        case 'I': type = ElementType.CurrentSource; break;
        case 'J': type = ElementType.JunctionFieldEffectTransistor; break;
        //case 'K': type = ElementType.MutualInductor; break;
        case 'L': type = ElementType.Inductor; break;
        //case 'M': type = ElementType.MOSFET; break;
        //case 'N': type = ElementType.DigitalInputInterface; break;
        //case 'O': type = ElementType.DigitalOutputInterface; break;
        case 'P': type = ElementType.Port; break;
        case 'R': type = ElementType.Resistor; break;
        //case 'S': type = ElementType.VoltageControlledSwitch; break;
        //case 'T': type = ElementType.TransmissionLine; break;
        //case 'U': type = ; break;
        case 'V': type = ElementType.VoltageSource; break;
        default: throw new NotSupportedException("Unsupported SPICE element '" + Tokens[0][0] + "'.");
    }
    // Tokens[0] is the element name; the rest are its connection/value tokens.
    return new Element(type, Tokens[0], Tokens.Skip(1));
}
/// <summary>
/// Runs the full assembly pipeline: scan the source into tokens, then parse
/// and build the result.
/// </summary>
public void Assemble()
{
    AssemblyTokenizer scanner = new AssemblyTokenizer(source);
    scanner.Scan();
    tokens = scanner.Tokens;
    Parse();
    Build();
}
/// <summary>
/// Converts an array AST node into a PBXElementArray by parsing each value
/// element in order.
/// </summary>
public static PBXElementArray ParseArrayAST(ArrayAST ast, TokenList tokens, string text)
{
    var result = new PBXElementArray();
    foreach (ValueAST entry in ast.values)
    {
        result.values.Add(ParseValueAST(entry, tokens, text));
    }
    return result;
}
/// <summary>
/// Initializes an instance of the ReportItem class, capturing a diagnostic
/// message together with its source location.
/// </summary>
/// <param name="descriptor">A message descriptor.</param>
/// <param name="sourceFile">The source file the message refers to.</param>
/// <param name="sourceSpan">The span within the source file.</param>
/// <param name="sourceLine">The tokens of the offending source line.</param>
/// <param name="args">Arguments required by the message descriptor.</param>
public ReportItem(MessageDescriptor descriptor, SourceFile sourceFile, SourceSpan sourceSpan, TokenList sourceLine, params string[] args)
{
    Arguments = args;
    SourceLine = sourceLine;
    SourceSpan = sourceSpan;
    SourceFile = sourceFile;
    MessageDescriptor = descriptor;
}
/// <summary>
/// Creates a token stream over the given tokens. A null or empty list is
/// normalized to a list holding a single end-of-file token.
/// </summary>
public TokenStream(TokenList tokens, IParsingExecutionContext context)
{
    Context = context;
    // Never hold a null list.
    Tokens = tokens == null ? new TokenList() : tokens;
    // An empty stream still carries one EOF token.
    if (Tokens.Count == 0)
        Tokens.Add(Token.CreateEmpty(TokenType.EndOfFile, 0));
    // The last token is, by construction, the end-of-file marker.
    END_OF_FILE_TOKEN = Tokens[Tokens.Count - 1];
    CachedIndex = int.MinValue;
}
/// <summary>
/// Converts a tree AST node into a PBXElementDict, parsing each key/value
/// pair. Duplicate keys keep the last parsed value.
/// </summary>
public static PBXElementDict ParseTreeAST(TreeAST ast, TokenList tokens, string text)
{
    var result = new PBXElementDict();
    foreach (KeyValueAST pair in ast.values)
    {
        PBXElementString parsedKey = ParseIdentifierAST(pair.key, tokens, text);
        PBXElement parsedValue = ParseValueAST(pair.value, tokens, text);
        result[parsedKey.value] = parsedValue;
    }
    return result;
}
/// <summary>
/// Creates an assembler for the given source text: scans it into tokens,
/// initializes the instruction and label stores, then parses and builds.
/// </summary>
public Assembler(string source)
{
    AssemblyTokenizer scanner = new AssemblyTokenizer(source);
    scanner.Scan();
    tokens = scanner.Tokens;
    instructions = new List<Instruction>();
    labels = new Dictionary<string, Label>();
    Parse();
    Build();
}
/// <summary>
/// Evaluates both operands and returns their concatenated token lists.
/// </summary>
/// <param name="first">Left operand; must not be null.</param>
/// <param name="last">Right operand; must not be null.</param>
/// <param name="parameters">Parameters forwarded to operand evaluation.</param>
/// <returns>A new list containing the first operand's tokens followed by the second's.</returns>
/// <exception cref="InvalidOperationException">
/// Either operand is missing — this operation is strictly binary.
/// (Was a bare <see cref="Exception"/>; narrowed to the standard type, still
/// caught by existing <c>catch (Exception)</c> handlers.)
/// </exception>
public override TokenList Evaluate(IToken first, IToken last, TokenTreeList parameters)
{
    TokenList firstList = first?.Evaluate(parameters);
    TokenList lastList = last?.Evaluate(parameters);
    if (firstList == null || lastList == null)
        throw new InvalidOperationException($"Operation {Text} is a binary operation.");

    TokenList result = new TokenList();
    result.AddRange(firstList);
    result.AddRange(lastList);
    return result;
}
/// <summary>
/// Simplifies both operands and returns their concatenated token lists.
/// </summary>
/// <param name="first">Left operand; must not be null.</param>
/// <param name="last">Right operand; must not be null.</param>
/// <returns>A new list containing the first operand's simplified tokens followed by the second's.</returns>
/// <exception cref="InvalidOperationException">
/// Either operand is missing — this operation is strictly binary.
/// (Was a bare <see cref="Exception"/>; narrowed to the standard type, still
/// caught by existing <c>catch (Exception)</c> handlers.)
/// </exception>
public override TokenList Simplify(IToken first, IToken last)
{
    TokenList firstList = first?.Simplify();
    TokenList lastList = last?.Simplify();
    if (firstList == null || lastList == null)
        throw new InvalidOperationException($"Operation {Text} is a binary operation.");

    TokenList result = new TokenList();
    result.AddRange(firstList);
    result.AddRange(lastList);
    return result;
}
/// <summary>
/// Scans the remainder of the input, returning every token up to and
/// including the EOF token.
/// </summary>
public TokenList ScanAll()
{
    TokenList result = new TokenList();
    Token current;
    do
    {
        current = new Token();
        this.ScanOne(current);
        result.Add(current);
    }
    while (current.type != UnityEditor.iOS.Xcode.PBX.TokenType.EOF);
    return result;
}
/// <summary>
/// Dispatches a value AST node to the matching parser: tree, array, or
/// identifier. Unknown node kinds yield null.
/// </summary>
public static PBXElement ParseValueAST(ValueAST ast, TokenList tokens, string text)
{
    var tree = ast as TreeAST;
    if (tree != null)
    {
        return ParseTreeAST(tree, tokens, text);
    }
    var array = ast as ArrayAST;
    if (array != null)
    {
        return ParseArrayAST(array, tokens, text);
    }
    var identifier = ast as IdentifierAST;
    if (identifier != null)
    {
        return ParseIdentifierAST(identifier, tokens, text);
    }
    // Unrecognized AST node kinds are silently ignored.
    return null;
}
/// <summary>
/// Converts an identifier AST node into a PBXElementString: plain strings are
/// taken verbatim, quoted strings are unquoted, anything else is an internal
/// parser error.
/// </summary>
public static PBXElementString ParseIdentifierAST(IdentifierAST ast, TokenList tokens, string text)
{
    Token token = tokens[ast.value];
    switch (token.type)
    {
        case UnityEditor.iOS.Xcode.PBX.TokenType.String:
            return new PBXElementString(text.Substring(token.begin, token.end - token.begin));
        case UnityEditor.iOS.Xcode.PBX.TokenType.QuotedString:
            return new PBXElementString(PBXStream.UnquoteString(text.Substring(token.begin, token.end - token.begin)));
        default:
            throw new Exception("Internal parser error");
    }
}
// Restores the parser context to its initial state so it can be reused for a
// fresh parse: clears every stack, buffer, and cached token, and seeds the
// parser stack with the initial state. Statement order matters: the stack
// push below reads CurrentParserState assigned on the first line.
internal void Reset()
{
    CurrentParserState = Parser.InitialState;
    CurrentParserInput = null;
    CurrentCommentTokens = new TokenList();
    ParserStack.Clear();
    HasErrors = false;
    // The parser stack must never be empty: push a node for the initial state.
    ParserStack.Push(new ParseTreeNode(CurrentParserState));
    CurrentParseTree = null;
    OpenBraces.Clear();
    ParserTrace.Clear();
    CurrentTerminals.Clear();
    CurrentToken = null;
    PreviousToken = null;
    PreviousLineStart = new SourceLocation(0, -1, 0);
    BufferedTokens.Clear();
    PreviewTokens.Clear();
    Values.Clear();
    // Token filters keep their own internal state and must be reset individually.
    foreach (var filter in TokenFilters) filter.Reset();
}
// Reads a possibly dotted name (e.g. "a.b.c") starting at the current token.
// When the next token is not a dot, the current token is returned as-is (or
// re-tagged with targetKind when a plain name was not requested).
Token ReadComplexName(TokenKind targetKind)
{
    if (NextToken.Kind != TokenKind.Dot)
    {
        if (targetKind == TokenKind.Name)
        {
            return(CurrentToken);
        }
        else
        {
            // Re-wrap the token under the caller-requested kind.
            return(new Token(CurrentToken, targetKind));
        }
    }
    //else
    //TODO:Need to test this more
    // Dotted case: collect the name parts, skipping the dot separators.
    TokenList nameList = new TokenList();
    do
    {
        TokenKind kind = CurrentToken.Kind;
        if (kind == TokenKind.Name)
        {
            nameList.Add(CurrentToken);
            // Stop once a name is no longer followed by a dot.
            if (NextToken.Kind != TokenKind.Dot)
            {
                break;
            }
        }
        else if (kind == TokenKind.Dot)
        {
            // Separator: just advance past it.
            continue;
        }
        else
        {
            // Anything else ends the dotted name.
            break;
        }
    }
    while (Advance());
    // Package the collected parts as a single DottedName wrapped in the target kind.
    return(new Token(targetKind, new Token(TokenKind.DottedName, nameList)));
}
// Tokenization smoke test: "int main() { 10 + 50 - 60; }" must yield exactly
// 12 tokens, which are then consumed in source order. NOTE: Expect/Consume
// calls advance the token stream, so assertion order is significant.
// Finally the compiled program's exit code on WSL is checked (10+50-60 = 0).
public void Test03_トークン解析()
{
    var src = "int main() { 10 + 50 - 60; }";
    var tokenList = TokenList.Tokenize(src);
    Assert.AreEqual(12, tokenList.Count);
    Assert.IsNotNull(tokenList.Expect(TokenKind.Type));
    Assert.AreEqual("main", tokenList.ExpectIdent().Str);
    Assert.IsTrue(tokenList.Consume('('));
    Assert.IsTrue(tokenList.Consume(')'));
    Assert.IsTrue(tokenList.Consume('{'));
    Assert.AreEqual(10, tokenList.ExpectNumber());
    Assert.IsTrue(tokenList.Consume('+'));
    Assert.AreEqual(50, tokenList.ExpectNumber());
    Assert.IsTrue(tokenList.Consume('-'));
    Assert.AreEqual(60, tokenList.ExpectNumber());
    Assert.IsTrue(tokenList.Consume(';'));
    Assert.IsTrue(tokenList.Consume('}'));
    Assert.IsTrue(tokenList.IsEof());
    Assert.AreEqual(0, CopmlileAndExecOnWsl(src));
}
// Verifies that a for-loop whose body is a block of multiple statements
// parses into a valid node map and executes correctly: a starts at 10 and
// gains 3 per iteration over 3 iterations, so the program returns 19.
public void Test13_block()
{
    var src = @" int main() { int a; int i; a = 10; i = 0; for ( i = 0; i < 3; i = i + 1 ) { a = a + 1; a = a + 2; } return a; } ";
    var tokenList = TokenList.Tokenize(src);
    var nodeMap = NodeMap.Create(tokenList);
    Assert.IsTrue(ValidateNodeValuesAndOffsets(nodeMap));
    Assert.AreEqual(19, CopmlileAndExecOnWsl(src));
}
/// <summary>
/// Walks the trained token tree along <paramref name="tokenList"/>, following
/// child links word by word, and returns the chain of already-known tokens
/// with the most recently matched token first.
/// (NOTE: "Exisiting" is a pre-existing public misspelling, kept for compatibility.)
/// </summary>
public List<Token> GetExisitingTokens(TokenList tokenList, Token token)
{
    var known = new List<Token>();
    foreach (var current in tokenList)
    {
        int childIndex;
        if (!TokenManager.DoesWordTextExist(current, token, out childIndex))
        {
            // Chain broken: no trained child matches this word.
            break;
        }
        token = TokenManager.GetTokenForID(token.ChildrenTokens[childIndex]);
        // Insert at the front of the list, since the reverse training will
        // need the inverted order.
        known.Insert(0, token);
    }
    return known;
}
/// <summary>
/// Formats the token list as an aligned table of token type, length, and
/// token text (with line breaks escaped for display).
/// </summary>
private string TokensAsString(ITextProvider textProvider, TokenList tokens)
{
    const string rowFormat = "{0,-20} {1,-10} {2}\r\n";
    var builder = new StringBuilder();
    builder.AppendFormat(rowFormat, "Token Type", "Length", "Token Text");
    foreach (CssToken token in tokens)
    {
        string tokenText = string.Empty;
        // EOF carries no text; everything else is read from the provider with
        // line breaks made visible.
        if (token.TokenType != CssTokenType.EndOfFile)
        {
            tokenText = textProvider.GetText(token.Start, token.Length)
                .Replace("\r", "\\r")
                .Replace("\n", "\\n");
        }
        builder.AppendFormat(rowFormat, token.TokenType.ToString(), token.Length, tokenText);
    }
    return builder.ToString();
}
/// <summary>
/// Runs every state machine over the input stream, repeatedly emitting the
/// longest match as a token (maximal munch); characters no machine recognizes
/// are reported and skipped one at a time.
/// </summary>
/// <returns>The accumulated token list.</returns>
public List<Token> StartLexicalAnalysis()
{
    while (this.GlobalIterator != Stream.Length)
    {
        foreach (var stateMachine in this.stateMachines)
        {
            stateMachine.Analyze();
        }
        // Pick the machine with the longest match once, instead of the
        // original ToList() + Count() + repeated FirstOrDefault() calls.
        var best = this.stateMachines
            .Where(machine => machine.HasMatch)
            .OrderByDescending(machine => machine.Match.Length)
            .FirstOrDefault();
        if (best != null)
        {
            Token token = new Token(
                best.GetTypeOfMachineGeneratedToken(),
                best.Match,
                this.GlobalIterator
                );
            TokenList.Add(token);
            this.GlobalIterator += token.Argument.Length;
        }
        else
        {
            // No machine recognized the character: report and skip it.
            string errorMessage = string.Format("Nierozpoznano znaku na pozycji: {0}", this.GlobalIterator);
            ConsoleHelper.DisplayError(errorMessage);
            this.GlobalIterator++;
        }
        // Re-arm every machine at the new position for the next round.
        foreach (var stateMachine in this.stateMachines)
        {
            stateMachine.ResetStateMachine(this.GlobalIterator);
        }
    }
    return TokenList;
}
// Verifies that comparison operators parse into the expected node kinds.
// ==, !=, <, <= keep their operands in source order; > and >= are normalized
// to Lt/Le with the operands swapped.
public void Test06_Comparators()
{
    var comparators = new List<Tuple<NodeKind, string>>()
    {
        Tuple.Create(NodeKind.Eq, "=="),
        Tuple.Create(NodeKind.Ne, "!="),
        Tuple.Create(NodeKind.Lt, "<"),
        Tuple.Create(NodeKind.Le, "<="),
    };
    foreach (var pair in comparators)
    {
        var tokenList = TokenList.Tokenize("int main() { 10 " + pair.Item2 + " 20; }");
        var nodeMap = NodeMap.Create(tokenList);
        Assert.IsTrue(ValidateNodeValuesAndOffsets(nodeMap));
        var root = nodeMap.Head.Nodes.Item1.Bodies[0];
        Assert.AreEqual(pair.Item1, root.Kind);
        Assert.AreEqual(10, root.Nodes.Item1.Value);
        Assert.AreEqual(20, root.Nodes.Item2.Value);
    }
    var inverseComparators = new List<Tuple<NodeKind, string>>()
    {
        Tuple.Create(NodeKind.Lt, ">"),
        Tuple.Create(NodeKind.Le, ">="),
    };
    foreach (var pair in inverseComparators)
    {
        var tokenList = TokenList.Tokenize("int main() { 10 " + pair.Item2 + " 20; }");
        var nodeMap = NodeMap.Create(tokenList);
        Assert.IsTrue(ValidateNodeValuesAndOffsets(nodeMap));
        var root = nodeMap.Head.Nodes.Item1.Bodies[0];
        Assert.AreEqual(pair.Item1, root.Kind);
        // Operands swapped: "10 > 20" is stored as "20 < 10".
        Assert.AreEqual(20, root.Nodes.Item1.Value);
        Assert.AreEqual(10, root.Nodes.Item2.Value);
    }
}
// Verifies that a call statement with multiple comma-separated arguments —
// an int literal and a string literal — parses into the expected AST.
public void TestCallWithMultipleArguments()
{
    // Arrange: token stream for `func(2, "arg")`.
    var programSource = new TokenList()
    {
        { TokenType.Identifier, "func" },
        { TokenType.LParen },
        { TokenType.IntLiteral, "2" },
        { TokenType.Comma },
        { TokenType.StringLiteral, "arg" },
        { TokenType.RParen }
    };
    Parser parser = new Parser(CreateMockScanner(programSource), new ErrorHandler());
    // Act.
    ProgramNode program = parser.Parse();
    // Assert: expected AST is a call to "func" with arguments 2 and "arg".
    var call = new CallStmt(0, 0);
    call.Arguments = new List<Expression>();
    call.Arguments.Add(new IntLiteralExpr(0, 0, 2));
    call.Arguments.Add(new StringLiteralExpr(0, 0, "arg"));
    call.ProcedureId = "func";
    expected.Block.Statements.Add(call);
    program.ShouldBeEquivalentTo(expected);
}
// Verifies operator precedence and associativity for
// "100 - ( 7 + 2 ) * 10 - 20 / 5": subtraction is left-associative, and
// parentheses, * and / bind tighter than -. Expected tree:
//   Sub( Sub(100, Mul(Add(7, 2), 10)), Div(20, 5) )  =>  100 - 90 - 4 = 6
public void Test04_NodeMap解析()
{
    var src = "int main() { 100 - ( 7 + 2 ) * 10 - 20 / 5; }";
    var tokenList = TokenList.Tokenize(src);
    var nodeMap = NodeMap.Create(tokenList);
    Assert.IsTrue(ValidateNodeValuesAndOffsets(nodeMap));
    // Root of the expression statement inside main().
    var head = nodeMap.Head.Nodes.Item1.Bodies[0];
    Assert.AreEqual(NodeKind.Sub, head.Kind);
    Assert.AreEqual(NodeKind.Sub, head.Nodes.Item1.Kind);
    Assert.AreEqual(NodeKind.Div, head.Nodes.Item2.Kind);
    Assert.AreEqual(100, head.Nodes.Item1.Nodes.Item1.Value);
    Assert.AreEqual(NodeKind.Mul, head.Nodes.Item1.Nodes.Item2.Kind);
    Assert.AreEqual(NodeKind.Add, head.Nodes.Item1.Nodes.Item2.Nodes.Item1.Kind);
    Assert.AreEqual(10, head.Nodes.Item1.Nodes.Item2.Nodes.Item2.Value);
    Assert.AreEqual(7, head.Nodes.Item1.Nodes.Item2.Nodes.Item1.Nodes.Item1.Value);
    Assert.AreEqual(2, head.Nodes.Item1.Nodes.Item2.Nodes.Item1.Nodes.Item2.Value);
    Assert.AreEqual(20, head.Nodes.Item2.Nodes.Item1.Value);
    Assert.AreEqual(5, head.Nodes.Item2.Nodes.Item2.Value);
    Assert.AreEqual(6, CopmlileAndExecOnWsl(src));
}
// Trains the token list in both directions (forwards and backwards). The
// direction that already knows more of the list is trained first, and the
// token links discovered there are then fed into training of the other
// direction so the two trees stay partnered.
// NOTE(review): Invert() mutates tokenList in place, so the exact order of
// Invert/Train/GetExisitingTokens calls below is load-bearing — do not reorder.
public void TrainTokenList(TokenList tokenList)
{
    // Known tokens in each direction, probed before any training happens.
    var backwardsTokens = GetExisitingTokens(tokenList, GetForwardsRoot());
    tokenList.Invert();
    var forwardsTokens = GetExisitingTokens(tokenList, GetBackwardsRoot());
    if (forwardsTokens.Count == 0 || (backwardsTokens.Count != 0 && backwardsTokens.Count >= forwardsTokens.Count))
    {
        // Backwards direction knows at least as much: train it first, then
        // link the freshly created tokens into the forwards training.
        TrainTokenList(tokenList, GetBackwardsRoot(), null);
        var forwardsTokenLinks = GetExisitingTokens(tokenList, GetBackwardsRoot());
        tokenList.Invert();
        TrainTokenList(tokenList, GetForwardsRoot(), backwardsTokens, forwardsTokenLinks);
    }
    else
    {
        // Forwards direction knows more: mirror of the branch above.
        tokenList.Invert();
        TrainTokenList(tokenList, GetForwardsRoot(), null);
        var backwardsTokenLinks = GetExisitingTokens(tokenList, GetForwardsRoot());
        tokenList.Invert();
        TrainTokenList(tokenList, GetBackwardsRoot(), forwardsTokens, backwardsTokenLinks);
    }
}
/// <summary>
/// Builds MaxLines lines of ASCII test data, each with MaxColumns numeric
/// fields separated by SeparationChar, populating both LineList (joined text)
/// and TokenList (the per-line token lists).
/// </summary>
public void BuildAsciiText()
{
    for (int line = 1; line <= MaxLines; ++line)
    {
        var lineBuilder = new StringBuilder();
        var tokens = new List<string>();
        TokenList.Add(tokens);
        for (int c = 1; c <= MaxColumns; ++c) // 4 columns
        {
            // Field value varies by line and column; its width cycles with
            // (line + c) up to MaxFieldWidth.
            // (Fixed the original's duplicated assignment "value = value = ...".)
            long value = line + c + GetLongOfWidth(1 + (line + c) % (MaxFieldWidth));
            string token = value.ToString(Culture);
            lineBuilder.Append(token);
            tokens.Add(token);
            // No trailing separator after the last column.
            if (c < MaxColumns)
            {
                lineBuilder.Append(SeparationChar);
            }
        }
        LineList.Add(lineBuilder.ToString());
    }
}
// Restores the parser context to its initial state so it can be reused for a
// fresh parse: clears every stack, buffer, and cached token, and seeds the
// parser stack with the initial state. Statement order matters: the stack
// push below reads CurrentParserState assigned on the first line.
internal void Reset()
{
    CurrentParserState = Parser.InitialState;
    CurrentParserInput = null;
    CurrentCommentTokens = new TokenList();
    ParserStack.Clear();
    HasErrors = false;
    // The parser stack must never be empty: push a node for the initial state.
    ParserStack.Push(new ParseTreeNode(CurrentParserState));
    CurrentParseTree = null;
    OpenBraces.Clear();
    ParserTrace.Clear();
    CurrentTerminals.Clear();
    CurrentToken = null;
    PreviousToken = null;
    PreviousLineStart = new SourceLocation(0, -1, 0);
    BufferedTokens.Clear();
    PreviewTokens.Clear();
    Values.Clear();
    // Token filters keep their own internal state and must be reset individually.
    foreach (var filter in TokenFilters)
    {
        filter.Reset();
    }
}
//
// Tokenizes the stream, primes the current/next token iterators (NextTokenIter
// runs one token ahead of CurrentTokenIter so the reader can peek), then runs
// the preprocessing pass (ReadTokenLists) to fill the output list.
public TokenList Read(Stream str)
{
    Tokenizer tokenizer = new Tokenizer();
    TokenList inList = tokenizer.Tokenize(str);
    //
    TokenList outList = new TokenList();
    // Two independent enumerators over the same input list.
    CurrentTokenIter = inList.GetEnumerator();
    NextTokenIter = inList.GetEnumerator();
    NextTokenIter.MoveNext();
    NextToken = NextTokenIter.Current;
    //
    ReadTokenLists(outList);
    //
    // Dump the raw input token list for diagnostics.
    // NOTE(review): RawPrint runs even when DEBUG_PREPROCESSOR is undefined —
    // the writer output is then simply discarded; consider guarding it too.
    //DebugTextWriter writer = new DebugTextWriter();
    StringWriter writer = new StringWriter();
    inList.RawPrint(writer);
#if DEBUG_PREPROCESSOR
    Debug.Print(writer.ToString());
#endif
    return(outList);
}
/// <summary>
/// Analyzes the source code and produces a token sequence.
/// <para>Returns once maxTokenCount tokens have been produced, or the source
/// code has been fully analyzed.</para>
/// <para>The next call resumes from the character where this one stopped.</para>
/// </summary>
/// <param name="maxTokenCount">Maximum number of tokens to produce in this pass.</param>
/// <returns>The tokens found in this pass (possibly empty).</returns>
public TokenList <TEnumTokenType> Analyze(int maxTokenCount)
{
    var tokens = new TokenList <TEnumTokenType>();
    if (string.IsNullOrEmpty(this.GetSourceCode()))
    {
        return(tokens);
    }
    //analyze source code
    // PtNextLetter persists between calls, enabling incremental tokenization.
    int count = this.GetSourceCode().Length;
    int foundTokens = 0;
    while (PtNextLetter < count && foundTokens < maxTokenCount)
    {
        // NextToken may return null (e.g. for skipped input); only real
        // tokens count toward the limit.
        var tk = NextToken();
        if (tk != null)
        {
            tokens.Add(tk);
            foundTokens++;
        }
    }
    return(tokens);
}
// Consumes a "//" line comment and records it as a Comment token carrying the
// current context. Returns false when the stream is not positioned at a comment.
private bool ParseComment(TemplateStream stream, TokenList tokens, ContextStack context)
{
    if (stream.Current != '/' || stream.Peek() != '/')
    {
        return(false);
    }
    var start = stream.Position;
    var line = stream.Line;
    // NOTE(review): only '\r' terminates the comment here; on input with
    // Unix-style '\n'-only line endings this loop appears to run to the end
    // of the stream — confirm TemplateStream's newline normalization.
    while (stream.Advance())
    {
        if (stream.Peek() == '\r')
        {
            stream.Advance();
            break;
        }
    }
    tokens.Add(new Token(start, stream.Position - start, line, TokenType.Comment, context.Current));
    return(true);
}
/// <summary>
/// Parses a SWITCH construct: walks the body token by token, emitting case,
/// default, and break constructs through the active code generator and
/// recursively parsing each arm's body.
/// </summary>
private static void ParseSwitch(TokenList body)
{
    BaseGenerator generator = CodeManager.GetGenerator(parseMode);
    for (int index = 0; index < body.Count; index++)
    {
        // Remaining tokens from the current position onward.
        TokenList nextTokens = body.GetRange(index);
        Token currentToken = body[index];
        Token nextToken = body.Get(index + 1);
        if (currentToken.TypeIs(TokenType.NextLine))
        {
            lineIndex++;
            continue;
        }
        // "case <expression>:" arm.
        if (currentToken.TypeIs(KeywordType.Case) && body[index + 2].TypeIs(TokenType.Colon))
        {
            // GetCaseBody advances index past the consumed arm via the out parameter.
            TokenList caseBody = GetCaseBody(nextTokens, out index, index);
            generator.AddCaseConstruction(nextToken);
            Parse(caseBody, out _);
        }
        // Default arm (the source language spells it "по_умолчанию:").
        else if (currentToken.TypeIs(KeywordType.Default) && body[index + 1].TypeIs(TokenType.Colon))
        {
            TokenList defaultBody = GetCaseBody(nextTokens, out index, index);
            generator.AddDefaultCaseConstruction();
            Parse(defaultBody, out _);
        }
        // Break statement (the source language spells it "завершить;").
        else if (currentToken.TypeIs(KeywordType.Break))
        {
            generator.AddBreak();
        }
    }
}
// Trains "large old big" when all three tokens already exist in the tree and
// no partner links are supplied: the existing chain must be reused intact —
// same IDs and partner IDs at every level, one child per level, and the leaf
// ("big") ends the chain with no children.
public void Test_TrainTokenList_AllExistingTokens_NoLinks()
{
    // Arrange: three pre-registered tokens forming the existing chain.
    firstToken.WordText = "large";
    secondToken.WordText = "old";
    thirdToken.WordText = "big";
    var existingTokens = new List <Token> {
        firstToken, secondToken, thirdToken
    };
    var inputText = new TokenList("large old big");
    TokenManager.SetTokenForID(firstToken.ID, firstToken);
    TokenManager.SetTokenForID(secondToken.ID, secondToken);
    TokenManager.SetTokenForID(thirdToken.ID, thirdToken);
    // Act.
    mockTokenMemoryManager.Object.TrainTokenList(inputText, mainToken, existingTokens);
    // Assert: chain root -> "large" -> "old" -> "big", each node reused.
    Assert.AreEqual(1, mainToken.ChildrenTokens.Count);
    var nextToken = TokenManager.GetTokenForID(mainToken.ChildrenTokens[0]);
    Assert.AreEqual(1, nextToken.ChildrenTokens.Count);
    Assert.AreEqual("large", nextToken.WordText);
    Assert.AreEqual(firstToken.ID, nextToken.ID);
    Assert.AreEqual(firstToken.PartnerID, nextToken.PartnerID);
    nextToken = TokenManager.GetTokenForID(nextToken.ChildrenTokens[0]);
    Assert.AreEqual(1, nextToken.ChildrenTokens.Count);
    Assert.AreEqual("old", nextToken.WordText);
    Assert.AreEqual(secondToken.ID, nextToken.ID);
    Assert.AreEqual(secondToken.PartnerID, nextToken.PartnerID);
    nextToken = TokenManager.GetTokenForID(nextToken.ChildrenTokens[0]);
    Assert.AreEqual(0, nextToken.ChildrenTokens.Count);
    Assert.AreEqual("big", nextToken.WordText);
    Assert.AreEqual(thirdToken.ID, nextToken.ID);
    Assert.AreEqual(thirdToken.PartnerID, nextToken.PartnerID);
}
// Converts a wiki token stream into an HTML document. Per token: wiki
// terminals get full markup processing, newlines close paragraphs, and
// everything else is HTML-encoded verbatim. _atLineStart tracks whether the
// previous token was a newline, since list/table state is only re-checked at
// the start of a line (or at end of input).
public string Convert(Grammar grammar, TokenList tokens)
{
    _output = new StringBuilder(8192); //8k
    _output.AppendLine("<html>");
    foreach (var token in tokens)
    {
        var term = token.Terminal;
        if (_atLineStart || term == grammar.Eof)
        {
            // Line-start bookkeeping: open/close lists and tables as needed.
            CheckOpeningClosingLists(token);
            CheckTableStatus(token);
            if (term == grammar.Eof)
            {
                break;
            }
        }
        if (term is WikiTerminalBase)
        {
            ProcessWikiToken(token);
        }
        else if (term == grammar.NewLine)
        {
            ProcessNewLine(token);
        }
        else //non-wiki element and not new line
        {
            _output.Append(HtmlEncode(token.ValueString));
        }
        _atLineStart = term == grammar.NewLine; //set for the next token
    } //foreach token
    _output.AppendLine();
    _output.AppendLine("</html>");
    return(_output.ToString());
} //method
/// <summary>
/// Walks the source, letting the escape/tag handlers consume what they
/// recognize and copying every other character into the token list verbatim.
/// </summary>
public string Evaluate()
{
    while (Position < Source.Length)
    {
        CollectionView.Position = Position;
        // Handlers are tried in priority order; each advances Position when
        // it succeeds, and || preserves the original short-circuit order.
        if (TrySkipEscapeSymbol() || TryCloseTag() || TryOpenTag())
        {
            continue;
        }
        // Plain character: copy it through and move on.
        TokenList.AddCharacter(CollectionView.GetValue(0));
        Position++;
    }
    return TokenList.GetValue();
}
// Trains "A big cat" with no pre-existing tokens but with partner links
// supplied: each newly created token must be partnered bidirectionally with
// the corresponding linked token, and the leaf ("cat") has no child list.
public void Test_TrainTokenList_NoExistingTokens_WithLinks()
{
    // Arrange: three registered tokens to be linked against.
    var inputText = new TokenList("A big cat");
    var linkedTokens = new List <Token>();
    linkedTokens.Add(firstToken);
    linkedTokens.Add(secondToken);
    linkedTokens.Add(thirdToken);
    TokenManager.SetTokenForID(firstToken.ID, firstToken);
    TokenManager.SetTokenForID(secondToken.ID, secondToken);
    TokenManager.SetTokenForID(thirdToken.ID, thirdToken);
    // Act: no existing tokens, links provided.
    mockTokenMemoryManager.Object.TrainTokenList(inputText, mainToken, new List <Token>(), linkedTokens);
    // Assert: chain root -> "A" -> "big" -> "cat", each partnered both ways.
    Assert.AreEqual(1, mainToken.ChildrenTokens.Count);
    var nextToken = TokenManager.GetTokenForID(mainToken.ChildrenTokens[0]);
    Assert.AreEqual(1, nextToken.ChildrenTokens.Count);
    Assert.AreEqual("A", nextToken.WordText);
    Assert.AreEqual(firstToken.ID, nextToken.PartnerID);
    Assert.AreEqual(nextToken.ID, firstToken.PartnerID);
    nextToken = TokenManager.GetTokenForID(nextToken.ChildrenTokens[0]);
    Assert.AreEqual(1, nextToken.ChildrenTokens.Count);
    Assert.AreEqual("big", nextToken.WordText);
    Assert.AreEqual(secondToken.ID, nextToken.PartnerID);
    Assert.AreEqual(nextToken.ID, secondToken.PartnerID);
    nextToken = TokenManager.GetTokenForID(nextToken.ChildrenTokens[0]);
    // Leaf token: ChildrenTokens is null (never initialized), not empty.
    Assert.AreEqual(null, nextToken.ChildrenTokens);
    Assert.AreEqual("cat", nextToken.WordText);
    Assert.AreEqual(thirdToken.ID, nextToken.PartnerID);
    Assert.AreEqual(nextToken.ID, thirdToken.PartnerID);
}
/// <summary>
/// Converts a radical element into a RadicalToken; an omitted degree defaults
/// to 2 (square root).
/// </summary>
private RadicalToken processRadical(Radical r)
{
    var degreeTokens = new TokenList();
    var baseTokens = new TokenList();
    if (r.Degree.HasChildren)
    {
        foreach (var child in r.Degree)
        {
            degreeTokens.Append(processElement(child));
        }
    }
    else
    {
        // No explicit degree given: treat as a square root.
        degreeTokens.Append(new TextRunToken("2"));
    }
    foreach (var child in r.Base)
    {
        baseTokens.Append(processElement(child));
    }
    return new RadicalToken(baseTokens, degreeTokens);
}
// Verifies that a function declaration with one typed parameter and a return
// type parses into the expected AST. The token stream corresponds to:
//   function func(par1 : int) : bool;
//   begin return 123 end
public void TestFunctionDeclaration()
{
    // Arrange: token stream for the declaration above.
    var programSource = new TokenList()
    {
        { TokenType.KwFunction },
        { TokenType.Identifier, "func" },
        { TokenType.LParen },
        { TokenType.Identifier, "par1" },
        { TokenType.Colon },
        { TokenType.Identifier, "int" },
        { TokenType.RParen },
        { TokenType.Colon },
        { TokenType.Identifier, "bool" },
        { TokenType.LineTerm },
        { TokenType.KwBegin },
        { TokenType.KwReturn },
        { TokenType.IntLiteral, "123" },
        { TokenType.KwEnd }
    };
    Parser parser = new Parser(CreateMockScanner(programSource), new ErrorHandler());
    // Act.
    ProgramNode program = parser.Parse();
    // Assert: declaration of func(par1 : int) : bool whose body returns 123.
    var declr = new FunctionDeclarationStmt(0, 0);
    declr.Identifier = "func";
    declr.ReturnType = new SimpleType(0, 0, ExprType.Bool);
    declr.AddParameter("par1", new SimpleType(0, 0, ExprType.Int), false);
    declr.ProcedureBlock = new BlockStmt(0, 0);
    var returnStmt = new ReturnStmt(0, 0);
    returnStmt.ReturnExpression = new IntLiteralExpr(0, 0, 123);
    declr.ProcedureBlock.Statements.Add(returnStmt);
    expected.Block.Statements.Add(declr);
    program.ShouldBeEquivalentTo(expected);
}
// Consumes a single token of the expected TokenType from the list. On
// failure (wrong type, or rejected by the optional filter) a ParseErrorInfo
// is recorded, Error is set, and null is returned; on success the token is
// returned wrapped in a ParsingInfo under this rule's name.
protected override ParsingInfo Parse(TokenList list)
{
    var token = list.Get <TokenType>();
    if (token == null)
    {
        // Wrong token type at the current position.
        errorInfo.Add(new ParseErrorInfo()
        {
            expected = (typeof(TokenType).Name),
            got = (list.Current.ToString()),
            tokenGot = list.Current,
        });
        Error = true;
        return(null);
    }
    if (filter != null && filter(token) == false)
    {
        // The token matched the type but failed the filter: rewind so other
        // rules can retry from the same position.
        list.MoveToPrevious();
        errorInfo.Add(new ParseErrorInfo()
        {
            expected = (typeof(TokenType).Name),
            got = (list.Current.ToString() + ", but filter failed"),
            tokenGot = list.Current,
        });
        Error = true;
        return(null);
    }
    ParsingInfo info = new ParsingInfo {
        { name, token }
    };
    return(info);
}
// Parses an `expert` declaration: consumes the keyword and the expert's name,
// optionally a parenthesized base name, then builds an ExpertDef node,
// attaches it to the current node, and parses the declaration's body lines.
internal void ParseExpert(Token tok)
{
    Advance(); //past expert
    Token name = CurrentToken;
    Advance(); //past name
    //
    // Default base when no explicit base list follows the name.
    Token baseName = TokenInstance.CSharp.EXPERT;
    if (CurrentToken.Kind == TokenKind.RoundList)
    {
        // An explicit "(base)" list: the first entry, if any, is the base name.
        TokenList tokList = CurrentList;
        if (tokList.Count != 0)
        {
            baseName = tokList[0];
        }
        Advance(); //past base name
    }
    //
    ExpertDef def = new ExpertDef(name, baseName);
    CurrentNode.AddChild(def);
    ParseLines(def);
}
// Consumes comment text until the predicate signals a terminator (or the
// stream ends), then emits a CommentText token covering the consumed span.
// Returns false when no text was consumed.
private bool ConsumeCommentText(ITextStream stream, TokenList tokens, Func <ITextStream, bool> predicate)
{
    int start = stream.Position;
    while (stream.Position < stream.Length)
    {
        if (predicate(stream))
        {
            break;
        }
        stream.Advance();
    }
    if (start != stream.Position)
    {
        // NOTE(review): Reverse(1) followed by Advance() looks like a no-op on
        // position — presumably it re-synchronizes internal stream state;
        // confirm against the ITextStream implementation.
        stream.Reverse(1);
        stream.Advance();
        tokens.Add(Token.Create(TokenType.CommentText, start, stream.Position - start));
        return(true);
    }
    return(false);
}
/// <summary>
/// Parses a SPICE .model card into a Model: Tokens[1] is the model name,
/// Tokens[2] the model type, and the remaining tokens are parameter
/// name/value pairs applied to a clone of the registered template component.
/// Unknown parameter names are skipped.
/// </summary>
/// <exception cref="NotSupportedException">The model type has no registered template.</exception>
public static Model Parse(TokenList Tokens)
{
    string name = Tokens[1];
    string type = Tokens[2];
    Component template;
    if (!ModelTemplates.TryGetValue(type, out template))
        throw new NotSupportedException("Model type '" + type + "' not supported.");

    Component impl = template.Clone();
    impl.PartNumber = name;
    // Remaining tokens come in name/value pairs. The i + 1 bound prevents the
    // original out-of-range access when a trailing parameter name has no
    // value, and ++i skips the consumed value so it is never re-interpreted
    // as a parameter name (both were latent bugs).
    for (int i = 3; i + 1 < Tokens.Count; ++i)
    {
        PropertyInfo p = FindTemplateProperty(template, Tokens[i]);
        if (p != null)
        {
            // Convert the textual value through the property's own converter.
            TypeConverter tc = TypeDescriptor.GetConverter(p.PropertyType);
            p.SetValue(impl, tc.ConvertFrom(ParseValue(Tokens[i + 1]).ToString()), null);
            ++i; // skip the value token just consumed
        }
    }
    return new Model(impl, "Imported from SPICE model '" + Tokens.Text + "'.");
}
/// <summary>
/// Adds a tree node for each child token, provided the parent already has a
/// registered node (only the root query may create its node on the fly).
/// </summary>
public void Visit(Token parent, TokenList children)
{
    TreeNode parentNode = null;
    if (nodesAdded.ContainsKey(parent))
    {
        parentNode = nodesAdded[parent];
    }
    else if (parent == lib.sqlparser.Query.rootQuery)
    {
        parentNode = AddNode(null, null, parent);
    }
    // Unknown, non-root parent (or a null registered node): nothing to attach to.
    if (parentNode == null)
    {
        return;
    }
    foreach (Token t in children.tokens)
    {
        AddNode(parent, parentNode, t);
        // Keep the node visually expanded while children are being added.
        if (IsExpanded(parent))
        {
            parentNode.Expand();
        }
    }
}
/// <summary>
/// Scans the token list for lexical errors. Returns true when the list is
/// clean; otherwise returns false with a report of the bad tokens in
/// <paramref name="errorInfo"/>.
/// </summary>
public static bool Check(this TokenList tokenList, out string errorInfo)
{
    var report = new StringBuilder();
    report.AppendLine("Error tokens:");
    bool hasError = false;
    foreach (var token in tokenList)
    {
        if (!token.LexicalError)
        {
            continue;
        }
        report.AppendLine(token.ToString());
        hasError = true;
    }
    // The report is only surfaced when something was actually wrong.
    errorInfo = hasError ? report.ToString() : string.Empty;
    return !hasError;
}
// Tries each alternative rule in order and returns the first successful
// parse. A failing rule's errors are collected as child errors; if a failing
// rule set a check point, the whole alternation is aborted immediately
// (committed failure) instead of trying further possibilities.
protected override ParsingInfo Parse(TokenList list)
{
    foreach (var rule in possibilities)
    {
        var info = rule.Execute(list);
        // Propagate the rule's check point regardless of its outcome.
        this.checkPoint = rule.checkPoint;
        if (info != null)
        {
            return(info);
        }
        else
        {
            AddChildErrors(rule.errorInfo);
            if (checkPoint != null)
            {
                //Console.WriteLine("hit check point " + checkPoint.name);
                // Check point hit: commit to this branch's failure.
                return(null);
            }
        }
    }
    // No alternative matched.
    Error = true;
    return(null);
}
/// <summary>
/// Constructs a new <c>HsqlCommand</c> instance that
/// is a copy of the given command object.
/// </summary>
/// <param name="srcCommand">The source command.</param>
private HsqlCommand(HsqlCommand srcCommand) : this()
{
    m_commandTextHasParameters = srcCommand.m_commandTextHasParameters;
    // Reuse the already-tokenized command text when available, avoiding a
    // re-parse when CommandText is assigned below.
    if (srcCommand.m_commandTextHasParameters &&
        (srcCommand.m_tokenList != null))
    {
        m_tokenList = srcCommand.m_tokenList.Clone();
    }
    this.CommandText = srcCommand.CommandText;
    this.CommandTimeout = srcCommand.CommandTimeout;
    this.CommandType = srcCommand.CommandType;
    this.Connection = srcCommand.Connection;
    this.DesignTimeVisible = srcCommand.DesignTimeVisible;
    this.Transaction = srcCommand.Transaction; // CHECKME
    this.UpdatedRowSource = srcCommand.UpdatedRowSource;
    // Deep-copy the parameters so the two commands never share parameter instances.
    HsqlParameterCollection parameters = this.Parameters;
    foreach (HsqlParameter parameter in srcCommand.Parameters)
    {
        parameters.Add(parameter.Clone());
    }
}
//Parser State Methods
/// <summary>
/// Pushes a fresh parser state for the given token list onto the state stack.
/// </summary>
public void CreateState(TokenList list) => StateStack.Push(new ParserState(list));
// Pre-pass over the wiki token stream. Its only observable effect is trimming
// trailing spaces and '=' characters off heading tokens; the paragraph
// tracking below appears unfinished (see review notes).
private static void PreprocessTokens(TokenList tokens)
{
    // NOTE(review): `result` is built but never populated, returned, or used.
    var result = new TokenList();
    // NOTE(review): `prevToken` is assigned but never consumed, and
    // `insidePara` is never reassigned — the paragraph-start detection looks
    // like scaffolding for an unfinished feature.
    Token prevToken = null;
    bool prevIsParaBreak = true;
    bool insidePara = true;
    foreach(var token in tokens)
    {
        //fix heading ends: ending '=' chars in Headings should be ignored
        var wikiTerm = token.Terminal as WikiTerminalBase;
        bool isHeading = (wikiTerm != null && wikiTerm.TermType == WikiTermType.Block && wikiTerm.OpenTag.StartsWith("="));
        if (isHeading)
        {
            token.Value = token.ValueString.TrimEnd(' '); //first trim trailing spaces
            token.Value = token.ValueString.TrimEnd('='); //now trim ='s
        } //if
        // Terminal-name prefixes identify paragraph-breaking constructs:
        // headings, block/bullet lists, newlines, and tables.
        var termName = token.Terminal.Name;
        var paraBreak = termName.StartsWith("h") || termName.StartsWith("bl") || termName.StartsWith("nl") || termName.StartsWith("table");
        //token.Terminal.Options = TermOptions.IsDelimiter;
        //check for paragraph start
        if (!insidePara && !paraBreak && prevIsParaBreak)
            prevToken = token;
        prevIsParaBreak = paraBreak;
    } //for each
    // NOTE(review): empty loop — dead code, presumably left from an
    // unfinished second pass.
    foreach(var token in tokens)
    {
    }
}
/// <summary>
/// Converts an identifier AST node into a PBXElementString: plain strings are
/// taken verbatim, quoted strings are unquoted, anything else is an internal
/// parser error.
/// </summary>
public static PBXElementString ParseIdentifierAST(IdentifierAST ast, TokenList tokens, string text)
{
    Token tok = tokens[ast.value];
    if (tok.type == TokenType.String)
    {
        return new PBXElementString(text.Substring(tok.begin, tok.end - tok.begin));
    }
    if (tok.type == TokenType.QuotedString)
    {
        string quoted = text.Substring(tok.begin, tok.end - tok.begin);
        return new PBXElementString(PBXStream.UnquoteString(quoted));
    }
    throw new Exception("Internal parser error");
}
/// <summary>
/// Converts an array AST node into a PBXElementArray by parsing each of its
/// value elements in order.
/// </summary>
public static PBXElementArray ParseArrayAST(ArrayAST ast, TokenList tokens, string text)
{
    var array = new PBXElementArray();
    foreach (ValueAST value in ast.values)
    {
        array.values.Add(ParseValueAST(value, tokens, text));
    }
    return array;
}
/// <summary>
/// Converts a tree AST node into a PBXElementDict by parsing each key/value
/// pair; duplicate keys keep the last parsed value.
/// </summary>
public static PBXElementDict ParseTreeAST(TreeAST ast, TokenList tokens, string text)
{
    var dict = new PBXElementDict();
    foreach (var pair in ast.values)
    {
        PBXElementString parsedKey = ParseIdentifierAST(pair.key, tokens, text);
        PBXElement parsedValue = ParseValueAST(pair.value, tokens, text);
        dict[parsedKey.value] = parsedValue;
    }
    return dict;
}
/// <summary>
/// Bundles a source text together with its token list and AST root.
/// </summary>
internal ParsedSource(String text, TokenList tokens, AstNode root)
{
    Root = root;
    Tokens = tokens;
    Text = text;
}
// Scans the token list for an UPDATE or DELETE statement that reaches the
// next statement boundary (or the end of input) without a WHERE clause.
// Returns the index of the first token of the offending statement, or -1
// when every dangerous statement is qualified.
private int DangerousExecution(TokenList list)
{
    // Index of the UPDATE/DELETE currently under suspicion; -1 = scanning.
    int tokenIndex = -1;
    int foreachindex = 0;
    // Keywords that terminate the current statement's scope.
    List<string> QueryBreakers = new List<string>() { "CREATE", "DROP", "USE", "SELECT", "UPDATE",
        "DELETE", "DECLARE", "PRINT", "IF", "BEGIN", "TRANSACTION", "RAISERROR", "INSERT", "TRUNCATE",
        "FETCH", "INTO", "CLOSE", "DEALLOCATE", "ALTER", "EXEC", "OPEN", "COMMIT", "ROLLBACK"};
    // Statements that are dangerous when unqualified.
    List<string> CheckForDangerWords = new List<string>() { "UPDATE", "DELETE" };
    // A WHERE clause neutralizes the danger.
    string DangerCleaner = "WHERE";
    foreach (Token token in list.List)
    {
        if (tokenIndex >= 0)
        {
            // Inside a suspicious statement.
            if(token.Text.ToUpper().Equals(DangerCleaner))
            {
                // WHERE found: the statement is qualified, resume scanning.
                tokenIndex = -1;
            }
            else if (QueryBreakers.Contains(token.Text.ToUpper()))
            {
                // Statement ended without a WHERE: report its position.
                return tokenIndex;
            }
        }
        else
        {
            if (CheckForDangerWords.Contains(token.Text.ToUpper()))
            {
                tokenIndex = foreachindex;
            }
        }
        foreachindex++;
    }
    // Also covers a dangerous statement that runs to the end of the input.
    return tokenIndex;
}
//Note: we don't actually parse in current version, only scan. Will implement full parsing in the future,
// to support all intellisense operations
// Scans newText into a token list, wraps it (with a null AST root) in a new
// ParsedSource, and pushes the result to every registered view.
private void ParseSource(String newText)
{
    TokenList newTokens = new TokenList();
    //Explicitly catch the case when new text is empty
    if (newText != string.Empty)
    {
        SourceFile srcFile = new SourceFile(newText, "source");
        _compiler.Scanner.Prepare(_context, srcFile);
        IEnumerable<Token> tokenStream = _compiler.Scanner.BeginScan();
        // Materialize the scanner's lazy stream into the token list.
        foreach (Token _token in tokenStream)
        {
            newTokens.Add(_token);
        }
        //newTokens.AddRange(tokenStream);
    }
    //finally create new contents object and replace the existing _contents value
    // Root is null because no AST is produced yet (scan-only, see note above).
    _parsedSource = new ParsedSource(newText, newTokens, null);
    //notify views
    var views = GetViews();
    foreach (var view in views)
        view.UpdateParsedSource(_parsedSource);
}
/// <summary>
/// Splits <paramref name="inputSQL"/> into SQL tokens (names/keywords, numeric
/// literals, strings, quoted names, comments, operators and whitespace) using a
/// single-pass, character-by-character state machine. The state is the kind of
/// token currently being accumulated; a null state means no token is open.
/// </summary>
/// <param name="inputSQL">Raw SQL text to tokenize.</param>
/// <returns>
/// The populated token list. <c>HasUnfinishedToken</c> is set when the input
/// ends inside a construct that requires a closing delimiter (block comment,
/// string literal, quoted string, or bracket-quoted name).
/// </returns>
public Interfaces.ITokenList TokenizeSQL(string inputSQL)
{
    TokenList tokenContainer = new TokenList();
    StringReader inputReader = new StringReader(inputSQL);
    SqlTokenizationType? currentTokenizationType;
    StringBuilder currentTokenValue = new StringBuilder();
    int commentNesting;

    currentTokenizationType = null;
    currentTokenValue.Length = 0;
    commentNesting = 0;

    int currentCharInt = inputReader.Read();
    while (currentCharInt >= 0)
    {
        char currentCharacter = (char)currentCharInt;
        if (currentTokenizationType == null)
        {
            // No token in progress: this character decides what (if anything) to open.
            ProcessOrOpenToken(ref currentTokenizationType, currentTokenValue, currentCharacter, tokenContainer);
        }
        else
        {
            switch (currentTokenizationType.Value)
            {
                case SqlTokenizationType.WhiteSpace:
                    // Accumulate contiguous whitespace into a single token.
                    if (IsWhitespace(currentCharacter))
                    {
                        currentTokenValue.Append(currentCharacter);
                    }
                    else
                    {
                        CompleteToken(ref currentTokenizationType, tokenContainer, currentTokenValue);
                        ProcessOrOpenToken(ref currentTokenizationType, currentTokenValue, currentCharacter, tokenContainer);
                    }
                    break;

                case SqlTokenizationType.SinglePeriod:
                    // A lone '.' becomes a decimal literal when a digit follows;
                    // otherwise it is completed as its own token.
                    if (currentCharacter >= '0' && currentCharacter <= '9')
                    {
                        currentTokenizationType = SqlTokenizationType.DecimalValue;
                        currentTokenValue.Append('.');
                        currentTokenValue.Append(currentCharacter);
                    }
                    else
                    {
                        currentTokenValue.Append('.');
                        CompleteToken(ref currentTokenizationType, tokenContainer, currentTokenValue);
                        ProcessOrOpenToken(ref currentTokenizationType, currentTokenValue, currentCharacter, tokenContainer);
                    }
                    break;

                case SqlTokenizationType.SingleZero:
                    // '0' may start a 0x binary/hex literal, a plain number, or a decimal.
                    if (currentCharacter == 'x' || currentCharacter == 'X')
                    {
                        currentTokenizationType = SqlTokenizationType.BinaryValue;
                        currentTokenValue.Append('0');
                        currentTokenValue.Append(currentCharacter);
                    }
                    else if (currentCharacter >= '0' && currentCharacter <= '9')
                    {
                        currentTokenizationType = SqlTokenizationType.Number;
                        currentTokenValue.Append('0');
                        currentTokenValue.Append(currentCharacter);
                    }
                    else if (currentCharacter == '.')
                    {
                        currentTokenizationType = SqlTokenizationType.DecimalValue;
                        currentTokenValue.Append('0');
                        currentTokenValue.Append(currentCharacter);
                    }
                    else
                    {
                        currentTokenValue.Append('0');
                        CompleteToken(ref currentTokenizationType, tokenContainer, currentTokenValue);
                        ProcessOrOpenToken(ref currentTokenizationType, currentTokenValue, currentCharacter, tokenContainer);
                    }
                    break;

                case SqlTokenizationType.Number:
                    // Integer so far; may upgrade to float (exponent) or decimal (point).
                    if (currentCharacter == 'e' || currentCharacter == 'E')
                    {
                        currentTokenizationType = SqlTokenizationType.FloatValue;
                        currentTokenValue.Append(currentCharacter);
                    }
                    else if (currentCharacter == '.')
                    {
                        currentTokenizationType = SqlTokenizationType.DecimalValue;
                        currentTokenValue.Append(currentCharacter);
                    }
                    else if (currentCharacter >= '0' && currentCharacter <= '9')
                    {
                        currentTokenValue.Append(currentCharacter);
                    }
                    else
                    {
                        CompleteToken(ref currentTokenizationType, tokenContainer, currentTokenValue);
                        ProcessOrOpenToken(ref currentTokenizationType, currentTokenValue, currentCharacter, tokenContainer);
                    }
                    break;

                case SqlTokenizationType.DecimalValue:
                    if (currentCharacter == 'e' || currentCharacter == 'E')
                    {
                        currentTokenizationType = SqlTokenizationType.FloatValue;
                        currentTokenValue.Append(currentCharacter);
                    }
                    else if (currentCharacter >= '0' && currentCharacter <= '9')
                    {
                        currentTokenValue.Append(currentCharacter);
                    }
                    else
                    {
                        CompleteToken(ref currentTokenizationType, tokenContainer, currentTokenValue);
                        ProcessOrOpenToken(ref currentTokenizationType, currentTokenValue, currentCharacter, tokenContainer);
                    }
                    break;

                case SqlTokenizationType.FloatValue:
                    if (currentCharacter >= '0' && currentCharacter <= '9')
                    {
                        currentTokenValue.Append(currentCharacter);
                    }
                    else if (currentCharacter == '-' && currentTokenValue.ToString().EndsWith("e", StringComparison.OrdinalIgnoreCase))
                    {
                        // A minus sign is only valid immediately after the exponent marker ("1e-5").
                        currentTokenValue.Append(currentCharacter);
                    }
                    else
                    {
                        CompleteToken(ref currentTokenizationType, tokenContainer, currentTokenValue);
                        ProcessOrOpenToken(ref currentTokenizationType, currentTokenValue, currentCharacter, tokenContainer);
                    }
                    break;

                case SqlTokenizationType.BinaryValue:
                    // Hex digits only, after the leading "0x".
                    if ((currentCharacter >= '0' && currentCharacter <= '9')
                        || (currentCharacter >= 'A' && currentCharacter <= 'F')
                        || (currentCharacter >= 'a' && currentCharacter <= 'f')
                        )
                    {
                        currentTokenValue.Append(currentCharacter);
                    }
                    else
                    {
                        CompleteToken(ref currentTokenizationType, tokenContainer, currentTokenValue);
                        ProcessOrOpenToken(ref currentTokenizationType, currentTokenValue, currentCharacter, tokenContainer);
                    }
                    break;

                case SqlTokenizationType.SingleDollar:
                    // '$' followed by a letter is a pseudo-name; otherwise a monetary literal.
                    currentTokenValue.Append('$');
                    currentTokenValue.Append(currentCharacter);
                    if ((currentCharacter >= 'A' && currentCharacter <= 'Z')
                        || (currentCharacter >= 'a' && currentCharacter <= 'z')
                        )
                        currentTokenizationType = SqlTokenizationType.PseudoName;
                    else
                        currentTokenizationType = SqlTokenizationType.MonetaryValue;
                    break;

                case SqlTokenizationType.MonetaryValue:
                    if (currentCharacter >= '0' && currentCharacter <= '9')
                    {
                        currentTokenValue.Append(currentCharacter);
                    }
                    else if (currentCharacter == '-' && currentTokenValue.Length == 1)
                    {
                        // Sign is only allowed directly after the '$'.
                        currentTokenValue.Append(currentCharacter);
                    }
                    else if (currentCharacter == '.' && !currentTokenValue.ToString().Contains("."))
                    {
                        // At most one decimal point per monetary value.
                        currentTokenValue.Append(currentCharacter);
                    }
                    else
                    {
                        CompleteToken(ref currentTokenizationType, tokenContainer, currentTokenValue);
                        ProcessOrOpenToken(ref currentTokenizationType, currentTokenValue, currentCharacter, tokenContainer);
                    }
                    break;

                case SqlTokenizationType.SingleHyphen:
                    // '-' may open a "--" comment, combine into "-=", or stand alone.
                    if (currentCharacter == '-')
                    {
                        currentTokenizationType = SqlTokenizationType.SingleLineComment;
                    }
                    else if (currentCharacter == '=')
                    {
                        currentTokenizationType = SqlTokenizationType.OtherOperator;
                        currentTokenValue.Append('-');
                        currentTokenValue.Append(currentCharacter);
                        CompleteToken(ref currentTokenizationType, tokenContainer, currentTokenValue);
                    }
                    else
                    {
                        currentTokenizationType = SqlTokenizationType.OtherOperator;
                        currentTokenValue.Append('-');
                        CompleteToken(ref currentTokenizationType, tokenContainer, currentTokenValue);
                        ProcessOrOpenToken(ref currentTokenizationType, currentTokenValue, currentCharacter, tokenContainer);
                    }
                    break;

                case SqlTokenizationType.SingleSlash:
                    // '/' may open "/*", "//", combine into "/=", or stand alone.
                    if (currentCharacter == '*')
                    {
                        currentTokenizationType = SqlTokenizationType.BlockComment;
                        commentNesting++;
                    }
                    else if (currentCharacter == '/')
                    {
                        currentTokenizationType = SqlTokenizationType.SingleLineCommentCStyle;
                    }
                    else if (currentCharacter == '=')
                    {
                        currentTokenizationType = SqlTokenizationType.OtherOperator;
                        currentTokenValue.Append('/');
                        currentTokenValue.Append(currentCharacter);
                        CompleteToken(ref currentTokenizationType, tokenContainer, currentTokenValue);
                    }
                    else
                    {
                        currentTokenizationType = SqlTokenizationType.OtherOperator;
                        currentTokenValue.Append('/');
                        CompleteToken(ref currentTokenizationType, tokenContainer, currentTokenValue);
                        ProcessOrOpenToken(ref currentTokenizationType, currentTokenValue, currentCharacter, tokenContainer);
                    }
                    break;

                case SqlTokenizationType.SingleLineComment:
                case SqlTokenizationType.SingleLineCommentCStyle:
                    // Line comments run to the line break; the break itself stays in the token.
                    if (currentCharacter == (char)13 || currentCharacter == (char)10)
                    {
                        currentTokenValue.Append(currentCharacter);
                        int nextCharInt = inputReader.Peek();
                        // Swallow the LF of a CR/LF pair so the whole line ending stays together.
                        if (currentCharacter == (char)13 && nextCharInt == 10)
                            currentTokenValue.Append((char)inputReader.Read());
                        CompleteToken(ref currentTokenizationType, tokenContainer, currentTokenValue);
                    }
                    else
                    {
                        currentTokenValue.Append(currentCharacter);
                    }
                    break;

                case SqlTokenizationType.BlockComment:
                    if (currentCharacter == '*')
                    {
                        if (inputReader.Peek() == (int)'/')
                        {
                            commentNesting--;
                            char nextCharacter = (char)inputReader.Read();
                            if (commentNesting > 0)
                            {
                                // Closing an inner nested comment: keep the "*/" in the token text.
                                currentTokenValue.Append(currentCharacter);
                                currentTokenValue.Append(nextCharacter);
                            }
                            else
                            {
                                CompleteToken(ref currentTokenizationType, tokenContainer, currentTokenValue);
                            }
                        }
                        else
                        {
                            currentTokenValue.Append(currentCharacter);
                        }
                    }
                    else
                    {
                        currentTokenValue.Append(currentCharacter);
                        // "/*" inside a block comment opens another nesting level.
                        if (currentCharacter == '/' && inputReader.Peek() == (int)'*')
                        {
                            currentTokenValue.Append((char)inputReader.Read());
                            commentNesting++;
                        }
                    }
                    break;

                case SqlTokenizationType.OtherNode:
                case SqlTokenizationType.PseudoName:
                    // Word-like tokens run until a non-word character appears.
                    if (IsNonWordCharacter(currentCharacter))
                    {
                        CompleteToken(ref currentTokenizationType, tokenContainer, currentTokenValue);
                        ProcessOrOpenToken(ref currentTokenizationType, currentTokenValue, currentCharacter, tokenContainer);
                    }
                    else
                    {
                        currentTokenValue.Append(currentCharacter);
                    }
                    break;

                case SqlTokenizationType.SingleN:
                    // 'N' may introduce a national-character string literal (N'...').
                    if (currentCharacter == '\'')
                    {
                        currentTokenizationType = SqlTokenizationType.NString;
                    }
                    else
                    {
                        if (IsNonWordCharacter(currentCharacter))
                        {
                            CompleteToken(ref currentTokenizationType, tokenContainer, currentTokenValue);
                            ProcessOrOpenToken(ref currentTokenizationType, currentTokenValue, currentCharacter, tokenContainer);
                        }
                        else
                        {
                            // Just an identifier starting with N.
                            currentTokenizationType = SqlTokenizationType.OtherNode;
                            currentTokenValue.Append('N');
                            currentTokenValue.Append(currentCharacter);
                        }
                    }
                    break;

                case SqlTokenizationType.NString:
                case SqlTokenizationType.String:
                    if (currentCharacter == '\'')
                    {
                        // Doubled quote is an escaped quote; a single one ends the literal.
                        if (inputReader.Peek() == (int)'\'')
                        {
                            inputReader.Read();
                            currentTokenValue.Append(currentCharacter);
                        }
                        else
                        {
                            CompleteToken(ref currentTokenizationType, tokenContainer, currentTokenValue);
                        }
                    }
                    else
                    {
                        currentTokenValue.Append(currentCharacter);
                    }
                    break;

                case SqlTokenizationType.QuotedString:
                    if (currentCharacter == '"')
                    {
                        // Doubled double-quote is an escape; a single one ends the token.
                        if (inputReader.Peek() == (int)'"')
                        {
                            inputReader.Read();
                            currentTokenValue.Append(currentCharacter);
                        }
                        else
                        {
                            CompleteToken(ref currentTokenizationType, tokenContainer, currentTokenValue);
                        }
                    }
                    else
                    {
                        currentTokenValue.Append(currentCharacter);
                    }
                    break;

                case SqlTokenizationType.BracketQuotedName:
                    if (currentCharacter == ']')
                    {
                        // Doubled ']' is an escape; a single one ends the bracketed name.
                        if (inputReader.Peek() == (int)']')
                        {
                            inputReader.Read();
                            currentTokenValue.Append(currentCharacter);
                        }
                        else
                        {
                            CompleteToken(ref currentTokenizationType, tokenContainer, currentTokenValue);
                        }
                    }
                    else
                    {
                        currentTokenValue.Append(currentCharacter);
                    }
                    break;

                case SqlTokenizationType.SingleLT:
                    // '<' may combine into "<=", "<>", or "<<".
                    currentTokenValue.Append('<');
                    currentTokenizationType = SqlTokenizationType.OtherOperator;
                    if (currentCharacter == '=' || currentCharacter == '>' || currentCharacter == '<')
                    {
                        currentTokenValue.Append(currentCharacter);
                        CompleteToken(ref currentTokenizationType, tokenContainer, currentTokenValue);
                    }
                    else
                    {
                        CompleteToken(ref currentTokenizationType, tokenContainer, currentTokenValue);
                        ProcessOrOpenToken(ref currentTokenizationType, currentTokenValue, currentCharacter, tokenContainer);
                    }
                    break;

                case SqlTokenizationType.SingleGT:
                    // '>' may combine into ">=" or ">>".
                    currentTokenValue.Append('>');
                    currentTokenizationType = SqlTokenizationType.OtherOperator;
                    if (currentCharacter == '=' || currentCharacter == '>')
                    {
                        currentTokenValue.Append(currentCharacter);
                        CompleteToken(ref currentTokenizationType, tokenContainer, currentTokenValue);
                    }
                    else
                    {
                        CompleteToken(ref currentTokenizationType, tokenContainer, currentTokenValue);
                        ProcessOrOpenToken(ref currentTokenizationType, currentTokenValue, currentCharacter, tokenContainer);
                    }
                    break;

                case SqlTokenizationType.SingleAsterisk:
                    // '*' may combine into "*=".
                    currentTokenValue.Append('*');
                    if (currentCharacter == '=')
                    {
                        currentTokenValue.Append(currentCharacter);
                        currentTokenizationType = SqlTokenizationType.OtherOperator;
                        CompleteToken(ref currentTokenizationType, tokenContainer, currentTokenValue);
                    }
                    else
                    {
                        CompleteToken(ref currentTokenizationType, tokenContainer, currentTokenValue);
                        ProcessOrOpenToken(ref currentTokenizationType, currentTokenValue, currentCharacter, tokenContainer);
                    }
                    break;

                case SqlTokenizationType.SingleOtherCompoundableOperator:
                    currentTokenizationType = SqlTokenizationType.OtherOperator;
                    if (currentCharacter == '=')
                    {
                        currentTokenValue.Append(currentCharacter);
                        CompleteToken(ref currentTokenizationType, tokenContainer, currentTokenValue);
                    }
                    else
                    {
                        CompleteToken(ref currentTokenizationType, tokenContainer, currentTokenValue);
                        ProcessOrOpenToken(ref currentTokenizationType, currentTokenValue, currentCharacter, tokenContainer);
                    }
                    break;

                case SqlTokenizationType.SinglePipe:
                    // '|' may combine into "|=" or "||".
                    currentTokenizationType = SqlTokenizationType.OtherOperator;
                    currentTokenValue.Append('|');
                    if (currentCharacter == '=' || currentCharacter == '|')
                    {
                        currentTokenValue.Append(currentCharacter);
                        CompleteToken(ref currentTokenizationType, tokenContainer, currentTokenValue);
                    }
                    else
                    {
                        CompleteToken(ref currentTokenizationType, tokenContainer, currentTokenValue);
                        ProcessOrOpenToken(ref currentTokenizationType, currentTokenValue, currentCharacter, tokenContainer);
                    }
                    break;

                case SqlTokenizationType.SingleEquals:
                    // '=' may combine into "==".
                    currentTokenValue.Append('=');
                    if (currentCharacter == '=')
                    {
                        currentTokenValue.Append(currentCharacter);
                        CompleteToken(ref currentTokenizationType, tokenContainer, currentTokenValue);
                    }
                    else
                    {
                        CompleteToken(ref currentTokenizationType, tokenContainer, currentTokenValue);
                        ProcessOrOpenToken(ref currentTokenizationType, currentTokenValue, currentCharacter, tokenContainer);
                    }
                    break;

                case SqlTokenizationType.SingleExclamation:
                    // '!' may combine into "!=", "!<" or "!>"; otherwise it is a plain node.
                    currentTokenValue.Append('!');
                    if (currentCharacter == '=' || currentCharacter == '<' || currentCharacter == '>')
                    {
                        currentTokenizationType = SqlTokenizationType.OtherOperator;
                        currentTokenValue.Append(currentCharacter);
                        CompleteToken(ref currentTokenizationType, tokenContainer, currentTokenValue);
                    }
                    else
                    {
                        currentTokenizationType = SqlTokenizationType.OtherNode;
                        CompleteToken(ref currentTokenizationType, tokenContainer, currentTokenValue);
                        ProcessOrOpenToken(ref currentTokenizationType, currentTokenValue, currentCharacter, tokenContainer);
                    }
                    break;

                default:
                    // Unreachable by construction: every in-progress type must have a case
                    // above. InvalidOperationException (rather than bare Exception) marks
                    // this as an internal invariant violation.
                    throw new InvalidOperationException("In-progress node unrecognized!");
            }
        }

        currentCharInt = inputReader.Read();
    }

    if (currentTokenizationType != null)
    {
        // Flag constructs that require a closing delimiter but hit end-of-input first.
        if (currentTokenizationType.Value == SqlTokenizationType.BlockComment
            || currentTokenizationType.Value == SqlTokenizationType.String
            || currentTokenizationType.Value == SqlTokenizationType.NString
            || currentTokenizationType.Value == SqlTokenizationType.QuotedString
            || currentTokenizationType.Value == SqlTokenizationType.BracketQuotedName
            )
            tokenContainer.HasUnfinishedToken = true;

        CompleteToken(ref currentTokenizationType, tokenContainer, currentTokenValue);
    }

    return tokenContainer;
}
/// <summary>
/// Creates a block-definition command from its parsed constituents.
/// </summary>
/// <param name="id">Name of the block being defined.</param>
/// <param name="baseType">Base block the definition derives from.</param>
/// <param name="body">Body items making up the definition.</param>
public BlockDefCmd(Identifier id, DefBlockBase baseType, TokenList<DefBlockBodyItem> body)
{
    this.id = id;
    this.baseType = baseType;
    this.body = body;
}
/// <summary>
/// Releases the underlying <c>HsqlStatement</c> handle, when one exists, and
/// clears all statement-related state so the associated resources become
/// eligible for garbage collection.
/// </summary>
internal void InvalidateStatement()
{
    try
    {
        // Copy the fields into locals first so the null checks and the Free
        // call observe a consistent pair even if another thread races us.
        HsqlConnection localConnection = m_dbConnection;
        HsqlStatement localStatement = m_statement;

        // Free the compiled statement handle only while the connection is
        // still open — otherwise there is nothing server-side to release,
        // and we must not leak handles while it is.
        if (localConnection != null
            && localConnection.State == ConnectionState.Open
            && localStatement != null)
        {
            localStatement.Free(Session);
        }
    }
    finally
    {
        // Always drop the cached references, even when Free throws.
        m_statement = null;
        m_tokenList = null;
        m_storedProcedureCommandText = null;
        m_tableDirectCommandText = null;
    }
}
/// <summary>
/// Creates a parser over <paramref name="tokens"/>, positioned at the first
/// non-comment token.
/// </summary>
/// <param name="tokens">Token stream to be parsed.</param>
public Parser(TokenList tokens)
{
    this.tokens = tokens;
    // Advance past any leading comment tokens before parsing starts.
    currPos = SkipComments(0);
}
/// <summary>
/// Creates a service block definition from its parsed constituents.
/// </summary>
/// <param name="id">Name of the service.</param>
/// <param name="optArgList">Optional argument list, when the service declares one.</param>
/// <param name="body">Lines forming the service body.</param>
public DefBlockService(Identifier id, Optional<TokenList<Identifier>> optArgList, TokenList<DefBlockServiceLine> body)
{
    this.id = id;
    this.optArgList = optArgList;
    this.body = body;
}
/// <summary>
/// Builds a parser for the given token stream. The initial position skips
/// any comment tokens at the start of the stream.
/// </summary>
/// <param name="tokens">Tokens to drive the parse.</param>
public Parser(TokenList tokens)
{
    this.tokens = tokens;
    // SkipComments(0) yields the index of the first meaningful token.
    currPos = SkipComments(0);
}