/// <summary>
/// Demo entry point: parses one int-declaration script, then evaluates three
/// arithmetic scripts (one deliberately malformed) and waits for a key press.
/// </summary>
public static void Main(string[] args)
{
    var calculator = new SimpleCalculator();
    string script = "int a = b + 3;";
    Console.WriteLine("解析变量声明语句:" + script);

    var lexer = new SimpleLexer();
    ITokenReader tokens = lexer.tokenize(script);
    try
    {
        SimpleASTNode declaration = calculator.intDeclare(tokens);
        calculator.dumpAST(declaration, "");
    }
    catch (Exception e)
    {
        Console.WriteLine(e.Message);
    }

    script = "2+3*5";
    Console.WriteLine("\n计算:{0}", script);
    calculator.evaluate(script);

    script = "2+";
    Console.WriteLine("\n计算:{0}", script);
    calculator.evaluate(script);

    script = "2+3+4*2+10+10/5";
    Console.WriteLine("\n计算:{0}", script);
    calculator.evaluate(script);

    Console.ReadKey();
}
// TODO Clean up.
// Builds an #if directive node. The directive's own expression becomes the first
// condition set; child nodes are then consumed until the matching end directive.
// GetConditionFromNode flags #elseif/#else children, each of which starts a new
// ConditionSet; every other node is attached to the current set.
// NOTE(review): the error message implies a ConditionSet with a null Condition
// represents #else, and a non-null newCondition represents #elseif — confirm
// against GetConditionFromNode.
public IfNode(ITokenReader tokenReader, INodeReader nodeReader, string directiveName = null, Location location = null) :
    base(directiveName, location)
{
    Conditions = new List<ConditionSet>();

    var condition = ExpressionRewriter.Rewrite(tokenReader);
    var curConditionSet = new ConditionSet(condition);
    Conditions.Add(curConditionSet);

    var childrenNodes = nodeReader.TakeWhile((node) => !IsEndDirective(node, DirectiveName));
    foreach(var node in childrenNodes)
    {
        bool isConditional;
        var newCondition = GetConditionFromNode(node, out isConditional);

        if(isConditional)
        {
            // An #elseif (non-null condition) may not follow an #else (null condition).
            if(newCondition != null && curConditionSet.Condition == null)
            {
                throw new BadDataException("Can't have an elseif node after an else node", node.Location);
            }

            curConditionSet = new ConditionSet(newCondition);
            Conditions.Add(curConditionSet);
            continue;
        }

        curConditionSet.ChildrenNodes.Add(node);
    }
}
/*
 * 语法解析:乘法表达式
 * Parses a multiplicative expression: primary (('*' | '/') primary)*.
 * Fix: loop over consecutive '*' and '/' operators so input such as "2*3*5"
 * is fully consumed and parsed left-associatively. The previous version bound
 * only the first operator and silently left the remaining tokens unread.
 * **/
private SimpleASTNode multiplicative(ITokenReader tokens)
{
    SimpleASTNode child1 = primary(tokens);
    SimpleASTNode node = child1;
    while (child1 != null)
    {
        IToken token = tokens.peek();
        if (token == null ||
            (token.getType() != TokenType.Star && token.getType() != TokenType.Slash))
        {
            break;
        }
        token = tokens.read();                 // consume the operator
        SimpleASTNode child2 = primary(tokens);
        if (child2 == null)
        {
            throw (new FormatException("无效的乘法表达式!"));
        }
        node = new SimpleASTNode(ASTNodeType.Multiplicative, token.getText());
        node.addChild(child1);
        node.addChild(child2);
        child1 = node;                         // subtree so far becomes the left operand
    }
    return (node);
}
/// <summary>
/// Initialize using default settings: a fresh token reader plus empty error
/// and token lists.
/// </summary>
/// <param name="settings">Lexer settings to store for later use.</param>
public virtual void Init(LexSettings settings)
{
    _settings = settings;
    _reader = new TokenReader();
    _errors = new List <string>();
    _tokenList = new List <string>();
}
/// <summary>
/// Parses a #let directive: the target variable name followed by either a
/// shorthand expression on the same line or child nodes up to the end directive.
/// </summary>
public LetNode(ITokenReader tokenReader, INodeReader nodeReader, string directiveName = null, Location location = null) :
    base(directiveName, location)
{
    var directiveStart = tokenReader.CurrentLocation;

    Token nameToken = tokenReader.ReadToken();
    if(nameToken == null)
    {
        throw new MissingDataException("Variable name", directiveStart);
    }
    if(nameToken.TokenType != TokenType.Identifier)
    {
        throw new MissingDataException("Variable name", nameToken.Location);
    }
    Variable = nameToken.Value.ToString();

    var shorthandNode = ReadShorthandNode(tokenReader);
    if(shorthandNode == null)
    {
        // No shorthand expression: consume children until the matching end directive.
        ChildrenNodes = new List<NodeBase>(nodeReader.TakeWhile((node) => !IsEndDirective(node, DirectiveName)));
    }
    else
    {
        ChildrenNodes = new List<NodeBase> { shorthandNode };
    }
}
/// <summary>
/// Parses an #each directive: a loop variable name, the values expression, and
/// the child nodes up to the matching end directive.
/// </summary>
public EachNode(ITokenReader tokenReader, INodeReader nodeReader, string directiveName = null, Location location = null) :
    base(directiveName, location)
{
    var directiveStart = tokenReader.CurrentLocation;

    Token nameToken = tokenReader.ReadToken();
    if(nameToken == null)
    {
        throw new MissingDataException("Variable name", directiveStart);
    }
    if(nameToken.TokenType != TokenType.Identifier)
    {
        throw new MissingDataException("Variable name", nameToken.Location);
    }
    Variable = nameToken.Value.ToString();

    Values = ExpressionRewriter.Rewrite(tokenReader);

    // Take children until the end directive that targets this #each.
    ChildrenNodes = new List<NodeBase>(nodeReader.TakeWhile((node) =>
    {
        var endDirective = node as EndDirectiveNode;
        bool closesThisDirective = endDirective != null && endDirective.TargetDirectiveName == DirectiveName;
        return !closesThisDirective;
    }));
}
/// <summary>
/// Reads the parsable members (or only the key members when reading a reference)
/// of this concept from the token stream and builds a new ConceptInfoType instance.
/// </summary>
/// <param name="tokenReader">Source of DSL tokens.</param>
/// <param name="lastConcept">Enclosing concept offered as a parent to members; may be null.</param>
/// <param name="readingAReference">When true, only key members are read.</param>
/// <returns>The populated concept, or an error describing the first member that failed.</returns>
private ValueOrError <IConceptInfo> ParseMembers(ITokenReader tokenReader, IConceptInfo lastConcept, bool readingAReference)
{
    IConceptInfo conceptInfo = (IConceptInfo)Activator.CreateInstance(ConceptInfoType);
    bool firstMember = true;
    bool lastPropertyWasInlineParent = false;
    bool lastConceptUsed = false;

    // References read key members; regular parsing reads all parsable members.
    var listOfMembers = readingAReference ? Members.Where(m => m.IsKey) : Members.Where(m => m.IsParsable);

    foreach (ConceptMember member in listOfMembers)
    {
        var valueOrError = ReadMemberValue(member, tokenReader, lastConcept, firstMember,
            ref lastPropertyWasInlineParent, ref lastConceptUsed, readingAReference);

        if (valueOrError.IsError)
        {
            return(ValueOrError <IConceptInfo> .CreateError(string.Format(CultureInfo.InvariantCulture,
                "Cannot read the value of {0} in {1}. {2}",
                member.Name, ConceptInfoType.Name, valueOrError.Error)));
        }

        member.SetMemberValue(conceptInfo, valueOrError.Value);
        firstMember = false;
    }

    // If an enclosing concept was supplied but no member consumed it,
    // this concept cannot be nested at the current position.
    if (!lastConceptUsed && lastConcept != null)
    {
        return(ValueOrError <IConceptInfo> .CreateError(string.Format(
            "This concept cannot be enclosed within {0}. Trying to read {1}.",
            lastConcept.GetType().Name, ConceptInfoType.Name)));
    }

    return(ValueOrError <IConceptInfo> .CreateValue(conceptInfo));
}
// Exercises the char-level reader API: sequential reads, peeks, multi-char reads,
// one-char backtracking, whitespace/escape detection, and ConsumeWhiteSpace with
// and without its flag.
// NOTE(review): the expected values imply the reader is created over input starting
// "abcd1234..." — confirm against Create().
public void CanParse()
{
    ITokenReader reader = Create();

    // A single read advances; peeks do not.
    string current = reader.ReadChar();
    Assert.AreEqual("a", current);
    Assert.AreEqual("b", reader.PeekChar());
    Assert.AreEqual("bc", reader.PeekChars(2));

    // Multi-char reads advance by the requested count.
    Assert.AreEqual("bcd", reader.ReadChars(3));
    Assert.AreEqual("1234", reader.ReadChars(4));

    reader.ReadChar();
    Assert.IsTrue(reader.IsToken());
    Assert.AreEqual("4", reader.PreviousChar);

    // Backtracking one char makes the previous char current again.
    reader.ReadBackChar();
    Assert.AreEqual("4", reader.CurrentChar);

    reader.ReadChar();
    reader.ReadChar();
    Assert.IsTrue(reader.IsWhiteSpace());
    reader.ReadChars(2);
    Assert.IsTrue(reader.IsWhiteSpace());
    reader.ReadChar();
    Assert.IsTrue(reader.IsEscape());

    // ConsumeWhiteSpace stops at the next non-whitespace character...
    reader.ConsumeWhiteSpace();
    Assert.AreEqual(reader.CurrentChar, "\\");
    reader.ReadChar();
    Assert.AreEqual(reader.CurrentChar, "b");
    reader.ConsumeWhiteSpace();
    Assert.AreEqual(reader.CurrentChar, "b");

    // ...and with the flag set it consumes more — presumably newlines as well;
    // TODO confirm the flag's exact semantics.
    reader.ConsumeWhiteSpace(true);
    Assert.AreEqual(reader.CurrentChar, "k");
}
/// <summary>
/// Parses a #rep directive: the repetition-count expression followed by child
/// nodes up to the matching end directive.
/// </summary>
public RepNode(ITokenReader tokenReader, INodeReader nodeReader, string directiveName = null, Location location = null) :
    base(directiveName, location)
{
    Value = ExpressionRewriter.Rewrite(tokenReader);

    var children = nodeReader.TakeWhile((node) => !IsEndDirective(node, DirectiveName));
    ChildrenNodes = new List<NodeBase>(children);
}
// Parser callback invoked when a keyword is recognized (or, with keyword == null,
// when keyword parsing failed at the current position). Tracks which keyword
// token is "active" relative to targetPos.
private void OnKeyword(ITokenReader iTokenReader, string keyword)
{
    var tokenReader = (TokenReader)iTokenReader;
    if (tokenReader.PositionInTokenList >= result.Tokens.Count)
    {
        // Reader has run past the token list (end of script); nothing to record.
        return;
    }

    var lastToken = result.Tokens[tokenReader.PositionInTokenList];
    if (keyword == null && tokenReader.PositionInTokenList > 0)
    {
        // On a failed keyword parse, inspect the token just before the current position.
        lastToken = result.Tokens[tokenReader.PositionInTokenList - 1];
    }

    if (lastToken.PositionInDslScript <= targetPos)
    {
        if (keyword != null)
        {
            // A keyword at or before the target position becomes the active keyword;
            // all per-concept tracking collected so far is reset.
            result.KeywordToken = lastToken;
            result.ActiveConceptValidTypes.Clear();
            result.LastTokenParsed.Clear();
            result.LastMemberReadAttempt.Clear();
        }
        else if (targetPos > lastToken.PositionInDslScript)
        {
            result.KeywordToken = null;
        }
    }
    else if (result.NextKeywordToken == null)
    {
        // First keyword token encountered after the target position.
        result.NextKeywordToken = lastToken;
    }
}
/// <summary>
/// Parses an #include directive: the filename expression, plus a back-reference
/// to the owning parser (when the node reader is one) for later resolution.
/// </summary>
public IncludeNode(ITokenReader tokenReader, INodeReader nodeReader, string directiveName = null, Location location = null) :
    base(directiveName, location)
{
    Filename = ExpressionRewriter.Rewrite(tokenReader);

    var owningParser = nodeReader as Parser.Parser;
    this.parentParser = owningParser;
}
// Parses a #for directive of the form: #for <identifier>, <start>, <end>[, <step>]
// followed by child nodes up to the matching end directive.
// NOTE(review): Variable is taken from children[0].Token.ToString(), while similar
// nodes (#let at L5, #each at L6) use token.Value.ToString() — confirm both yield
// the bare identifier text.
public ForNode(ITokenReader tokenReader, INodeReader nodeReader, string directiveName = null, Location location = null) :
    this(directiveName, location)
{
    var parameters = ExpressionRewriter.Rewrite(tokenReader);
    // The rewritten expression's root must be the comma operator.
    if(!parameters.Token.IsSymbol(","))
    {
        throw new DataTypeException("Expected comma-separated list", this);
    }

    var children = parameters.ChildrenTokenNodes;
    if(children.Count < 3 || children.Count > 4)
    {
        throw new MissingDataException("#for directive requires 3 to 4 parameters", location);
    }

    if(children[0].Token.TokenType != TokenType.Identifier)
    {
        throw new MissingDataException("Identifier", children[0].Location);
    }

    Variable = children[0].Token.ToString();
    Start = children[1];
    End = children[2];
    Step = children.Count > 3 ? children[3] : null;   // the step parameter is optional

    ChildrenNodes = new List<NodeBase>(nodeReader.TakeWhile((node) => !IsEndDirective(node, DirectiveName)));
}
// Parser callback invoked after each member-parse attempt. Records, per concept,
// the last token/member successfully parsed before the target position, and which
// concept types remain "active" (their parsing reaches the target).
private void OnMemberRead(ITokenReader iTokenReader, ConceptSyntaxNode conceptInfo, ConceptMemberSyntax conceptMember, ValueOrError <object> valueOrError)
{
    // have we reached a new keyword after target pos? if so, prevent further member parsing
    if (result.NextKeywordToken != null)
    {
        return;
    }

    var tokenReader = (TokenReader)iTokenReader;
    if (tokenReader.PositionInTokenList <= 0 || lastTokenBeforeTarget == null)
    {
        return;
    }

    var concept = conceptInfo.Concept;
    var lastTokenRead = result.Tokens[tokenReader.PositionInTokenList - 1];

    // track last tokens/members parsed before or at target
    if (lastTokenRead.PositionInDslScript <= lastTokenBeforeTarget.PositionInDslScript && !valueOrError.IsError)
    {
        result.LastTokenParsed[concept] = lastTokenRead;
        result.LastMemberReadAttempt[concept] = conceptMember;
    }

    // we are interested in those concepts whose member parsing stops at or after target position
    if (lastTokenRead.PositionInDslScript >= lastTokenBeforeTarget.PositionInDslScript && !result.ActiveConceptValidTypes.Contains(concept))
    {
        result.ActiveConceptValidTypes.Add(concept);
    }
}
/// <summary>
/// Parses this concept if the next token matches its keyword; the enclosing
/// concept (top of the context stack, if any) is offered as the parent.
/// Returns an empty error when the keyword does not match.
/// </summary>
public virtual ValueOrError <IConceptInfo> Parse(ITokenReader tokenReader, Stack <IConceptInfo> context)
{
    if (!tokenReader.TryRead(Keyword))
    {
        return(ValueOrError <IConceptInfo> .CreateError(""));
    }

    var enclosingConcept = context.Count > 0 ? context.Peek() : null;
    return(ParseMembers(tokenReader, enclosingConcept, false));
}
/*
 * Parses the given script and returns the root AST node.
 * @param code  source text of the script
 */
public IASTNode parse(string code)
{
    var lexer = new SimpleLexer();
    ITokenReader tokenStream = lexer.tokenize(code);
    IASTNode root = prog(tokenStream);
    return(root);
}
/// <summary>Creates a tokenizer that reads characters from a raw stream.</summary>
internal Tokenizer(Stream input)
{
    LineNo = 1;
    _fintag = false;
    _cstr = null;
    _sbarray = new char[_sbmaxsize];
    _sbindex = 0;
    // Wrap the stream in a reader so the tokenizer only deals in characters.
    _input = new StreamTokenReader(new StreamReader(input));
}
/// <summary>Creates a tokenizer that reads characters from an in-memory array.</summary>
internal Tokenizer(char[] array)
{
    LineNo = 1;
    _fintag = false;
    _cstr = null;
    _sbarray = new char[_sbmaxsize];
    _sbindex = 0;
    _input = new CharTokenReader(array);
}
/// <summary>Creates a tokenizer that reads characters from a binary reader.</summary>
internal Tokenizer(BinaryReader input)
{
    LineNo = 1;
    _fintag = false;
    _cstr = null;
    _sbarray = new char[_sbmaxsize];
    _sbindex = 0;
    _input = new TokenReader(input);
}
/// <summary>Creates a tokenizer that reads characters from a binary reader.</summary>
internal Tokenizer (BinaryReader input)
{
    LineNo = 1;
    _fintag = false;
    _cstr = null;
    _sbarray = new char[_sbmaxsize];
    _sbindex = 0;
    _input = new TokenReader(input);
}
/// <summary>
/// Creates a scanner over a string source.
/// </summary>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="lexer"/> is null.</exception>
internal TokenScanner(Lexer lexer, string text)
{
    _lexer = lexer ?? throw new ArgumentNullException(nameof(lexer));
    _reader = new TokenStringReader(text);
}
/// <summary>
/// Creates a scanner over a stream source.
/// </summary>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="lexer"/> is null.</exception>
internal TokenScanner(Lexer lexer, Stream stream)
{
    _lexer = lexer ?? throw new ArgumentNullException(nameof(lexer));
    _reader = new TokenStreamReader(stream);
}
/// <summary>
/// Initializes a new Pratt parser with its prefix and infix parselet module trees.
/// </summary>
/// <param name="tokenReader">Token source; forwarded to the base parser.</param>
/// <param name="prefixModuleTree">Tree of prefix parselets; must not be null.</param>
/// <param name="infixModuleTree">Tree of infix parselets; must not be null.</param>
/// <param name="diagnostics">Diagnostic sink; forwarded to the base parser.</param>
protected internal PrattParser(
    ITokenReader <TTokenType> tokenReader,
    PrattParserModuleTree <TTokenType, IPrefixParselet <TTokenType, TExpressionNode> > prefixModuleTree,
    PrattParserModuleTree <TTokenType, IInfixParselet <TTokenType, TExpressionNode> > infixModuleTree,
    DiagnosticList diagnostics) : base(diagnostics, tokenReader)
{
    if (prefixModuleTree == null)
    {
        throw new ArgumentNullException(nameof(prefixModuleTree));
    }
    if (infixModuleTree == null)
    {
        throw new ArgumentNullException(nameof(infixModuleTree));
    }

    PrefixModuleTree = prefixModuleTree;
    InfixModuleTree = infixModuleTree;
}
/*
 * Parses the grammar root: a program node wrapping one additive expression.
 * **/
private SimpleASTNode prog(ITokenReader tokens)
{
    SimpleASTNode root = new SimpleASTNode(ASTNodeType.Programm, "Calculator");
    SimpleASTNode expression = additive(tokens);
    if (expression != null)
    {
        root.addChild(expression);
    }
    return(root);
}
/// <summary>
/// Wires up the KAG parser: registers one parselet per token type.
/// (Order of registration mirrors the original; each token type is distinct.)
/// </summary>
public KagParser(ITokenReader reader) : base(reader)
{
    // Labels, identifiers, and raw text.
    Register(TokenType.Asterisk, new LabelParselet());
    Register(TokenType.Identifier, new IdentifierParselet());
    Register(TokenType.Text, new TextParselet());

    // Tags and their parameters.
    Register(TokenType.LeftBracket, new TagParselet());
    Register(TokenType.Equals, new ParameterParselet());
    Register(TokenType.At, new TagParselet());

    // Titles and line terminators.
    Register(TokenType.Hash, new TitleParselet());
    Register(TokenType.LineEnd, new EndOfLineParselet());
}
// Parser callback invoked when the concept nesting context is pushed or popped.
// Captures a snapshot of the context while it still encloses the target position.
// NOTE(review): unlike OnMemberRead, this indexes Tokens[PositionInTokenList - 1]
// without checking PositionInTokenList > 0 — confirm this callback can never fire
// at position 0. The isOpening flag is currently unused.
private void OnUpdateContext(ITokenReader iTokenReader, Stack <ConceptSyntaxNode> context, bool isOpening)
{
    var tokenReader = (TokenReader)iTokenReader;
    var lastToken = result.Tokens[tokenReader.PositionInTokenList - 1];
    var contextPos = lastToken.PositionEndInDslScript;
    if (contextPos <= targetPos)
    {
        // The stack enumerates innermost-first; Reverse() stores outermost-first.
        result.ConceptContext = context.Reverse().ToList();
        result.KeywordToken = null;
    }
}
// Advances the reader past consecutive whitespace, then rewinds one position so
// the first non-whitespace character will be read next.
// NOTE(review): the loop exits when Read() returns <= 0, which treats both
// end-of-input (conventionally -1) and a NUL character (0) as terminators —
// confirm '\0' cannot occur mid-stream; otherwise `>= 0` with an explicit EOF
// sentinel may be intended.
private void SkipWhitespaces(ITokenReader reader)
{
    int value;
    while ((value = reader.Read()) > 0)
    {
        if (!char.IsWhiteSpace((char)value))
        {
            reader.Seek(-1, SeekOrigin.Current);
            return;
        }
    }
}
// Wires up a database connection: stores the transport, token reader and
// environment, and applies the connection parameters to the environment.
// NOTE(review): the closing #endif implies an alternate, conditionally-compiled
// signature (presumably taking an array pool) precedes this one outside this
// excerpt; _arrayPool below is only compiled under ENABLE_ARRAY_POOL.
public InternalConnection(IConnectionParameters parameters, Stream networkStream, ITokenReader reader, DbEnvironment environment)
#endif
{
    _parameters = parameters;
    _networkStream = networkStream;
    _reader = reader;
    _environment = environment;
    _environment.PacketSize = parameters.PacketSize; //server might decide to change the packet size later anyway
    _environment.UseAseDecimal = parameters.UseAseDecimal;
    // Guards concurrent sends over the shared network stream.
    _sendMutex = new object();
#if ENABLE_ARRAY_POOL
    _arrayPool = arrayPool;
#endif
}
/// <summary>
/// Parses a #local directive: a single identifier token naming the local variable.
/// </summary>
public LocalNode(ITokenReader tokenReader, INodeReader nodeReader, string directiveName = null, Location location = null) :
    base(directiveName, location)
{
    var directiveStart = tokenReader.CurrentLocation;

    VariableName = tokenReader.ReadToken();
    if(VariableName == null)
    {
        throw new MissingDataException("Expected a variable name", directiveStart);
    }
    if(VariableName.TokenType != TokenType.Identifier)
    {
        throw new BadDataException("Expected a variable name", VariableName.Location);
    }
}
// Test parser: succeeds only when its keyword is followed by a "-" token.
// NOTE(review): returning `new SimpleConceptInfo("", "")` relies on an implicit
// conversion to ValueOrError<IConceptInfo> — confirm it exists on ValueOrError.
public ValueOrError <IConceptInfo> Parse(ITokenReader tokenReader, Stack <IConceptInfo> context)
{
    if (tokenReader.ReadText().Value == Keyword)
    {
        if (tokenReader.TryRead("-"))
        {
            return(new SimpleConceptInfo("", ""));
        }
        else
        {
            return(ValueOrError.CreateError(ErrorMessage));
        }
    }
    return(ValueOrError <IConceptInfo> .CreateError(""));
}
/// <summary>
/// Drains the remaining tokens and rewrites them into a single shorthand
/// expression node. Returns null when only whitespace remains.
/// </summary>
private NodeBase ReadShorthandNode(ITokenReader tokenReader)
{
    ICollection<Token> collected = new List<Token>();
    for(Token next = tokenReader.ReadToken(); next != null; next = tokenReader.ReadToken())
    {
        collected.Add(next);
    }

    bool hasContent = collected.Any((token) => token.TokenType != TokenType.WhiteSpace);
    if(!hasContent)
    {
        return null;
    }
    return ExpressionRewriter.Rewrite(collected);
}
/*
 * 整型变量声明语句,如:int a; int b = 2*3;
 * Parses an int declaration: 'int' Identifier ('=' additive)? ';'.
 * Returns null when the stream does not start with 'int'.
 * Fix: guard every tokens.peek() result against null before calling getType(),
 * matching the null checks already used in primary(); previously an empty or
 * truncated token stream caused a NullReferenceException instead of a parse error.
 * **/
private SimpleASTNode intDeclare(ITokenReader tokens)
{
    SimpleASTNode node = null;
    IToken token = tokens.peek();
    if (token != null && token.getType() == TokenType.Int)
    {
        tokens.read();                          // consume 'int'
        token = tokens.peek();
        if (token != null && token.getType() == TokenType.Identifier)
        {
            token = tokens.read();              // consume the variable name
            node = new SimpleASTNode(ASTNodeType.IntDeclaration, token.getText());
            token = tokens.peek();
            if (token != null && token.getType() == TokenType.Assignment)
            {
                tokens.read();                  // consume '='
                SimpleASTNode child = additive(tokens);
                if (child != null)
                {
                    node.addChild(child);
                }
                else
                {
                    throw (new FormatException("'='赋值表达式错误"));
                }
            }
        }
        else
        {
            throw (new FormatException("需要一个变量名"));
        }
        if (node != null)
        {
            token = tokens.peek();
            if (token != null && token.getType() == TokenType.SemiColon)
            {
                tokens.read();                  // consume ';'
            }
            else
            {
                throw (new FormatException("表达式缺少分号"));
            }
        }
    }
    return(node);
}
/// <summary>
/// Buffers every non-whitespace token from <paramref name="tokenReader"/> into
/// an internal queue for rewriting.
/// </summary>
/// <param name="tokenReader">Source of tokens; must not be null.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="tokenReader"/> is null.</exception>
private ExpressionRewriter(ITokenReader tokenReader)
{
    if(tokenReader == null)
    {
        // nameof keeps the parameter name refactor-safe (same runtime string as
        // the old literal); consistent with the other constructors in this codebase.
        throw new ArgumentNullException(nameof(tokenReader));
    }

    this.tokens = new Queue<Token>();

    Token token;
    while((token = tokenReader.ReadToken()) != null)
    {
        if(token.TokenType != TokenType.WhiteSpace)
        {
            this.tokens.Enqueue(token);
        }
    }
}
/// <summary>
/// Parses a #define directive: a variable name, an optional parenthesised
/// parameter list, then either a shorthand expression or child nodes up to the
/// matching end directive.
/// </summary>
public DefineNode(ITokenReader tokenReader, INodeReader nodeReader, string directiveName = null, Location location = null) :
    this(directiveName, location)
{
    var startLocation = tokenReader.CurrentLocation;

    Token variable = tokenReader.ReadToken();
    if(variable == null)
    {
        throw new MissingDataException("Variable name", startLocation);
    }
    if(variable.TokenType != TokenType.Identifier)
    {
        throw new MissingDataException("Variable name", variable.Location);
    }
    Variable = variable.ToString();

    // TODO Extract method.
    var functionParameters = new List<string>();
    FunctionParameters = functionParameters;

    Token token = tokenReader.ReadToken();
    if(token != null && token.IsSymbol("("))
    {
        while(token != null && !token.IsSymbol(")"))
        {
            token = tokenReader.ReadToken();
            // Fix: ReadToken() may return null at end of input; the old code
            // dereferenced token.TokenType before the loop condition could
            // re-check for null, throwing NullReferenceException instead of
            // the intended MissingDataException below.
            if(token != null && token.TokenType == TokenType.Identifier)
            {
                functionParameters.Add(token.Value.ToString());
            }
        }
        if(token == null)
        {
            throw new MissingDataException("Closing parentheses");
        }
        token = null;
    }

    var shorthand = ReadShorthandNode(tokenReader, token);
    if(shorthand != null)
    {
        ChildrenNodes = new List<NodeBase> { shorthand };
    }
    else
    {
        ChildrenNodes = new List<NodeBase>(nodeReader.TakeWhile((node) => !IsEndDirective(node, DirectiveName)));
    }
}
// Test parser: on keyword + "-" it builds a ConceptSyntaxNode with two empty
// parameters; otherwise it reports ErrorMessage.
// NOTE(review): the constructed node is never returned — every path falls through
// to CreateError(""). If the success value matters to callers, this likely should
// return the node; confirm against the tests that use this parser.
public ValueOrError <ConceptSyntaxNode> Parse(ITokenReader tokenReader, Stack <ConceptSyntaxNode> context, out List <string> warnings)
{
    warnings = null;
    if (tokenReader.ReadText().Value == Keyword)
    {
        if (tokenReader.TryRead("-"))
        {
            var node = new ConceptSyntaxNode(new ConceptType());
            node.Parameters[0] = "";
            node.Parameters[1] = "";
        }
        else
        {
            return(ValueOrError.CreateError(ErrorMessage));
        }
    }
    return(ValueOrError <ConceptSyntaxNode> .CreateError(""));
}
/// <summary>
/// Parses this concept if the next token matches its keyword. Warnings collected
/// during member parsing are surfaced only on success.
/// </summary>
public virtual ValueOrError <ConceptSyntaxNode> Parse(ITokenReader tokenReader, Stack <ConceptSyntaxNode> context, out List <string> warnings)
{
    warnings = null;
    _warnings = null;

    if (!tokenReader.TryRead(_conceptType.Keyword))
    {
        return(ValueOrError <ConceptSyntaxNode> .CreateError(""));
    }

    var lastConcept = context.Count > 0 ? context.Peek() : null;
    bool parsedFirstReferenceElement = false;

    var result = ParseMembers(tokenReader, lastConcept, false, ref parsedFirstReferenceElement);
    if (!result.IsError)
    {
        warnings = _warnings;
    }
    return(result);
}
/*
 * 语法解析:基础表达式
 * Parses a primary expression: an int literal, an identifier, or a
 * parenthesised additive expression. Returns null when none matches.
 * **/
private SimpleASTNode primary(ITokenReader tokens)
{
    SimpleASTNode node = null;
    IToken token = tokens.peek();
    if (token == null)
    {
        return(node);
    }

    switch (token.getType())
    {
    case TokenType.IntLiteral:
        token = tokens.read();
        node = new SimpleASTNode(ASTNodeType.IntLiteral, token.getText());
        break;

    case TokenType.Identifier:
        token = tokens.read();
        node = new SimpleASTNode(ASTNodeType.Identifier, token.getText());
        break;

    case TokenType.LeftParen:
        token = tokens.read();
        node = additive(tokens);
        if (node == null)
        {
            throw (new FormatException("无效的表达式"));
        }
        token = tokens.peek();
        if (token != null && token.getType() == TokenType.RightParen)
        {
            tokens.read();
        }
        else
        {
            throw (new FormatException("缺少右括号"));
        }
        break;
    }

    return(node);
}
// Switches the tokenizer's input to a new character encoding mid-stream. Only
// applies when the current source is a StreamTokenReader: the stream is
// repositioned to the byte offset matching the characters consumed so far, then
// re-wrapped with the requested encoding.
// NOTE(review): GetByteCount over a same-length dummy string of '\0' assumes every
// consumed character had the same byte width — confirm this holds for the
// encodings in use (it does for fixed-width encodings).
internal void ChangeFormat(System.Text.Encoding encoding)
{
    if (encoding == null)
    {
        return;
    }

    StreamTokenReader reader = _input as StreamTokenReader;
    if (reader == null)
    {
        return;
    }

    Stream stream = reader._in.BaseStream;
    String fakeReadString = new String(new char[reader.NumCharEncountered]);
    stream.Position = reader._in.CurrentEncoding.GetByteCount(fakeReadString);
    _input = new StreamTokenReader(new StreamReader(stream, encoding));
}
/// <summary>
/// Reads this concept's members from the token stream, embedding the enclosing
/// concept into the resolved parent property when one exists.
/// </summary>
/// <param name="tokenReader">Source of DSL tokens.</param>
/// <param name="useLastConcept">Enclosing concept offered to the parent property; may be null.</param>
/// <param name="readingAReference">When true, only key members are read.</param>
/// <param name="parsedFirstReferenceElement">Dot-separation state within a reference group.</param>
private ValueOrError <IConceptInfo> ParseMembers(ITokenReader tokenReader, IConceptInfo useLastConcept, bool readingAReference, ref bool parsedFirstReferenceElement)
{
    IConceptInfo conceptInfo = (IConceptInfo)Activator.CreateInstance(ConceptInfoType);
    bool firstMember = true;

    var listOfMembers = readingAReference ? Members.Where(m => m.IsKey) : Members.Where(m => m.IsParsable);

    // Parent property: the last parent-nested member, or the first member when
    // it is itself a concept reference.
    var parentProperty = listOfMembers.LastOrDefault(member => member.IsParentNested)
        ?? (listOfMembers.First().IsConceptInfo ? listOfMembers.First() : null);

    if (useLastConcept != null && parentProperty == null)
    {
        return(ValueOrError <IConceptInfo> .CreateError($"This concept cannot be nested within {useLastConcept.GetType().Name}. Trying to read {ConceptInfoType.Name}."));
    }

    foreach (ConceptMember member in listOfMembers)
    {
        if (!readingAReference)
        {
            parsedFirstReferenceElement = false; // Reset a reference elements group, that should separated by dot.
        }

        // Only the parent property receives the enclosing concept.
        var valueOrError = ReadMemberValue(member, tokenReader, member == parentProperty ? useLastConcept : null, firstMember,
            ref parsedFirstReferenceElement, readingAReference);
        OnMemberRead?.Invoke(tokenReader, conceptInfo, member, valueOrError);

        if (valueOrError.IsError)
        {
            return(ValueOrError <IConceptInfo> .CreateError(string.Format(CultureInfo.InvariantCulture,
                "Cannot read the value of {0} in {1}. {2}",
                member.Name, ConceptInfoType.Name, valueOrError.Error)));
        }

        member.SetMemberValue(conceptInfo, valueOrError.Value);
        firstMember = false;
    }

    return(ValueOrError <IConceptInfo> .CreateValue(conceptInfo));
}
/// <summary>
/// Attempts to read one token from the buffered bytes via a fresh token reader.
/// Throws when the buffer has already grown past the configured maximum message size.
/// </summary>
private bool TryRead(ref ReadOnlySequence <byte> buffer, ref ITokenReaderState tokenReaderState, out ReadOnlySequence <byte> token, out long consumedBytes)
{
    token = new ReadOnlySequence <byte>();
    consumedBytes = 0;

    long bufferLength = buffer.Length;
    if (bufferLength >= this.MaximumMessageSize)
    {
        throw Assert.Exception(new InvalidOperationException($"Maximum message size exceeded."));
    }

    ITokenReader tokenReader = this.CreateTokenReader();
    return(tokenReader.Read(ref tokenReaderState, ref buffer, out token, out consumedBytes));
}
/// <summary>
/// Reads this concept's members from the token stream into a new syntax node,
/// embedding the enclosing concept into the resolved parent property when one exists.
/// </summary>
/// <param name="tokenReader">Source of DSL tokens.</param>
/// <param name="useLastConcept">Enclosing concept offered to the parent property; may be null.</param>
/// <param name="readingAReference">When true, only key members are read.</param>
/// <param name="parsedFirstReferenceElement">Dot-separation state within a reference group.</param>
private ValueOrError <ConceptSyntaxNode> ParseMembers(ITokenReader tokenReader, ConceptSyntaxNode useLastConcept, bool readingAReference, ref bool parsedFirstReferenceElement)
{
    ConceptSyntaxNode node = new ConceptSyntaxNode(_conceptType);
    bool firstMember = true;

    var listOfMembers = readingAReference ? _conceptType.Members.Where(m => m.IsKey) : _conceptType.Members.Where(m => m.IsParsable);
    var parentProperty = GetParentProperty(listOfMembers);

    if (useLastConcept != null && parentProperty == null)
    {
        return(ValueOrError <ConceptSyntaxNode> .CreateError($"This concept cannot be nested within {useLastConcept.Concept.TypeName}. Trying to read {_conceptType.TypeName}."));
    }

    foreach (ConceptMemberSyntax member in listOfMembers)
    {
        if (!readingAReference)
        {
            parsedFirstReferenceElement = false; // Reset a reference elements group, that should separated by dot.
        }

        // Only the parent property receives the enclosing concept.
        var valueOrError = ReadMemberValue(member, tokenReader, member == parentProperty ? useLastConcept : null, firstMember,
            ref parsedFirstReferenceElement, readingAReference);
        OnMemberRead?.Invoke(tokenReader, node, member, valueOrError);

        if (valueOrError.IsError)
        {
            return(ValueOrError <ConceptSyntaxNode> .CreateError(string.Format(CultureInfo.InvariantCulture,
                "Cannot read the value of {0} in {1}. {2}",
                member.Name, _conceptType.TypeName, valueOrError.Error)));
        }

        member.SetMemberValue(node, valueOrError.Value);
        firstMember = false;
    }

    return(ValueOrError <ConceptSyntaxNode> .CreateValue(node));
}
/// <summary>
/// Parses a line containing a directive reference.
/// </summary>
/// <param name="nameExpression">The regular expression to match the name of the directive.</param>
/// <param name="line">The line containing the directive.</param>
/// <param name="location">The location of the directive.</param>
/// <param name="parameterReader">The reader of the parsed directive parameters.</param>
/// <param name="directiveName">The parsed name of the directive.</param>
/// <returns>True if the directive was successfully parsed; otherwise, false.</returns>
/// <remarks>
/// Directive syntax is "#name params". NOTE(review): the Regex is rebuilt on
/// every call; if this is on a hot path, consider caching per nameExpression.
/// </remarks>
private static bool ParseDirectiveLine(string nameExpression, string line, Location location, out ITokenReader parameterReader, out string directiveName)
{
    parameterReader = null;
    directiveName = null;

    Regex re = new Regex("^#(?<name>" + nameExpression + ")(\\s+(?<params>.*))?$", RegexOptions.ExplicitCapture);

    var match = re.Match(line);
    if(!match.Success)
    {
        return false;
    }

    directiveName = match.Groups["name"].Value;
    var parametersText = match.Groups["params"].Value;

    // Advance a copy of the directive's location to where the parameters begin,
    // so tokens read from the parameters carry accurate positions.
    var parametersLocation = location.Clone();
    parametersLocation.AdvanceString(line.Substring(0, match.Groups["params"].Index));

    var parametersReader = new LocatedTextReaderWrapper(parametersText, parametersLocation);
    parameterReader = new TokenReader(parametersReader);
    return true;
}
/// <summary>
/// Collects the given non-null previous tokens plus all remaining tokens, and
/// rewrites them into a single shorthand expression node.
/// </summary>
private NodeBase ReadShorthandNode(ITokenReader tokenReader, params Token[] previousTokens)
{
    ICollection<Token> collected = new List<Token>(previousTokens.Where((token) => token != null));

    for(Token next = tokenReader.ReadToken(); next != null; next = tokenReader.ReadToken())
    {
        collected.Add(next);
    }

    return ExpressionRewriter.Rewrite(collected);
}
// Switches the tokenizer's input to a new character encoding mid-stream. Only
// applies when the current source is a StreamTokenReader: the stream is
// repositioned to the byte offset matching the characters consumed so far, then
// re-wrapped with the requested encoding.
// NOTE(review): GetByteCount over a same-length dummy string of '\0' assumes every
// consumed character had the same byte width — confirm this holds for the
// encodings in use (it does for fixed-width encodings).
internal void ChangeFormat( System.Text.Encoding encoding )
{
    if (encoding == null)
    {
        return;
    }

    StreamTokenReader reader = _input as StreamTokenReader;
    if (reader == null)
    {
        return;
    }

    Stream stream = reader._in.BaseStream;
    String fakeReadString = new String( new char[reader.NumCharEncountered] );
    stream.Position = reader._in.CurrentEncoding.GetByteCount( fakeReadString );
    _input = new StreamTokenReader( new StreamReader( stream, encoding ) );
}
/// <summary>Creates a tokenizer that reads characters from an in-memory array.</summary>
internal Tokenizer (char[] array)
{
    LineNo = 1;
    _fintag = false;
    _cstr = null;
    _sbarray = new char[_sbmaxsize];
    _sbindex = 0;
    _input = new CharTokenReader(array);
}
/// <summary>
/// Reads the value for a single concept member from the token stream: a string
/// literal, the enclosing concept (as a parent reference), or a nested concept
/// reference parsed recursively via a sub-parser.
/// </summary>
/// <param name="member">The member being read.</param>
/// <param name="tokenReader">Source of DSL tokens.</param>
/// <param name="lastConcept">Enclosing concept, if any; consumed by parent-style members.</param>
/// <param name="firstMember">True when reading the concept's first member.</param>
/// <param name="lastPropertyWasInlineParent">Set when an inline parent was just read, forcing a "." separator before the next key member.</param>
/// <param name="lastConceptUsed">Set when <paramref name="lastConcept"/> was consumed by this member.</param>
/// <param name="readingAReference">True when reading members of a reference (key members only).</param>
/// <returns>The member value, or an error; DslSyntaxException is converted to an error result.</returns>
// NOTE(review): the first error string below contains a garbled "&#13;&#10;"
// sequence mid-literal — looks like an extraction artifact of an embedded line
// break; confirm against the original source before touching the message text.
public ValueOrError<object> ReadMemberValue(ConceptMember member, ITokenReader tokenReader, IConceptInfo lastConcept, bool firstMember,
    ref bool lastPropertyWasInlineParent, ref bool lastConceptUsed, bool readingAReference)
{
    try
    {
        if (lastPropertyWasInlineParent && member.IsKey && !member.IsConceptInfo) // TODO: Removing "IsConceptInfo" from this condition would produce a mismatch. Think of a better solution for parsing the concept key.
        {
            if (!tokenReader.TryRead("."))
                return ValueOrError<object>.CreateError(string.Format(
                    "Parent property and the following key value ({0}) must be separated with a dot. Expected \".\"",
                    member.Name));
        }
        lastPropertyWasInlineParent = false;

        if (member.IsStringType)
            return tokenReader.ReadText();

        if (member.ValueType == typeof(IConceptInfo))
            if (firstMember && lastConcept != null)
            {
                lastConceptUsed = true;
                return (object)lastConcept;
            }
            else
                return ValueOrError<object>.CreateError("Member of type IConceptInfo can only be used as a first member and enclosed within the referenced parent concept.");

        if (member.IsConceptInfo && lastConcept != null && member.ValueType.IsInstanceOfType(lastConcept) && member.ValueType.IsAssignableFrom(ConceptInfoType)) // Recursive "parent" property
        {
            lastConceptUsed = true;
            return (object)lastConcept;
        }

        if (member.IsConceptInfo && lastConcept != null && member.ValueType.IsInstanceOfType(lastConcept) && firstMember)
        {
            lastConceptUsed = true;
            return (object)lastConcept;
        }

        if (member.IsConceptInfo && firstMember)
        {
            if (member.ValueType == ConceptInfoType)
                return ValueOrError.CreateError(string.Format(
                    "Recursive concept {0} cannot be used as a root because its parent property ({1}) must reference another concept. &#13;&#10;Use a non-recursive concept for the root and a derivation of the root concept with additional parent property as a recursive concept.",
                    ConceptInfoHelper.GetKeywordOrTypeName(ConceptInfoType), member.Name));

            if (!readingAReference && Members.Where(m => m.IsParsable).Count() == 1)
            {
                // This validation is not necessary for consistent parsing. It is enforced simply to avoid ambiguity when parsing
                // similar concepts such as "Logging { AllProperties; }", "History { AllProperties; }" and "Persisted { AllProperties; }".
                var parentMembers = ConceptMembers.Get(member.ValueType).Where(m => m.IsParsable).ToArray();
                if (parentMembers.Count() == 1 && parentMembers.Single().IsConceptInfo)
                    return ValueOrError.CreateError(string.Format(
                        "{0} must be enclosed within the referenced parent concept {1}. A single-reference concept that references another single-reference concept must always be used with embedded syntax to avoid ambiguity.",
                        ConceptInfoHelper.GetKeywordOrTypeName(ConceptInfoType), ConceptInfoHelper.GetKeywordOrTypeName(member.ValueType)));
            }

            // First member is a reference: parse it as an inline parent.
            GenericParser subParser = new GenericParser(member.ValueType, "");
            lastConceptUsed = true;
            lastPropertyWasInlineParent = true;
            return subParser.ParseMembers(tokenReader, lastConcept, true).ChangeType<object>();
        }

        if (member.IsConceptInfo)
        {
            // Non-first reference member: parse as a plain reference (key members only).
            GenericParser subParser = new GenericParser(member.ValueType, "");
            return subParser.ParseMembers(tokenReader, null, true).ChangeType<object>();
        }

        return ValueOrError.CreateError(string.Format(
            "GenericParser does not support members of type \"{0}\". Try using string or implementation of IConceptInfo.",
            member.ValueType.Name));
    }
    catch (DslSyntaxException ex)
    {
        return ValueOrError<object>.CreateError(ex.Message);
    }
}
/// <summary>
/// Parses this concept if the next token matches its keyword; the enclosing
/// concept (top of the context stack, if any) is offered as the parent.
/// Returns an empty error when the keyword does not match.
/// </summary>
public virtual ValueOrError<IConceptInfo> Parse(ITokenReader tokenReader, Stack<IConceptInfo> context)
{
    if (!tokenReader.TryRead(Keyword))
        return ValueOrError<IConceptInfo>.CreateError("");

    IConceptInfo enclosingConcept = context.Count > 0 ? context.Peek() : null;
    return ParseMembers(tokenReader, enclosingConcept, false);
}
// Reads the parsable members (or only key members, for references) of
// ConceptInfoType from the token stream and builds a new concept instance;
// reports an error when a member fails to parse or when the enclosing concept
// was not consumed by any member.
private ValueOrError<IConceptInfo> ParseMembers(ITokenReader tokenReader, IConceptInfo lastConcept, bool readingAReference)
{
    IConceptInfo conceptInfo = (IConceptInfo)Activator.CreateInstance(ConceptInfoType);
    bool firstMember = true;
    bool lastPropertyWasInlineParent = false;
    bool lastConceptUsed = false;

    var listOfMembers = readingAReference ? Members.Where(m => m.IsKey) : Members.Where(m => m.IsParsable);

    foreach (ConceptMember member in listOfMembers)
    {
        var valueOrError = ReadMemberValue(member, tokenReader, lastConcept, firstMember,
            ref lastPropertyWasInlineParent, ref lastConceptUsed, readingAReference);

        if (valueOrError.IsError)
            return ValueOrError<IConceptInfo>.CreateError(string.Format(CultureInfo.InvariantCulture,
                "Cannot read the value of {0} in {1}. {2}",
                member.Name, ConceptInfoType.Name, valueOrError.Error));

        member.SetMemberValue(conceptInfo, valueOrError.Value);
        firstMember = false;
    }

    // If an enclosing concept was supplied but no member consumed it,
    // this concept cannot be nested at the current position.
    if (!lastConceptUsed && lastConcept != null)
        return ValueOrError<IConceptInfo>.CreateError(string.Format(
            "This concept cannot be enclosed within {0}. Trying to read {1}.",
            lastConcept.GetType().Name, ConceptInfoType.Name));

    return ValueOrError<IConceptInfo>.CreateValue(conceptInfo);
}
/// <summary>
/// Creates a consumer that pulls tokens from <paramref name="reader"/> and
/// validates them against <paramref name="rules"/>.
/// </summary>
public TokenConsumer(ITokenReader reader, IEnumerable<ITokenValidationRule> rules)
{
    _rules = rules;
    _reader = reader;
}
/// <summary>
/// Node for an #else directive. It carries no parameters or state of its own,
/// so the constructor only forwards the name/location to the base directive node;
/// the token and node readers are accepted for signature compatibility with the
/// reflective directive factory but are intentionally unused.
/// </summary>
public ElseNode(ITokenReader tokenReader, INodeReader nodeReader, string directiveName = null, Location location = null) :
    base(directiveName, location)
{
}
/// <summary>
/// Creates an instance of a directive node via reflection.
/// </summary>
/// <param name="nodeType">Type of the directive node to construct.</param>
/// <param name="tokenReader">The token reader passed to the node's constructor.</param>
/// <param name="nodeReader">The node reader passed to the node's constructor.</param>
/// <param name="directiveName">Name of the directive.</param>
/// <param name="location">The location of the directive.</param>
/// <returns>The new <see cref="DirectiveNode"/> instance, or null when the constructed object is not a <see cref="DirectiveNode"/>.</returns>
private static DirectiveNode CreateInstance(Type nodeType, ITokenReader tokenReader, INodeReader nodeReader, string directiveName, Location location)
{
    object instance = Activator.CreateInstance(nodeType, tokenReader, nodeReader, directiveName, location);
    return instance as DirectiveNode;
}
// Switches the tokenizer's input to a new character encoding mid-stream.
// Byte-array sources that already match a known encoding are retagged in place;
// already-decoded sources (string/char) need no change; anything else is rewrapped
// in a new StreamReader over the underlying stream at the current read position.
internal void ChangeFormat(System.Text.Encoding encoding)
{
    // No target encoding: nothing to do.
    if (encoding == null)
    {
        return;
    }

    // A buffered lookahead character would be lost by switching decoders now.
    Contract.Assert(_inSavedCharacter == -1, "There was a lookahead character at the stream change point, that means the parser is changing encodings too late");

    switch (_inTokenSource)
    {
        case TokenSource.UnicodeByteArray:
        case TokenSource.UTF8ByteArray:
        case TokenSource.ASCIIByteArray:
            // these are the ones we can change on the fly
            if (encoding == System.Text.Encoding.Unicode)
            {
                _inTokenSource = TokenSource.UnicodeByteArray;
                return;
            }

            if (encoding == System.Text.Encoding.UTF8)
            {
                _inTokenSource = TokenSource.UTF8ByteArray;
                return;
            }

#if FEATURE_ASCII
            if (encoding == System.Text.Encoding.ASCII)
            {
                _inTokenSource = TokenSource.ASCIIByteArray;
                return;
            }
#endif
            // Target encoding is not one of the retaggable ones; fall through to the slow path below.
            break;

        case TokenSource.String:
        case TokenSource.CharArray:
        case TokenSource.NestedStrings:
            // these are already unicode and encoding changes are moot
            // they can't be further decoded
            return;
    }

    // if we're here it means we don't know how to change
    // to the desired encoding with the memory that we have
    // we'll have to do this the hard way -- that means
    // creating a suitable stream from what we've got

    // this is thankfully the rare case as UTF8 and unicode
    // dominate the scene

    Stream stream = null;

    switch (_inTokenSource)
    {
        case TokenSource.UnicodeByteArray:
        case TokenSource.UTF8ByteArray:
        case TokenSource.ASCIIByteArray:
            // Wrap the unread remainder of the byte buffer so the new reader starts where we left off.
            stream = new MemoryStream(_inBytes, _inIndex, _inSize - _inIndex);
            break;

        case TokenSource.CharArray:
        case TokenSource.String:
        case TokenSource.NestedStrings:
            // Unreachable: these sources returned from the first switch above.
            Contract.Assert(false, "attempting to change encoding on a non-changable source, should have been prevented earlier");
            return;

        default:
            StreamTokenReader reader = _inTokenReader as StreamTokenReader;
            if (reader == null)
            {
                Contract.Assert(false, "A new input source type has been added to the Tokenizer but it doesn't support encoding changes");
                return;
            }

            stream = reader._in.BaseStream;

            Contract.Assert(reader._in.CurrentEncoding != null, "Tokenizer's StreamReader does not have an encoding");

            // Reposition the stream to the byte offset corresponding to the characters already consumed,
            // by measuring how many bytes that many characters occupy in the current encoding.
            // NOTE(review): this assumes every consumed character encodes like ' ' (single unit) — exact only for
            // fixed-width content; presumably acceptable for the XML prolog this supports. Confirm.
            String fakeReadString = new String(' ', reader.NumCharEncountered);
            stream.Position = reader._in.CurrentEncoding.GetByteCount(fakeReadString);
            break;
    }

    Contract.Assert(stream != null, "The XML stream with new encoding was not properly initialized for kind of input we had");

    // we now have an initialized memory stream based on whatever source we had before
    _inTokenReader = new StreamTokenReader(new StreamReader(stream, encoding));
    _inTokenSource = TokenSource.Other;
}
// Initializes the tokenizer to read characters from an existing StreamReader.
internal Tokenizer (StreamReader input)
{
    // Shared setup for all constructor overloads.
    BasicInitialization();
    _inTokenReader = new StreamTokenReader(input);
}
// Initializes the tokenizer over a raw stream, decoded through a default StreamReader.
internal Tokenizer(Stream input)
{
    _input = new StreamTokenReader(new StreamReader(input));

    // Start on the first line, with no tag in progress and no pending string.
    LineNo = 1;
    _fintag = false;
    _cstr = null;

    // Scratch character buffer used while accumulating tokens.
    _sbarray = new char[_sbmaxsize];
    _sbindex = 0;
}
// Switches the tokenizer's input to a new character encoding mid-stream.
// NOTE(review): this method appears twice in this file (two near-identical copies of the
// reference-source Tokenizer) — consider consolidating to a single definition.
internal void ChangeFormat( System.Text.Encoding encoding )
{
    // No target encoding: nothing to do.
    if (encoding == null)
    {
        return;
    }

    // A buffered lookahead character would be lost by switching decoders now.
    Contract.Assert( _inSavedCharacter == -1, "There was a lookahead character at the stream change point, that means the parser is changing encodings too late" );

    switch (_inTokenSource)
    {
        case TokenSource.UnicodeByteArray:
        case TokenSource.UTF8ByteArray:
        case TokenSource.ASCIIByteArray:
            // these are the ones we can change on the fly
            if (encoding == System.Text.Encoding.Unicode)
            {
                _inTokenSource = TokenSource.UnicodeByteArray;
                return;
            }

            if (encoding == System.Text.Encoding.UTF8)
            {
                _inTokenSource = TokenSource.UTF8ByteArray;
                return;
            }

#if FEATURE_ASCII
            if (encoding == System.Text.Encoding.ASCII)
            {
                _inTokenSource = TokenSource.ASCIIByteArray;
                return;
            }
#endif
            // Target encoding is not one of the retaggable ones; fall through to the slow path below.
            break;

        case TokenSource.String:
        case TokenSource.CharArray:
        case TokenSource.NestedStrings:
            // these are already unicode and encoding changes are moot
            // they can't be further decoded
            return;
    }

    // if we're here it means we don't know how to change
    // to the desired encoding with the memory that we have
    // we'll have to do this the hard way -- that means
    // creating a suitable stream from what we've got

    // this is thankfully the rare case as UTF8 and unicode
    // dominate the scene

    Stream stream = null;

    switch (_inTokenSource)
    {
        case TokenSource.UnicodeByteArray:
        case TokenSource.UTF8ByteArray:
        case TokenSource.ASCIIByteArray:
            // Wrap the unread remainder of the byte buffer so the new reader starts where we left off.
            stream = new MemoryStream(_inBytes, _inIndex, _inSize - _inIndex);
            break;

        case TokenSource.CharArray:
        case TokenSource.String:
        case TokenSource.NestedStrings:
            // Unreachable: these sources returned from the first switch above.
            Contract.Assert(false, "attempting to change encoding on a non-changable source, should have been prevented earlier" );
            return;

        default:
            StreamTokenReader reader = _inTokenReader as StreamTokenReader;
            if (reader == null)
            {
                Contract.Assert(false, "A new input source type has been added to the Tokenizer but it doesn't support encoding changes");
                return;
            }

            stream = reader._in.BaseStream;

            Contract.Assert( reader._in.CurrentEncoding != null, "Tokenizer's StreamReader does not have an encoding" );

            // Reposition the stream to the byte offset corresponding to the characters already consumed,
            // by measuring how many bytes that many characters occupy in the current encoding.
            // NOTE(review): this assumes every consumed character encodes like ' ' (single unit) — exact only for
            // fixed-width content; presumably acceptable for the XML prolog this supports. Confirm.
            String fakeReadString = new String(' ', reader.NumCharEncountered);
            stream.Position = reader._in.CurrentEncoding.GetByteCount( fakeReadString );
            break;
    }

    Contract.Assert(stream != null, "The XML stream with new encoding was not properly initialized for kind of input we had");

    // we now have an initialized memory stream based on whatever source we had before
    _inTokenReader = new StreamTokenReader( new StreamReader( stream, encoding ) );
    _inTokenSource = TokenSource.Other;
}
/// <summary>
/// Parses a simple concept: the keyword followed by a mandatory "-" token.
/// </summary>
/// <param name="tokenReader">Token reader positioned at the candidate keyword.</param>
/// <param name="context">Stack of enclosing concepts (unused by this parser).</param>
/// <returns>An empty <c>SimpleConceptInfo</c> on success, or an empty error value when the keyword does not match.</returns>
public ValueOrError<IConceptInfo> Parse(ITokenReader tokenReader, Stack<IConceptInfo> context)
{
    if (tokenReader.ReadText() != Keyword)
        return ValueOrError<IConceptInfo>.CreateError("");

    // Consume the mandatory "-" separator; ErrorMessage is reported if it is missing.
    tokenReader.Read("-", ErrorMessage);
    return new SimpleConceptInfo("", "");
}
/// <summary>
/// Convenience entry point: rewrites the token stream into an expression tree using a fresh rewriter instance.
/// </summary>
/// <param name="tokenReader">Source of tokens for the expression.</param>
/// <returns>The root node of the rewritten expression.</returns>
public static TokenNode Rewrite(ITokenReader tokenReader)
{
    var rewriter = new ExpressionRewriter(tokenReader);
    return rewriter.Rewrite();
}