public void KeywordMultiple()
{
    // Tokenize a stream containing every recognized keyword (booleans repeated)
    // and verify each comes back as a TokenKeyword with the matching value.
    Tokenizer tokenizer = new Tokenizer(StringToStream("true false true false null stream endstream " + "obj endobj R xref trailer startxref"));
    ParseKeyword[] expectedKeywords =
    {
        ParseKeyword.True, ParseKeyword.False, ParseKeyword.True, ParseKeyword.False,
        ParseKeyword.Null, ParseKeyword.Stream, ParseKeyword.EndStream, ParseKeyword.Obj,
        ParseKeyword.EndObj, ParseKeyword.R, ParseKeyword.XRef, ParseKeyword.Trailer,
        ParseKeyword.StartXRef
    };
    foreach (ParseKeyword expected in expectedKeywords)
    {
        TokenKeyword keyword = tokenizer.GetToken() as TokenKeyword;
        Assert.NotNull(keyword);
        Assert.True(keyword.Value == expected);
    }
    // After the last keyword the tokenizer must report end-of-input.
    Assert.True(tokenizer.GetToken() is TokenEmpty);
}
public void KeywordStream()
{
    // A lone "stream" keyword must produce exactly one TokenKeyword,
    // then end-of-input.
    Tokenizer tokenizer = new Tokenizer(StringToStream("stream"));
    TokenKeyword keyword = tokenizer.GetToken() as TokenKeyword;
    Assert.NotNull(keyword);
    Assert.True(keyword.Value == ParseKeyword.Stream);
    Assert.True(tokenizer.GetToken() is TokenEmpty);
}
public void KeywordEndObj()
{
    // A lone "endobj" keyword must produce exactly one TokenKeyword,
    // then end-of-input.
    Tokenizer tokenizer = new Tokenizer(StringToStream("endobj"));
    TokenKeyword keyword = tokenizer.GetToken() as TokenKeyword;
    Assert.NotNull(keyword);
    Assert.True(keyword.Value == ParseKeyword.EndObj);
    Assert.True(tokenizer.GetToken() is TokenEmpty);
}
/// <summary>
/// Initializes a token with its classification and the raw pieces captured
/// by the tokenizer. All values are stored as-is; no validation is performed
/// (null arguments are accepted — TODO confirm callers never rely on that).
/// </summary>
/// <param name="type">Broad token category.</param>
/// <param name="keyword">Associated keyword token, if any.</param>
/// <param name="expression">Raw expression text of the token.</param>
/// <param name="formatOrLocalName">Format string or local name (fixes the
/// original parameter's typo "formatOrlLocalName"; positional callers are
/// unaffected — only a named-argument caller using the misspelled name,
/// if any exists, would need updating).</param>
/// <param name="modifiers">Modifier text attached to the token.</param>
/// <param name="position">Position of the token in the input.</param>
public Token(TokenType type, TokenKeyword keyword, String expression, String formatOrLocalName, String modifiers, Int32 position)
{
    this.Type = type;
    this.Keyword = keyword;
    this.Expression = expression;
    this.FormatOrLocalName = formatOrLocalName;
    this.Modifiers = modifiers;
    this.Position = position;
}
public void OneOfEachSpaced()
{
    // One token of each kind, separated by whitespace, ending with a comment.
    // Comments must be surfaced, so comment skipping is turned off.
    Tokenizer tokenizer = new Tokenizer(StringToStream(" [ <64 65> << true /Name 1 3.14 >> ] %comment"));
    tokenizer.IgnoreComments = false;

    TokenArrayOpen arrayOpen = tokenizer.GetToken() as TokenArrayOpen;
    Assert.NotNull(arrayOpen);

    TokenStringHex hexString = tokenizer.GetToken() as TokenStringHex;
    Assert.NotNull(hexString);
    Assert.True(hexString.Raw == "64 65");

    TokenDictionaryOpen dictionaryOpen = tokenizer.GetToken() as TokenDictionaryOpen;
    Assert.NotNull(dictionaryOpen);

    TokenKeyword keyword = tokenizer.GetToken() as TokenKeyword;
    Assert.NotNull(keyword);
    Assert.True(keyword.Value == ParseKeyword.True);

    TokenName name = tokenizer.GetToken() as TokenName;
    Assert.NotNull(name);
    Assert.True(name.Value == "Name");

    TokenInteger integer = tokenizer.GetToken() as TokenInteger;
    Assert.NotNull(integer);
    Assert.True(integer.Value == 1);

    TokenReal real = tokenizer.GetToken() as TokenReal;
    Assert.NotNull(real);
    Assert.True(real.Value == 3.14f);

    TokenDictionaryClose dictionaryClose = tokenizer.GetToken() as TokenDictionaryClose;
    Assert.NotNull(dictionaryClose);

    TokenArrayClose arrayClose = tokenizer.GetToken() as TokenArrayClose;
    Assert.NotNull(arrayClose);

    TokenComment comment = tokenizer.GetToken() as TokenComment;
    Assert.NotNull(comment);
    Assert.True(comment.Value == "%comment");

    // Nothing left after the trailing comment.
    Assert.True(tokenizer.GetToken() is TokenEmpty);
}
// Reads the next keyword token and returns it, or throws a
// ParseFailureException (at the token's location) when the keyword
// is not the one the caller expects.
private TokenKeyword Consume(KeywordType keywordType)
{
    TokenKeyword keyword = this.GetKeyword();
    if (keyword.KeywordType != keywordType)
    {
        throw new ParseFailureException(string.Format("Expected keyword {0} but got {1}.", keywordType.ToString(), keyword.KeywordType), keyword.Location);
    }
    return keyword;
}
// Non-consuming check: true when the next token is a keyword of the
// requested kind; false when it is absent or a different keyword.
private bool PeekKeyword(KeywordType keywordType)
{
    TokenKeyword peeked = this.PeekAs<TokenKeyword>();
    return peeked != null && peeked.KeywordType == keywordType;
}
// Consumes the next token only when it is the requested keyword.
// Returns true (and dequeues) on a match; otherwise leaves the queue
// untouched and returns false.
private bool TryConsume(KeywordType keywordType)
{
    TokenKeyword peeked = this.PeekAs<TokenKeyword>();
    if (peeked != null && peeked.KeywordType == keywordType)
    {
        this.m_queue.Dequeue();
        return true;
    }
    return false;
}
// Attempts to read one token from the input. Candidate token objects are
// constructed eagerly, in the original order, because their constructors
// receive this tokenizer (side effects of construction are preserved).
// The candidates are then probed in priority order — note bool is tried
// before keyword — and the first successful one is appended to m_Tokens.
// Returns false when no candidate matches.
private bool ParseToken()
{
    TokenComment tokenComment = new TokenComment(this);
    TokenKeyword tokenKeyword = new TokenKeyword(this);
    TokenLeftBracket tokenLeftBracket = new TokenLeftBracket(this);
    TokenRightBracket tokenRightBracket = new TokenRightBracket(this);
    TokenString tokenString = new TokenString(this);
    TokenComma tokenComma = new TokenComma(this);
    TokenFloat tokenFloat = new TokenFloat(this);
    TokenInt tokenInt = new TokenInt(this);
    TokenBool tokenBool = new TokenBool(this);
    TokenLeftCurlyBracket tokenLeftCurlyBracket = new TokenLeftCurlyBracket(this);
    TokenRightCurlyBracket tokenRightCurlyBracket = new TokenRightCurlyBracket(this);

    if (tokenComment.TryToGet()) { this.m_Tokens.Add(tokenComment); return true; }
    if (tokenBool.TryToGet()) { this.m_Tokens.Add(tokenBool); return true; }
    if (tokenKeyword.TryToGet()) { this.m_Tokens.Add(tokenKeyword); return true; }
    if (tokenLeftBracket.TryToGet()) { this.m_Tokens.Add(tokenLeftBracket); return true; }
    if (tokenRightBracket.TryToGet()) { this.m_Tokens.Add(tokenRightBracket); return true; }
    if (tokenString.TryToGet()) { this.m_Tokens.Add(tokenString); return true; }
    if (tokenComma.TryToGet()) { this.m_Tokens.Add(tokenComma); return true; }
    if (tokenFloat.TryToGet()) { this.m_Tokens.Add(tokenFloat); return true; }
    if (tokenInt.TryToGet()) { this.m_Tokens.Add(tokenInt); return true; }
    if (tokenLeftCurlyBracket.TryToGet()) { this.m_Tokens.Add(tokenLeftCurlyBracket); return true; }
    if (tokenRightCurlyBracket.TryToGet()) { this.m_Tokens.Add(tokenRightCurlyBracket); return true; }
    return false;
}
/// <summary>
/// Parses a complete MOF specification from the token queue: a StartOfInput
/// token, then a sequence of productions (class definitions, pragmas, each
/// optionally preceded by a qualifier list), terminated by EndOfInput.
/// Throws ParseFailureException on any malformed input.
/// </summary>
private MofSpecification ParseMofSpecification()
{
    Token token;
    DocumentRange documentRange;
    List<MofProduction> mofProductions = new List<MofProduction>();
    if (this.m_queue.Count > 0)
    {
        if (this.m_queue.Dequeue().Type == TokenType.StartOfInput)
        {
            // Holds a qualifier list read from an OpenBracket until the
            // production it decorates consumes it; null otherwise.
            QualifierList qualifierList = null;
            while (true)
            {
                if (this.m_queue.Count <= 0)
                {
                    // Input exhausted without an explicit EndOfInput token.
                    documentRange = new DocumentRange();
                    throw new ParseFailureException("No EndOfInput token found at the end of the input", documentRange);
                }
                token = this.m_queue.Peek();
                TokenType type = token.Type;
                if (type == TokenType.EndOfInput)
                {
                    if (qualifierList == null)
                    {
                        return(new MofSpecification(mofProductions.ToArray()));
                    }
                    else
                    {
                        // Qualifiers were read but no production followed them.
                        throw new ParseFailureException("Found qualifiers that are not applied to any production.", token.Location);
                    }
                }
                else
                {
                    if (type == TokenType.Identifier)
                    {
                        if (qualifierList == null)
                        {
                            // Production with no explicit qualifiers gets an empty list.
                            qualifierList = new QualifierList(new Qualifier[0]);
                        }
                        TokenIdentifier tokenIdentifier = (TokenIdentifier)token;
                        if (!tokenIdentifier.IsKeyword)
                        {
                            throw new ParseFailureException(string.Format("Unexpected identifier: {0}", tokenIdentifier), tokenIdentifier.Location);
                        }
                        else
                        {
                            TokenKeyword tokenKeyword = (TokenKeyword)token;
                            KeywordType keywordType = tokenKeyword.KeywordType;
                            if (keywordType == KeywordType.CLASS)
                            {
                                // "class" production; ParseClass consumes the keyword and body.
                                mofProductions.Add(this.ParseClass(qualifierList));
                                qualifierList = null;
                                continue;
                            }
                            else
                            {
                                // NOTE(review): empty branch looks like decompiler residue —
                                // INSTANCE keywords currently fall through to the same
                                // "unexpected keyword" failure as every other keyword.
                                // Presumably instance parsing was intended here; verify
                                // against the original sources.
                                if (keywordType != KeywordType.INSTANCE)
                                {
                                }
                                throw new ParseFailureException(string.Format("Unexpected keyword: {0}", tokenKeyword), tokenKeyword.Location);
                            }
                        }
                    }
                    else if (type == TokenType.Alias)
                    {
                        // Alias at specification level is unsupported: exit the loop
                        // and report "Unexpected token" below.
                        break;
                    }
                    else if (type == TokenType.Pragma)
                    {
                        if (qualifierList == null)
                        {
                            mofProductions.Add(this.ParseCompilerDirective());
                            continue;
                        }
                        else
                        {
                            throw new ParseFailureException("Qualifiers are not legal on pragmas.", token.Location);
                        }
                    }
                    // Any other token kind except OpenBracket is also unexpected.
                    if (type != TokenType.OpenBracket)
                    {
                        break;
                    }
                    // OpenBracket starts a qualifier list for the next production.
                    qualifierList = this.ParseQualifierList();
                }
            }
            // Reached only via a break above: the peeked token cannot start a production.
            throw new ParseFailureException("Unexpected token", token.Location);
        }
        else
        {
            // The queue had tokens but did not begin with StartOfInput.
            DocumentCoordinate documentCoordinate = new DocumentCoordinate();
            DocumentCoordinate documentCoordinate1 = new DocumentCoordinate();
            throw new ParseFailureException("Expected a StartOfInput token.", new DocumentRange(this.m_filePath, documentCoordinate, documentCoordinate1));
        }
    }
    // Completely empty queue: treat as missing EndOfInput.
    documentRange = new DocumentRange();
    throw new ParseFailureException("No EndOfInput token found at the end of the input", documentRange);
}
// Consumes the next keyword token and maps it onto the corresponding
// built-in MOF data type. Throws ParseFailureException (at the keyword's
// location) when the keyword is not a built-in type name.
private DataType ParseBuiltInType()
{
    TokenKeyword keyword = this.GetKeyword();
    switch (keyword.KeywordType)
    {
        case KeywordType.DT_BOOL:     return MofDataType.Bool;
        case KeywordType.DT_CHAR16:   return MofDataType.Char16;
        case KeywordType.DT_DATETIME: return MofDataType.DateTime;
        case KeywordType.DT_REAL32:   return MofDataType.Real32;
        case KeywordType.DT_REAL64:   return MofDataType.Real64;
        case KeywordType.DT_SINT8:    return MofDataType.SInt8;
        case KeywordType.DT_SINT16:   return MofDataType.SInt16;
        case KeywordType.DT_SINT32:   return MofDataType.SInt32;
        case KeywordType.DT_SINT64:   return MofDataType.SInt64;
        case KeywordType.DT_STR:      return MofDataType.String;
        case KeywordType.DT_UINT8:    return MofDataType.UInt8;
        case KeywordType.DT_UINT16:   return MofDataType.UInt16;
        case KeywordType.DT_UINT32:   return MofDataType.UInt32;
        case KeywordType.DT_UINT64:   return MofDataType.UInt64;
        default:
            throw new ParseFailureException(string.Format("Expected a built-in data type, but got: {0}", keyword.KeywordType.ToString()), keyword.Location);
    }
}