/// <summary>
/// Attempts to deserialize <paramref name="json"/> into a <see cref="Token"/>.
/// </summary>
/// <param name="json">JSON text, typically produced by <c>Token.Serialize</c>.</param>
/// <param name="token">The deserialized token, or null on failure.</param>
/// <returns>true when deserialization produced a non-null token.</returns>
public static bool TryDeserialize(string json, out Token token)
{
    try
    {
        token = JsonConvert.DeserializeObject<Token>(json);

        // JsonConvert.DeserializeObject returns null without throwing for
        // inputs such as "null" or an empty string; report that as a failure
        // instead of returning true with a null token (the original did).
        return token != null;
    }
    catch (Exception)
    {
        // Malformed JSON: a Try method reports failure rather than throwing.
        token = null;
        return false;
    }
}
static void Main(string[] args)
{
    // This key must be shared between the authorization server and the
    // publisher so tokens created here can be verified there.
    // NOTE(review): a hard-coded key is acceptable only for an example; a
    // real deployment should load it from protected configuration.
    const string Key = "SecretKey";

    using (var context = NetMQContext.Create())
    {
        using (var response = context.CreateResponseSocket())
        {
            response.Bind("tcp://*:5557");

            // REQ/REP loop: each request is a multi-frame message whose
            // first frame is the command name.
            while (true)
            {
                var requestMessage = response.ReceiveMessage();
                string command = requestMessage.Pop().ConvertToString();

                // After popping the command frame a GetToken request carries
                // exactly three more frames: username, password, subscription.
                if (command == AuthorizationProtocol.GetTokenCommand && requestMessage.FrameCount == 3)
                {
                    string username = requestMessage.Pop().ConvertToString();
                    string password = requestMessage.Pop().ConvertToString();
                    string subscription = requestMessage.Pop().ConvertToString();

                    // TODO: validating username and password is not part
                    // of the example
                    // TODO: validate that the user has permission to
                    // the subscription is not part of the example

                    // Security fix: never log the plaintext password (the
                    // original wrote it to the console).
                    Console.WriteLine("Received GetTokenCommand {0} {1}", username, subscription);

                    // Create a token scoped to the requested subscription and
                    // signed with the shared key.
                    Token token = new Token(subscription, Key);

                    // send token to the client
                    response.
                        SendMore(AuthorizationProtocol.SuccessReply).
                        Send(token.Serialize());
                }
                else
                {
                    // unsupported command
                    response.Send(AuthorizationProtocol.ErrorReply);
                }
            }
        }
    }
}
// Captures the result of one parse of a text buffer.
//
// edit     - the edit count of the text this parse corresponds to
// key      - identifies the text that was parsed; must be non-empty
// index    - offset associated with any parse error
// length   - length associated with any parse error
// globals  - the root namespace, or null when parsing failed
// comments - comment tokens found while scanning
// tokens   - all other tokens found while scanning
public Parse(int edit, string key, int index, int length, CsGlobalNamespace globals, Token[] comments, Token[] tokens)
{
    Contract.Requires(!string.IsNullOrEmpty(key), "key is null or empty");
    Contract.Requires(index >= 0, "index is negative");
    Contract.Requires(length >= 0, "length is negative");
    Contract.Requires(comments != null, "comments is null");
    Contract.Requires(tokens != null, "tokens is null");

    // NOTE(review): given the `length >= 0` requirement above this condition
    // is always true, so the contract can never fire; presumably `length > 0`
    // was intended ("error length is set when globals is null") — confirm
    // against callers before changing.
    Contract.Requires(globals != null || length >= 0, "null globals but error length is not set");

    Key = key;
    Edit = edit;
    ErrorIndex = index;
    ErrorLength = length;
    Globals = globals;
    Comments = comments;
    Tokens = tokens;
}
// regular-string-literal:
//     "   regular-string-literal-characters?   "
//
// regular-string-literal-characters:
//     regular-string-literal-character
//     regular-string-literal-characters   regular-string-literal-character
//
// regular-string-literal-character:
//     single-regular-string-literal-character
//     simple-escape-sequence
//     hexadecimal-escape-sequence
//     unicode-escape-sequence
//
// single-regular-string-literal-character:
//     Any character except " (U+0022), \ (U+005C), and new-line-character
private void DoScanString()
{
    // Called with m_index on the opening quote.
    int offset = m_index;
    ++m_index;

    // Advance to the closing quote; a regular string may not span lines and
    // may not contain NUL (the scanner's end-of-text sentinel).
    while (Current != '"' && Current != '\n' && Current != '\r' && Current != '\x00')
    {
        // Skip the escaped character of \\ and \" so an escaped quote (or a
        // backslash immediately followed by one) does not end the literal.
        if (Current == '\\' && Next == '\\')
            ++m_index;
        else if (Current == '\\' && Next == '"')
            ++m_index;

        ++m_index;
    }

    if (Current == '"')
    {
        ++m_index;
        m_token = new Token(m_text, offset, m_index - offset, m_line, TokenKind.String);
    }
    else
    {
        // Hit a newline or end of text before the closing quote.
        throw new ScannerException("Expected a terminating '\"' on line {0}", m_line);
    }
}
// decimal-integer-literal:
//     decimal-digits   integer-type-suffix?
private void DoScanNumber()
{
    // Scans a numeric literal that begins with a decimal digit; hands off to
    // the float/exponent scanners when a '.' or exponent marker follows.
    int offset = m_index;

    // decimal-digits
    while (Current >= '0' && Current <= '9')
    {
        ++m_index;
    }

    if (Current == '.')
    {
        // Real literal with a decimal point, e.g. 1.5 — DoScanFloat emits
        // the token.
        ++m_index;
        DoScanFloat(offset);
    }
    else if (Current == 'e' || Current == 'E')
    {
        // Real literal with an exponent but no point, e.g. 1e10 —
        // DoScanExponent emits the token.
        DoScanExponent(offset);
    }
    else
    {
        // Integer literal: consume an optional one- or two-character integer
        // suffix (u/l in either case, any order)...
        if (Current == 'u' || Current == 'l' || Current == 'U' || Current == 'L')
        {
            if (Next == 'u' || Next == 'l' || Next == 'U' || Next == 'L')
                m_index += 2;
            else
                m_index += 1;
        }
        else if (Current == 'f' || Current == 'd' || Current == 'm' || Current == 'F' || Current == 'D' || Current == 'M')
        {
            // ...or a real-type suffix (f/d/m), e.g. 1f.
            m_index += 1;
        }

        m_token = new Token(m_text, offset, m_index - offset, m_line, TokenKind.Number);
    }
}
// hexadecimal-integer-literal:
//     0x   hex-digits   integer-type-suffix?
//     0X   hex-digits   integer-type-suffix?
//
// integer-type-suffix:  one of
//     U  u  L  l  UL  Ul  uL  ul  LU  Lu  lU  lu
private void DoScanHexNumber()
{
    // Called with m_index on the leading '0'; skip the 0x/0X prefix (the
    // caller has already verified it is present).
    int offset = m_index;
    m_index += 2;

    // hex-digits
    // NOTE(review): a bare "0x" with no digits still produces a Number
    // token — confirm that lenience is intended.
    while ((Current >= '0' && Current <= '9') || (Current >= 'a' && Current <= 'f') || (Current >= 'A' && Current <= 'F'))
    {
        ++m_index;
    }

    // integer-type-suffix? (one or two of u/l, either case, any order)
    if (Current == 'u' || Current == 'l' || Current == 'U' || Current == 'L')
    {
        if (Next == 'u' || Next == 'l' || Next == 'U' || Next == 'L')
            m_index += 2;
        else
            m_index += 1;
    }

    m_token = new Token(m_text, offset, m_index - offset, m_line, TokenKind.Number);
}
// Advances the scanner to the next token: skips whitespace, preprocessor
// directives, and comments, then dispatches to the appropriate DoScanXxx
// based on the first character(s) of what follows.
private void DoAdvance(char* buffer)
{
    m_buffer = buffer;

    // skip whitespace and comments
    while (true)
    {
        int old = m_index;

        if (char.IsWhiteSpace(Current))
            DoSkipWhiteSpace();
        else if (Current == '#')
            DoSkipPreprocessor();
        else if (Current == '/' && Next == '/')
            DoSkipSingleLineComment();
        else if (Current == '/' && Next == '*')
            DoSkipDelimitedComment();
        else
            break;

        // Guard against a skip helper failing to make progress, which would
        // otherwise hang the scanner.
        Contract.Assert(m_index != old, string.Format("failed to advance from {0} '{1}' on line {2}", Token.Kind, Token.Text().EscapeAll(), Token.Line));
    }

    // identifier ('@' prefix lets keywords be used as identifiers)
    if (CsHelpers.CanStartIdentifier(Current) || Current == '_')
    {
        DoScanIdentifier();
    }
    else if (Current == '@' && (CsHelpers.CanStartIdentifier(Next) || Next == '_'))
    {
        ++m_index;
        DoScanIdentifier();
    }
    // number (hex, decimal, or a real beginning with '.')
    else if (Current == '0' && (Next == 'x' || Next == 'X'))
    {
        DoScanHexNumber();
    }
    else if (Current >= '0' && Current <= '9')
    {
        DoScanNumber();
    }
    else if (Current == '.' && Next >= '0' && Next <= '9')
    {
        ++m_index;
        DoScanFloat(m_index - 1);
    }
    // char
    else if (Current == '\'')
    {
        DoScanChar();
    }
    // string
    else if (Current == '"')
    {
        DoScanString();
    }
    // verbatim string (an '@' not followed by an identifier start falls
    // through to here)
    else if (Current == '@' && Next == '"')
    {
        DoScanVerbatimString();
    }
    // punctuation
    else if (char.IsPunctuation(Current) || char.IsSymbol(Current))
    {
        DoScanPunct();
    }
    // eof (NUL is the scanner's end-of-text sentinel)
    else if (Current == '\x00')
    {
        m_token = new Token(m_text, m_token.Line);
        ++m_index;
    }
    // catch all
    else
    {
        m_token = new Token(m_text, m_index, 1, m_line, TokenKind.Other);
        ++m_index;
    }
}
// StringLiteral := '"' StringChar+ '"'
// StringChar := any char but '"' or '""'
private void DoScanString()
{
    // Remember the line the literal started on so the token reports it even
    // if the literal spans lines.
    int line = m_line;
    int offset = m_index;
    m_index = m_index + 1;

    while (Current != '\x00')
    {
        // NOTE(review): whitespace inside the literal is routed through
        // DoSkipWhiteSpace, presumably so m_line stays current across
        // multi-line strings — confirm that helper only advances position
        // and line state.
        if (char.IsWhiteSpace(Current))
            DoSkipWhiteSpace();
        else if (Current == '"' && Next == '"')
            m_index = m_index + 2;      // "" escapes a quote
        else if (Current == '"')
            break;                      // terminating quote
        else
            ++m_index;
    }

    if (Current == '"')
    {
        ++m_index;
        m_token = new Token(m_text, offset, m_index - offset, line, TokenKind.String);
    }
    else
        // Ran off the end of the text.
        // NOTE(review): the message reads as truncated ("for.") — confirm
        // intended wording before changing (it is runtime text).
        throw new ScannerException(m_line, "Expected a terminating '\"' for.");
}
// Identifier := IdentifierStart IdentifierSuffix*
// IdentifierStart := ascii letter or underscore
// IdentifierSuffix := ascii letter, ascii digit, or underscore
private void DoScanIdentifier()
{
    int start = m_index;

    // Consume the identifier characters (the caller has already verified
    // that the first character may start an identifier).
    while (Current == '_' || char.IsLetterOrDigit(Current))
        ++m_index;

    int length = m_index - start;
    string name = m_text.Substring(start, length);

    // Reserved words may never appear in the input.
    if (ms_reserved.Contains(name))
        throw new ScannerException(m_line, "{0} is a reserved word.", name);

    // Keywords get their own token kind; everything else is an identifier.
    TokenKind kind;
    if (ms_keywords.Contains(name))
        kind = TokenKind.Keyword;
    else
        kind = TokenKind.Identifier;

    m_token = new Token(m_text, start, length, m_line, kind);
}
// enum-declaration:
//     attributes?   enum-modifiers?   enum   identifier   enum-base?   enum-body   ;?
//
// enum-base:
//     :   integral-type
//
// enum-body:
//     {   enum-member-declarations?   }
//     {   enum-member-declarations   ,   }
private CsType DoParseEnumDeclaration(CsAttribute[] attrs, MemberModifiers modifiers, Token first, MemberModifiers defaultAccess)
{
    // Called with the scanner positioned on the identifier after 'enum'.
    Token last = first;
    int nameOffset = m_scanner.Token.Offset;
    string name = DoParseIdentifier(ref last);

    // enum-base? (defaults to int when no base type is specified)
    string baseType = "int";
    if (m_scanner.Token.IsPunct(":"))
    {
        m_scanner.Advance();
        baseType = DoParseIdentifier(ref last);
    }

    // enum-body
    DoParsePunct("{");
    string[] names = DoParseEnumMemberDeclarations();
    last = m_scanner.Token;
    DoParsePunct("}");

    // If no access modifier was supplied fall back to the enclosing type's
    // default.
    if (((int) modifiers & CsMember.AccessMask) == 0)
        modifiers |= defaultAccess;

    // ;?
    if (m_scanner.Token.IsPunct(";"))
    {
        last = m_scanner.Token;
        m_scanner.Advance();
    }

    return new CsEnum(names, nameOffset, baseType, attrs, modifiers, name, first.Offset, last.Offset + last.Length - first.Offset, first.Line);
}
// destructor-declaration:
//     attributes?   extern?   ~   identifier   (   )   destructor-body
//
// destructor-body:
//     block
//     ;
private void DoParseDestructorDeclaration(List<CsMember> members, CsAttribute[] attrs, MemberModifiers modifiers, Token first)
{
    // Called with the scanner positioned on the identifier after '~'.
    Token last = first;
    int nameOffset = m_scanner.Token.Offset;
    string name = "~" + DoParseIdentifier(ref last);

    DoParsePunct("(");
    DoParsePunct(")");

    // destructor-body: either ';' (extern) or a block.
    last = m_scanner.Token;
    Token start = m_scanner.Token;
    Token open = new Token();     // zero-length until a block is found
    Token close = last;
    if (m_scanner.Token.IsPunct(";"))
    {
        m_scanner.Advance();
    }
    else
    {
        DoSkipBody("{", "}", ref open, ref last);
        close = last;
    }

    // A CsBody exists only if DoSkipBody found an opening brace.
    CsBody body = open.Length > 0 ? new CsBody(name, start.Offset, open.Offset, close.Offset + close.Length - start.Offset, start.Line) : null;

    // Destructors are modeled as finalizer methods returning void with no
    // parameters.
    members.Add(new CsMethod(nameOffset, body, false, true, null, new CsParameter[0], null, "void", attrs, modifiers, name, first.Offset, last.Offset + last.Length - first.Offset, first.Line));
}
// delegate-declaration:
//     attributes?   delegate-modifiers?   delegate   return-type   identifier
//     type-parameter-list?
//     (   formal-parameter-list?   )   type-parameter-constraints-clauses?   ;
//
// type-parameter-list:
//     <   type-parameters   >
private CsType DoParseDelegateDeclaration(CsAttribute[] attrs, MemberModifiers modifiers, Token first, MemberModifiers defaultAccess)
{
    // Called with the scanner positioned on the return type after 'delegate'.
    Token last = m_scanner.Token;
    string rtype = DoParseReturnType();

    int nameOffset = m_scanner.Token.Offset;
    string name = DoParseIdentifier(ref last);

    // type-parameter-list? (captured as raw text)
    string gargs = null;
    if (m_scanner.Token.IsPunct("<"))
    {
        gargs = DoScanBody("<", ">", ref last);
    }

    // ( formal-parameter-list? )
    var parms = new List<CsParameter>();
    DoParsePunct("(");
    DoParseFormalParameterList(parms);
    DoParsePunct(")");

    // type-parameter-constraints-clauses?
    string constraints = DoParseTypeParameterConstraintsClauses();

    last = DoParsePunct(";");

    // If no access modifier was supplied fall back to the enclosing type's
    // default.
    if (((int) modifiers & CsMember.AccessMask) == 0)
        modifiers |= defaultAccess;

    // ;?
    // NOTE(review): the grammar ends a delegate-declaration with the single
    // ';' already consumed above; this optional-semicolon block looks like
    // copy/paste from the type-declaration parsers and silently swallows a
    // stray second ';' — confirm whether that lenience is intended.
    if (m_scanner.Token.IsPunct(";"))
    {
        last = m_scanner.Token;
        m_scanner.Advance();
    }

    return new CsDelegate(nameOffset, constraints, parms.ToArray(), gargs, rtype, attrs, modifiers, name, first.Offset, last.Offset + last.Length - first.Offset, first.Line);
}
// operator-declaration:
//     attributes?   operator-modifiers   operator-declarator   operator-body
//
// conversion-operator-declarator:
//     implicit   operator   type   (   type   identifier   )
//     explicit   operator   type   (   type   identifier   )
private void DoParseConversionOperatorDeclaration(bool isImplicit, List<CsMember> members, CsAttribute[] attrs, MemberModifiers modifiers, Token first)
{
    // Called with the scanner positioned on 'operator' (after
    // implicit/explicit has been consumed by the caller).
    DoParseKeyword("operator");

    int nameOffset = m_scanner.Token.Offset;
    string type = DoParseType();

    // ( type identifier )
    var parms = new List<CsParameter>();
    DoParsePunct("(");
    DoParseFormalParameterList(parms);
    DoParsePunct(")");

    // operator-body: either ';' or a block.
    Token last = m_scanner.Token;
    CsBody body = null;
    if (m_scanner.Token.IsPunct(";"))
    {
        m_scanner.Advance();
    }
    else
    {
        Token f = m_scanner.Token;
        Token start = m_scanner.Token;
        DoSkipBody("{", "}", ref f, ref last);

        // Use the CLR special names for conversion operators. (Fix: the
        // originals were misspelled "op_Implict"/"op_Explict"; the CLS names
        // are op_Implicit/op_Explicit.)
        body = new CsBody(isImplicit ? "op_Implicit" : "op_Explicit", start.Offset, f.Offset, last.Offset + last.Length - f.Offset, start.Line);
    }

    // Conversion operators use the target type as both the member name and
    // the return type.
    members.Add(new CsOperator(nameOffset, body, isImplicit, !isImplicit, parms.ToArray(), type, attrs, modifiers, type, first.Offset, last.Offset + last.Length - first.Offset, first.Line));
}
// constructor-declaration:
//     attributes?   constructor-modifiers?   constructor-declarator   constructor-body
//
// constructor-declarator:
//     identifier   (   formal-parameter-list?   )   constructor-initializer?
//
// constructor-initializer:
//     :   base   (   argument-list?   )
//     :   this   (   argument-list?   )
//
// constructor-body:
//     block
//     ;
private void DoParseConstructorDeclaration(string name, int nameOffset, List<CsMember> members, CsAttribute[] attrs, MemberModifiers modifiers, Token first)
{
    // Called after the identifier and '(' have already been consumed.
    var parms = new List<CsParameter>();
    DoParseFormalParameterList(parms);
    DoParsePunct(")");

    // The caller passes the raw name which may carry generic arguments,
    // e.g. "Foo<T>"; split those out.
    string gargs = null;
    int i = name.IndexOf('<');
    if (i > 0)
    {
        int j = name.IndexOf('>');
        gargs = name.Substring(i + 1, j - (i + 1)).TrimAll();
        name = name.Substring(0, i);
    }

    // constructor-initializer? — ": base(...)" or ": this(...)" is skipped,
    // not modeled.
    Token last = m_scanner.Token;
    Token open = last;
    if (m_scanner.Token.IsPunct(":"))
    {
        m_scanner.Advance();
        DoParseIdentifier(ref last);
        DoSkipBody("(", ")", ref open, ref last);
    }

    // constructor-body: either ';' or a block.
    last = m_scanner.Token;
    open = new Token();     // reset: zero-length until a block is found
    Token start = m_scanner.Token;
    Token close = last;
    if (m_scanner.Token.IsPunct(";"))
    {
        m_scanner.Advance();
    }
    else
    {
        DoSkipBody("{", "}", ref open, ref last);
        close = last;
    }

    // A CsBody exists only if DoSkipBody found an opening brace.
    CsBody body = open.Length > 0 ? new CsBody(name, start.Offset, open.Offset, close.Offset + close.Length - start.Offset, start.Line) : null;

    // Constructors are modeled as methods returning void.
    members.Add(new CsMethod(nameOffset, body, true, false, null, parms.ToArray(), gargs, "void", attrs, modifiers, name, first.Offset, last.Offset + last.Length - first.Offset, first.Line));
}
// class-declaration:
//     attributes?   class-modifiers?   partial?   class   identifier   type-parameter-list?
//     class-base?   type-parameter-constraints-clauses?   class-body   ;?
private CsType DoParseClassDeclaration(CsAttribute[] attrs, MemberModifiers modifiers, Token first, MemberModifiers defaultAccess)
{
    // partial?
    if (m_scanner.Token.IsIdentifier("partial"))
    {
        m_scanner.Advance();
        modifiers |= MemberModifiers.Partial;
    }

    // class
    DoParseKeyword("class");

    // identifier
    Token last = m_scanner.Token;
    int nameOffset = m_scanner.Token.Offset;
    string name = DoParseIdentifier(ref last);

    // type-parameter-list? (captured as raw text)
    string gargs = null;
    if (m_scanner.Token.IsPunct("<"))
    {
        gargs = DoScanBody("<", ">", ref last);
    }

    // class-base?
    CsBases bases = DoParseInterfaceTypeList(last);

    // type-parameter-constraints-clauses?
    string constraints = DoParseTypeParameterConstraintsClauses();

    // class-body
    var members = new List<CsMember>();
    var types = new List<CsType>();

    Token open = m_scanner.Token;
    Token start = m_scanner.Token;
    DoParseStructBody(members, types, ref open, ref last);
    Token close = last;

    // ;?
    if (m_scanner.Token.IsPunct(";"))
    {
        last = m_scanner.Token;
        m_scanner.Advance();
    }

    // NOTE(review): unlike the enum/delegate parsers, defaultAccess is never
    // ORed into modifiers here — confirm whether CsClass applies the default
    // itself or this is an oversight.
    CsBody body = new CsBody(name, start.Offset, open.Offset, close.Offset + close.Length - start.Offset, start.Line);
    CsClass result = new CsClass(nameOffset, body, members.ToArray(), types.ToArray(), bases, constraints, gargs, attrs, modifiers, name, first.Offset, last.Offset + last.Length - first.Offset, first.Line);

    return result;
}
// attribute-list:
//     attribute
//     attribute-list   ,   attribute
//
// attribute:
//     attribute-name   attribute-arguments?
//
// attribute-name:
//     type-name
//
// attribute-arguments:
//     (   positional-argument-list?   )
//     (   positional-argument-list   ,   named-argument-list   )
//     (   named-argument-list   )
private void DoParseAttributeList(Token first, string target, List<CsAttribute> attrs)
{
    // Parse comma-separated attributes, appending one CsAttribute per entry.
    bool more = true;
    while (more)
    {
        Token last = m_scanner.Token;
        string name = DoParseTypeName(ref last);

        // Arguments, if present, are captured as raw text.
        string args = string.Empty;
        if (m_scanner.Token.IsPunct("("))
            args = DoScanBody("(", ")", ref last);

        attrs.Add(new CsAttribute(target, name, args, first.Offset, last.Offset + last.Length - first.Offset, first.Line));

        // A trailing comma means another attribute follows.
        if (m_scanner.Token.IsPunct(","))
            m_scanner.Advance();
        else
            more = false;
    }
}
// Advances the scanner to the next token: skips whitespace and '#' comments,
// then classifies identifier, string, punctuation, eof, or other.
private void DoAdvance(char* buffer)
{
    m_buffer = buffer;

    // skip whitespace and comments
    // NOTE(review): there is no progress assert here; if DoSkipWhiteSpace or
    // DoSkipComment ever failed to advance this would loop forever — confirm
    // both helpers always consume at least one character.
    while (true)
    {
        if (char.IsWhiteSpace(Current))
            DoSkipWhiteSpace();
        else if (Current == '#')
            DoSkipComment();
        else
            break;
    }

    // identifier
    if (char.IsLetter(Current) || Current == '_')
    {
        DoScanIdentifier();
    }
    // string
    else if (Current == '"')
    {
        DoScanString();
    }
    // punctuation
    else if (char.IsPunctuation(Current) || char.IsSymbol(Current))
    {
        DoScanPunct();
    }
    // eof (NUL is the scanner's end-of-text sentinel)
    else if (Current == '\x00')
    {
        m_token = new Token(m_text, m_token.Line);
        ++m_index;
    }
    // catch all
    else
    {
        m_token = new Token(m_text, m_index, 1, m_line, TokenKind.Other);
        ++m_index;
    }
}
// event-declaration:
//     attributes?   event-modifiers?   event   type   variable-declarators   ;
//     attributes?   event-modifiers?   event   type   member-name   {   event-accessor-declarations   }
//
// variable-declarators:
//     variable-declarator
//     variable-declarators   ,   variable-declarator
private void DoParseEventDeclaration(List<CsMember> members, CsAttribute[] attrs, MemberModifiers modifiers, Token first)
{
    string type = DoParseType();

    // Peek one token past the member name: '=', ',', or ';' means the
    // field-like declarator form; anything else means accessor form.
    Token next = m_scanner.LookAhead(1);
    if (next.IsPunct("=") || next.IsPunct(",") || next.IsPunct(";"))
    {
        // One CsEvent per comma-separated declarator.
        while (true)
        {
            DoParseEventDeclarator(type, members, attrs, modifiers, first);

            if (m_scanner.Token.IsPunct(","))
                m_scanner.Advance();
            else
                break;
        }
        DoParsePunct(";");
    }
    else
    {
        // member-name { event-accessor-declarations } — accessors are
        // skipped, not modeled.
        int nameOffset = m_scanner.Token.Offset;
        string name = DoParseMemberName();

        Token last = m_scanner.Token;
        Token open = m_scanner.Token;
        DoSkipBody("{", "}", ref open, ref last);

        members.Add(new CsEvent(nameOffset, type, name, attrs, modifiers, first.Offset, last.Offset + last.Length - first.Offset, first.Line));
    }
}
// Scans a punctuation/operator token. "==" and "!=" are the only
// two-character operators this scanner recognizes; everything else is a
// single-character punct token.
private void DoScanPunct()
{
    int length = 1;

    switch (Current)
    {
        case '=':
        case '!':
            // '==' or '!='
            if (Next == '=')
                length = 2;
            break;
    }

    m_token = new Token(m_text, m_index, length, m_line, TokenKind.Punct);
    m_index += length;
}
// variable-declarator:
//     identifier
//     identifier   =   variable-initializer
private void DoParseEventDeclarator(string type, List<CsMember> members, CsAttribute[] attrs, MemberModifiers modifiers, Token first)
{
    // Parses one declarator of a field-like event and appends a CsEvent.
    Token last = m_scanner.Token;
    int nameOffset = m_scanner.Token.Offset;
    string name = DoParseIdentifier(ref last);

    if (m_scanner.Token.IsPunct("="))
    {
        DoParsePunct("=");

        // The initializer expression is skipped, not modeled.
        // TODO: this won't parse multiple declarators correctly. We could probably handle this
        // by scanning until we hit a semi-colon or a comma not within brackets.
        while (m_scanner.Token.IsValid() && !m_scanner.Token.IsPunct(";"))
        {
            last = m_scanner.Token;
            m_scanner.Advance();
        }
    }

    members.Add(new CsEvent(nameOffset, type, name, attrs, modifiers, first.Offset, last.Offset + last.Length - first.Offset, first.Line));
}
// Returns the token delta positions ahead of the current one without moving
// the scanner: the state is snapshotted, the scanner is advanced, and the
// snapshot is then restored. Stops early if an invalid token is reached.
public Token LookAhead(int delta)
{
#if DEBUG
    Contract.Requires(delta >= 0, "delta is negative");
#endif
    // Snapshot the scanner state so the look-ahead leaves it untouched.
    int savedIndex = m_index;
    int savedLine = m_line;
    Token savedToken = m_token;

    fixed (char* buffer = m_text)
    {
        for (int remaining = delta; remaining > 0 && Token.Kind != TokenKind.Invalid; --remaining)
        {
            DoAdvance(buffer);
        }
    }

    Token result = m_token;

    // Restore the snapshot.
    m_index = savedIndex;
    m_line = savedLine;
    m_token = savedToken;

    return result;
}
// TODO: this won't parse multiple declarators correctly. One possible fix is
// to scan tokens until we hit a semi-colon or until we hit a comma not
// inside brackets. Another is to add a simple parser which ignores things
// like precedence. If this is fixed also update DoParseEventDeclarator.
//
// Skips tokens until one of the terminator puncts in eols is reached and
// returns the skipped text (the terminator itself is excluded).
private string DoParseExpression(ref Token last, params string[] eols)
{
    Token start = m_scanner.Token;

    while (m_scanner.Token.IsValid())
    {
        Token candidate = m_scanner.Token;
        if (eols.Any(e => candidate.IsPunct(e)))
            break;

        m_scanner.Advance();
    }

    last = m_scanner.Token;

    // The substring runs up to (not including) the terminator token.
    return m_text.Substring(start.Offset, last.Offset - start.Offset);
}
// exponent-part:
//     e   sign?   decimal-digits
//     E   sign?   decimal-digits
//
// sign:  one of
//     +   -
//
// real-type-suffix:  one of
//     F  f  D  d  M  m
private void DoScanExponent(int offset)
{
    // Called from the number scanners with offset at the literal's start;
    // consumes an optional exponent, then an optional real-type suffix, and
    // emits the complete Number token.
    if (Current == 'e' || Current == 'E')
    {
        m_index += 1;

        // sign?
        if (Current == '+' || Current == '-')
            m_index += 1;

        // decimal-digits
        while (Current >= '0' && Current <= '9')
        {
            ++m_index;
        }
    }

    // real-type-suffix?
    if (Current == 'f' || Current == 'd' || Current == 'm' || Current == 'F' || Current == 'D' || Current == 'M')
    {
        m_index += 1;
    }

    m_token = new Token(m_text, offset, m_index - offset, m_line, TokenKind.Number);
}
// extern-alias-directives:
//     extern-alias-directive
//     extern-alias-directives   extern-alias-directive
//
// extern-alias-directive:
//     extern   alias   identifier   ;
private void DoParseExternAliasDirectives(ref Token last, List<CsExternAlias> externs)
{
    // Parses zero or more "extern alias name;" directives at the top of a
    // compilation unit.
    while (m_scanner.Token.IsIdentifier("extern"))
    {
        try
        {
            Token first = m_scanner.Token;
            m_scanner.Advance();

            DoParseKeyword("alias");
            string name = DoParseIdentifier(ref last);
            last = DoParsePunct(";");

            externs.Add(new CsExternAlias(name, first.Offset, last.Offset + last.Length - first.Offset, first.Line));
        }
        catch (BaseParserException e)
        {
            // In try mode record the error and resynchronize at a
            // compilation-unit boundary; otherwise propagate to the caller.
            if (m_try)
                DoRecover("extern alias", e.Message, ms_unitRecovery);
            else
                throw;
        }
    }
}
// identifier:
//     available-identifier
//     @   identifier-or-keyword
//
// identifier-or-keyword:
//     identifier-start-character   identifier-part-characters?
//
// Scans an identifier token. The caller has already verified that the
// current character may start an identifier (or has consumed a leading '@');
// keyword classification is not done here.
private void DoScanIdentifier()
{
    int start = m_index;

    // Consume every character that may continue an identifier.
    while (CsHelpers.CanContinueIdentifier(Current))
        ++m_index;

    m_token = new Token(m_text, start, m_index - start, m_line, TokenKind.Identifier);
}
// field-declaration:
//     attributes?   field-modifiers?   type   variable-declarators   ;
//
// Parses the comma-separated declarator list of a field declaration (one
// CsField appended per declarator) followed by the terminating ';'.
private void DoParseFieldDeclaration(string type, List<CsMember> members, CsAttribute[] attrs, MemberModifiers modifiers, Token first)
{
    bool more = true;
    while (more)
    {
        DoParseFieldDeclarator(type, members, attrs, modifiers, first);

        // A comma means another declarator follows.
        if (m_scanner.Token.IsPunct(","))
            m_scanner.Advance();
        else
            more = false;
    }

    DoParsePunct(";");
}
// operator-or-punctuator: one of
//     {    }    [    ]    (    )    .    ,    :    ;
//     +    -    *    /    %    &    |    ^    !    ~
//     =    <    >    ?   ??    ::   ++   --   &&   ||
//     ->   ==   !=   <=   >=   +=   -=   *=   /=   %=
//     &=   |=   ^=   <<   <<=  =>
//
// Scans one operator/punctuator token: determines the token length (1-3
// characters) from the current character and its successors, then emits the
// Punct token and advances past it.
private void DoScanPunct()
{
    int length = 1;

    switch (Current)
    {
        case '?':
            if (Next == '?')
                length = 2;                            // ??
            break;

        case ':':
            if (Next == ':')
                length = 2;                            // ::
            break;

        case '+':
            if (Next == '+' || Next == '=')
                length = 2;                            // ++  +=
            break;

        case '-':
            if (Next == '=' || Next == '>' || Next == '-')
                length = 2;                            // -=  ->  --
            break;

        case '&':
            if (Next == '&' || Next == '=')
                length = 2;                            // &&  &=
            break;

        case '|':
            if (Next == '|' || Next == '=')
                length = 2;                            // ||  |=
            break;

        case '=':
            if (Next == '=' || Next == '>')
                length = 2;                            // ==  =>
            break;

        case '<':
            // Fix: <<= must be tested before << — in the original the
            // Next == '<' branch always won, so <<= was split into two
            // tokens despite being listed in the grammar above.
            if (Next == '<' && NextNext == '=')
                length = 3;                            // <<=
            else if (Next == '=' || Next == '<')
                length = 2;                            // <=  <<
            break;

        // '>' deliberately only pairs with '='; ">>" is left as two tokens
        // (presumably to keep nested generic arguments like List<List<T>>
        // scannable — confirm).
        case '!':
        case '>':
        case '*':
        case '/':
        case '%':
        case '^':
            if (Next == '=')
                length = 2;                            // !=  >=  *=  /=  %=  ^=
            break;
    }

    m_token = new Token(m_text, m_index, length, m_line, TokenKind.Punct);
    m_index += length;
}
// constant-declarator:
//     identifier   =   constant-expression
//
// field-declaration:
//     attributes?   field-modifiers?   type   variable-declarators   ;
//
// variable-declarator:
//     identifier
//     identifier   =   variable-initializer
private void DoParseFieldDeclarator(string type, List<CsMember> members, CsAttribute[] attrs, MemberModifiers modifiers, Token first)
{
    // Parses one declarator (name plus optional initializer) and appends a
    // CsField for it.
    Token last = m_scanner.Token;
    int nameOffset = m_scanner.Token.Offset;
    string name = DoParseIdentifier(ref last);

    // = variable-initializer? — the initializer is captured as raw text up
    // to (but not including) the ';'.
    string value = null;
    if (m_scanner.Token.IsPunct("="))
    {
        DoParsePunct("=");
        value = DoParseExpression(ref last, ";");
    }

    members.Add(new CsField(nameOffset, type, value, attrs, modifiers, name, first.Offset, last.Offset + last.Length - first.Offset, first.Line));
}
// verbatim-string-literal:
//     @"   verbatim-string-literal-characters?   "
//
// verbatim-string-literal-characters:
//     verbatim-string-literal-character
//     verbatim-string-literal-characters   verbatim-string-literal-character
//
// verbatim-string-literal-character:
//     single-verbatim-string-literal-character
//     quote-escape-sequence
//
// single-verbatim-string-literal-character:
//     any character except "
//
// quote-escape-sequence:
//     ""
private void DoScanVerbatimString()
{
    // Remember the line the literal started on so the token reports it even
    // when the literal spans lines.
    int line = m_line;

    // Skip the leading @" pair; the token text starts at the opening quote.
    m_index = m_index + 2;
    int offset = m_index - 1;

    while (Current != '\x00')
    {
        int old = m_index;

        if (char.IsWhiteSpace(Current))
            DoSkipWhiteSpace();
        else if (Current == '"' && Next == '"')
            m_index = m_index + 2;      // "" is the only escape
        else if (Current == '"')
            break;                      // terminating quote
        else
            ++m_index;

        // Fix: the original also treated \" as an escape (skipping both
        // characters), but per the grammar above backslash has no special
        // meaning in a verbatim string, so @"c:\" failed to terminate at its
        // closing quote. That branch has been removed.
        Contract.Assert(m_index != old, string.Format("failed to advance from {0} '{1}' on line {2}", Token.Kind, Token.Text().EscapeAll(), Token.Line));
    }

    if (Current == '"')
    {
        ++m_index;
        m_token = new Token(m_text, offset, m_index - offset, line, TokenKind.String);
    }
    else
    {
        // Ran off the end of the text without a closing quote.
        throw new ScannerException("Expected a terminating '\"' for line {0}", m_line);
    }
}
// Parses a type name out of text[start, stop) and returns it, or null when
// no type could be parsed. On success token is set to the scanner's token
// following the type; on failure token is left unchanged.
public string ParseType(string text, int start, int stop, ref Token token)
{
    Contract.Requires(text != null, "text is null");
    Contract.Requires(start >= 0, "start is negative");
    Contract.Requires(start <= text.Length, "start is too big");
    Contract.Requires(start <= stop, "stop is too small");
    // NOTE(review): there is no requirement that stop <= text.Length, so a
    // too-large stop surfaces as an ArgumentOutOfRangeException from
    // Substring (swallowed by the catch-all below) — confirm callers.

    var result = new StringBuilder();

    try
    {
        m_text = text.Substring(start, stop - start);

        m_scanner = new Scanner();
        m_scanner.Init(m_text);

        // Types must start with an identifier; anything else is "no type".
        if (m_scanner.Token.Kind == TokenKind.Identifier)
        {
            DoParseType(result);
            token = m_scanner.Token;
        }
    }
    catch (LocalException)
    {
        // Expected parse failure: report as "no type".
        result = null;
    }
    catch (ScannerException)
    {
        result = null;
    }
    catch (Exception e)
    {
        // Unexpected failures are logged but deliberately not propagated;
        // this parser is best-effort.
        Log.WriteLine(TraceLevel.Warning, "LocalsParser", "Unexpected error:");
        Log.WriteLine(TraceLevel.Warning, "LocalsParser", "{0}", e);
        result = null;
    }

    return result != null && result.Length > 0 ? result.ToString() : null;
}