/// <summary>
/// Creates a new <see cref="Lexicon"/> backed by the supplied token table.
/// </summary>
/// <param name="token_table">The <see cref="Polite.TokenTable"/> to store and extract <see cref="Token.Types"/> from.</param>
public Lexicon(TokenTable token_table)
{
    TokenTable = token_table;
}
/// <summary>
/// Finds, converts, and returns all literals (strings, empty arrays, booleans, and numbers)
/// found in the provided JavaScript source code.
/// </summary>
/// <param name="source">The source code to extract literals from.</param>
/// <param name="token_table">The <see cref="Polite.TokenTable"/> to use as reference for the creation of a returned <see cref="Token"/>.</param>
/// <param name="reserved">The <see cref="BitArray"/> to set true for every index that the extracted literals occupy within source. Assumes its length is at least source.Length — TODO confirm against callers.</param>
/// <returns>Every extracted literal <see cref="Token"/> found in source.</returns>
/// <exception cref="Exception">Thrown when a string opener has no matching closer.</exception>
public static Token[] SniffLiterals(string source, TokenTable token_table, ref BitArray reserved)
{
    List<Token> result = new List<Token>();

    // Order matters: strings first, so later sniffers skip indices reserved by string bodies.
    SniffStrings(source, token_table, reserved, result);
    SniffArrays(source, token_table, reserved, result);
    SniffBooleans(source, token_table, reserved, result);
    SniffNumbers(source, token_table, reserved, result);

    return result.ToArray();
}

/// <summary>Clones a fresh literal token from the table's literal prototype.</summary>
private static Token NewLiteral(TokenTable token_table)
{
    return token_table[Token.Types.Primary.ToPUA(Token.Types.Primary.Literal)].Clone();
}

/// <summary>
/// Extracts single- and double-quoted string literals and reserves their indices.
/// </summary>
/// <remarks>
/// Fixes over the previous version: the earliest opener (either quote kind) wins instead of
/// always preferring a double quote; the escaped-closer search now advances past each escaped
/// closer (previously it restarted at the same index, looping forever); a closer at the very
/// last index of source is accepted (previously it threw "has no closer").
/// Known limitation (unchanged): a backslash-escaped backslash before a quote (e.g. \\")
/// is still treated as an escaped closer.
/// </remarks>
private static void SniffStrings(string source, TokenTable token_table, BitArray reserved, List<Token> result)
{
    int start = 0;
    while (start < source.Length)
    {
        int doubleQuote = source.IndexOf('"', start);
        int singleQuote = source.IndexOf('\'', start);
        if (doubleQuote == -1 && singleQuote == -1)
            break;

        // Earliest opener of either kind wins.
        start = doubleQuote == -1 ? singleQuote
              : singleQuote == -1 ? doubleQuote
              : Math.Min(doubleQuote, singleQuote);
        char opener = source[start];

        // Find the next closer that is not preceded by a backslash.
        int end = -1;
        for (int search = start + 1; search < source.Length; )
        {
            int candidate = source.IndexOf(opener, search);
            if (candidate == -1)
                break;
            if (source[candidate - 1] != '\\')
            {
                end = candidate;
                break;
            }
            search = candidate + 1; // skip the escaped closer and keep looking
        }

        if (end == -1)
            throw new Exception("Opening encapsulator at index " + start + " has no closer.");

        Token newToken = NewLiteral(token_table);
        newToken.SecondaryType = Token.Types.Secondary.Literals.String;
        newToken.Name = source.Substring(start, end - start + 1);
        newToken.Value = newToken.Name.Substring(1, newToken.Name.Length - 2); // strip the quotes
        newToken.Start = start;
        newToken.End = end;

        for (int i = start; i <= end; i += 1)
        {
            reserved.Set(i, true);
        }

        result.Add(newToken);
        start = end + 1;
    }
}

/// <summary>
/// Extracts empty array literals ("[]") at indices not already reserved.
/// </summary>
private static void SniffArrays(string source, TokenTable token_table, BitArray reserved, List<Token> result)
{
    int start = 0;
    while (start < source.Length && (start = source.IndexOf("[]", start)) != -1)
    {
        if (reserved.Get(start))
        {
            start += 1;
            continue;
        }

        Token newToken = NewLiteral(token_table);
        newToken.SecondaryType = Token.Types.Secondary.Literals.Array;
        newToken.Value = new List<object>();
        newToken.Start = start;
        newToken.End = start + 1;

        for (int i = start; i <= newToken.End; i += 1)
        {
            reserved.Set(i, true);
        }

        result.Add(newToken);
        start = newToken.End + 1;
    }
}

/// <summary>
/// Extracts "true"/"false" literals at indices not already reserved.
/// </summary>
/// <remarks>
/// Fix over the previous version: when both keywords are present the earlier occurrence is
/// taken (previously "true" was always preferred, and a stale index could be used because of
/// short-circuit evaluation). Known limitation (unchanged): matches inside identifiers
/// (e.g. "construe") are not excluded.
/// </remarks>
private static void SniffBooleans(string source, TokenTable token_table, BitArray reserved, List<Token> result)
{
    const string trueL = "true";
    const string falseL = "false";

    int start = 0;
    while (start < source.Length)
    {
        int trueAt = source.IndexOf(trueL, start);
        int falseAt = source.IndexOf(falseL, start);
        if (trueAt == -1 && falseAt == -1)
            break;

        start = trueAt == -1 ? falseAt
              : falseAt == -1 ? trueAt
              : Math.Min(trueAt, falseAt);

        if (reserved.Get(start))
        {
            start += 1;
            continue;
        }

        Token newToken = NewLiteral(token_table);
        newToken.SecondaryType = Token.Types.Secondary.Literals.Boolean;
        newToken.Name = source[start] == 't' ? trueL : falseL;
        newToken.Value = newToken.Name == trueL;
        newToken.Start = start;
        newToken.End = start + newToken.Name.Length - 1;

        for (int i = start; i <= newToken.End; i += 1)
        {
            reserved.Set(i, true);
        }

        result.Add(newToken);
        start = newToken.End + 1;
    }
}

/// <summary>
/// Extracts integer and double literals at indices not already reserved.
/// </summary>
/// <remarks>
/// Fixes over the previous version: the digit scan no longer reads past the end of source
/// (previously threw when a number ran to the last character); a '.' only makes the literal
/// a double when a digit follows it (resolving the old "thinks integers are doubles" TODO);
/// the reserve loop no longer reuses the outer index, which silently skipped the character
/// immediately after every number; parsing uses the invariant culture, since JavaScript
/// numerals always use '.' regardless of the host locale.
/// </remarks>
private static void SniffNumbers(string source, TokenTable token_table, BitArray reserved, List<Token> result)
{
    for (int i = 0; i < source.Length; i += 1)
    {
        if (reserved.Get(i) || !char.IsDigit(source[i]))
            continue;

        int start = i;
        bool foundDecimal = false;

        // Consume digits, plus at most one '.' that is immediately followed by a digit.
        while (i < source.Length)
        {
            char c = source[i];
            if (char.IsDigit(c))
            {
                i += 1;
                continue;
            }
            if (c == '.' && !foundDecimal && i + 1 < source.Length && char.IsDigit(source[i + 1]))
            {
                foundDecimal = true;
                i += 1;
                continue;
            }
            break;
        }
        i -= 1; // i is now the last index of the numeric literal

        Token newToken = NewLiteral(token_table);
        newToken.Name = source.Substring(start, i - start + 1);
        if (foundDecimal)
        {
            newToken.SecondaryType = Token.Types.Secondary.Literals.Double;
            newToken.Value = double.Parse(newToken.Name, System.Globalization.CultureInfo.InvariantCulture);
        }
        else
        {
            newToken.SecondaryType = Token.Types.Secondary.Literals.Integer;
            newToken.Value = int.Parse(newToken.Name, System.Globalization.CultureInfo.InvariantCulture);
        }
        newToken.Start = start;
        newToken.End = i;

        for (int r = start; r <= newToken.End; r += 1)
        {
            reserved.Set(r, true);
        }

        result.Add(newToken);
    }
}
/// <summary>
/// Creates a scope token with its own, initially empty, definitions table.
/// </summary>
/// <param name="lbp">The binding power forwarded to the base token — presumably the left binding power; confirm against <see cref="Token"/>.</param>
/// <param name="value">The value forwarded to the base token.</param>
/// <param name="ID">Scope identifier. NOTE(review): not used in this constructor body — confirm intended.</param>
internal Scope(int lbp, string value, int ID)
    : base(Token.Types.Primary.Scope, Token.Types.Undefined, value, lbp)
{
    Definitions = new TokenTable();
}