/// <summary>
/// Consumes consecutive delimiter-class characters from <paramref name="reader"/>
/// and appends them to <paramref name="token"/>. Stops at the first character
/// whose type does not match.
/// </summary>
/// <param name="token">Accumulator for the token text being built.</param>
/// <param name="reader">Character source positioned just past the token's first char.</param>
private void ProcessMultyDelimeterToken(ref StringBuilder token, CrarReader reader)
{
    // Iterative form of the original tail recursion: keep appending while the
    // next character is still in the delimiter class.
    // NOTE(review): the method name says "multi" but the check uses
    // SINGLE_DIGIT_DELIMITERS — confirm this is the intended character class.
    while (true)
    {
        var next = reader.ReadNextChar();
        if (next.chartype != SPECIAL_CODES.SINGLE_DIGIT_DELIMITERS)
        {
            // NOTE(review): the non-matching character read here is consumed and
            // discarded — confirm CrarReader supports push-back, otherwise a
            // character is silently lost at each token boundary.
            break;
        }
        token.Append((char)next.charcode);
    }
}
/// <summary>
/// Main lexical-analysis driver: reads the source character stream, classifies
/// each character, accumulates identifier/constant/delimiter tokens, records
/// them, and finally dumps the token tables and serialized results to disk.
/// </summary>
public void ProcessSourceCode()
{
    StringBuilder a_token = new StringBuilder("");
    var charReader = new CrarReader();
    while (charReader.canRead)
    {
        var code_symbol = charReader.ReadNextChar();
        // Every token remembers where its first character was found.
        var token = new TokenInfo() { row = code_symbol.row, colmn = code_symbol.colmn };
        if (IsErrorCode(code_symbol))
        {
            continue;
        }
        // chartype holds exactly one classification, so the branches are
        // mutually exclusive — expressed as an else-if chain.
        if (code_symbol.chartype == SPECIAL_CODES.IDENTIFIERS)
        {
            a_token.Append((char)code_symbol.charcode);
            ProcessIdenifierToken(ref a_token, charReader);
            token.type = SPECIAL_CODES.IDENTIFIERS;
        }
        else if (code_symbol.chartype == SPECIAL_CODES.CONST_CODE)
        {
            a_token.Append((char)code_symbol.charcode);
            ProcessConstantToken(ref a_token, charReader);
            token.type = SPECIAL_CODES.CONST_CODE;
        }
        else if (code_symbol.chartype == SPECIAL_CODES.SINGLE_DIGIT_DELIMITERS)
        {
            a_token.Append((char)code_symbol.charcode);
            token.type = SPECIAL_CODES.SINGLE_DIGIT_DELIMITERS;
        }
        else if (code_symbol.chartype == SPECIAL_CODES.MULTI_CHARACTER_DELIMITERS)
        {
            // BUG FIX: this check was previously nested inside the
            // SINGLE_DIGIT_DELIMITERS branch, where it could never be true
            // (chartype cannot equal two different enum values), so
            // multi-character delimiters were emitted as empty, untyped tokens.
            a_token.Append((char)code_symbol.charcode);
            ProcessMultyDelimeterToken(ref a_token, charReader);
            token.type = SPECIAL_CODES.MULTI_CHARACTER_DELIMITERS;
        }
        token.value = a_token.ToString();
        AddToLexicalResult(token);
        a_token.Clear();
    }
    TokenTable.DumpTablesCurrentState();
    TokenTable.WriteToJson("lexicalData.json", JsonConvert.SerializeObject(lexicalAnalysisResult));
    SaveToFileLexicalResult();
}
/// <summary>
/// Consumes consecutive constant-class characters from <paramref name="reader"/>
/// and appends them to <paramref name="token"/>. If the first non-constant
/// character is a semicolon it is also appended to the token.
/// </summary>
/// <param name="token">Accumulator for the constant's text.</param>
/// <param name="reader">Character source positioned just past the constant's first char.</param>
private void ProcessConstantToken(ref StringBuilder token, CrarReader reader)
{
    // Iterative form of the original tail recursion.
    var current = reader.ReadNextChar();
    while (current.chartype == SPECIAL_CODES.CONST_CODE)
    {
        token.Append((char)current.charcode);
        current = reader.ReadNextChar();
    }
    // NOTE(review): a terminating ';' becomes part of the constant token itself —
    // confirm this is intended grammar behavior and not a leftover debugging hack.
    if (current.charcode == (int)';')
    {
        token.Append((char)current.charcode);
    }
}