Beispiel #1
0
        /// <summary>
        /// Converts a stream of basic tokens into assembler tokens, expanding the
        /// #include and #define preprocessor directives along the way.
        /// Scanning stops at the first EndOfFile token.
        /// </summary>
        /// <param name="sourceTokens">Token stream produced by the Tokenizer; must be terminated by an EndOfFile token.</param>
        /// <exception cref="AssemblerException">
        /// Thrown on an unknown delimiter, an unknown preprocessor directive,
        /// an unreadable #include file, or an unhandled basic token type.
        /// </exception>
        private void ScanSource(TokenList<BasicToken> sourceTokens)
        {
            // Lookup tables built once per call. The original code re-allocated
            // both collections on every single loop iteration.
            HashSet<string> opcodes = new HashSet<string>
            {
                "set", "add", "sub", "mul", "div", "mod", "inc", "dec", "not", "and", "or", "xor", "shl",
                "shr", "push", "pop", "jmp", "call", "ret", "in", "out", "cmp", "jz", "jnz", "je", "ja",
                "jb", "jae", "jbe", "jne"
            };

            Dictionary<string, TokenType> delimiters = new Dictionary<string, TokenType>
            {
                { ",", TokenType.Comma },
                { "[", TokenType.OpenBracket },
                { "]", TokenType.CloseBracket },
                { "(", TokenType.OpenParentheses },
                { ")", TokenType.CloseParentheses },
                { ".", TokenType.Period },
                { "+", TokenType.Add },
                { "-", TokenType.Subtract },
                { "*", TokenType.Multiply },
                { "/", TokenType.Divide },
                { "%", TokenType.Modulo },
                { "~", TokenType.BitwiseNot },
                { "&", TokenType.BitwiseAnd },
                { "|", TokenType.BitwiseOr },
                { "^", TokenType.BitwiseXor }
            };

            for (var i = 0; sourceTokens[i].Type != BasicTokenType.EndOfFile; i++)
            {
                var token = sourceTokens[i];

                switch (token.Type)
                {
                    case BasicTokenType.Word:
                        {
                            // Lowercase once; the original called ToLower() twice.
                            string lowered = token.Value.ToLower();

                            if (opcodes.Contains(lowered))
                            {
                                tokens.Add(new Token(TokenType.Keyword, lowered, token.Line));
                            }
                            else
                            {
                                // Single dictionary lookup instead of ContainsKey + indexer.
                                List<Token> define;
                                if (defines.TryGetValue(token.Value, out define))
                                {
                                    // Expand a #define by splicing in copies of its recorded tokens.
                                    foreach (Token defineToken in define)
                                        tokens.Add(new Token(defineToken.Type, defineToken.Value, defineToken.Line));
                                }
                                else
                                {
                                    tokens.Add(new Token(TokenType.Word, token.Value, token.Line));
                                }
                            }
                            break;
                        }

                    case BasicTokenType.Delimiter:
                        {
                            TokenType delimiterType;
                            if (delimiters.TryGetValue(token.Value, out delimiterType))
                            {
                                tokens.Add(new Token(delimiterType, token.Value, token.Line));
                                break;
                            }

                            // A ':' directly after a word retypes that word as a label.
                            if (token.Value == ":" && tokens.Count > 0)
                            {
                                var last = tokens[tokens.Count - 1];
                                if (last.Type == TokenType.Word)
                                {
                                    tokens.RemoveAt(tokens.Count - 1);
                                    tokens.Add(new Token(TokenType.Label, last.Value, last.Line));
                                    break;
                                }
                            }

                            if (token.Value == "#")
                            {
                                // Preprocessor directive: the next token names the directive.
                                token = sourceTokens[++i];
                                switch (token.Value)
                                {
                                    case "include":
                                        {
                                            BasicToken filenameToken = sourceTokens[++i];
                                            string includeSource;

                                            try
                                            {
                                                includeSource = File.ReadAllText(filenameToken.Value);
                                            }
                                            catch (Exception)
                                            {
                                                // Any I/O failure (missing file, bad path, permissions)
                                                // is reported as an assembler error at the directive's location.
                                                throw new AssemblerException(String.Format("Cannot open included file \"{0}\" at {2}:{1}.",
                                                    filenameToken.Value, filenameToken.Line, filenameToken.Filename));
                                            }

                                            // Recursively tokenize the included file and splice
                                            // its tokens into the current stream.
                                            var tokenizer = new Tokenizer(includeSource);
                                            tokenizer.Scan();

                                            ScanSource(tokenizer.Tokens);
                                            break;
                                        }

                                    case "define":
                                        {
                                            List<Token> defineTokens = new List<Token>();
                                            BasicToken name = sourceTokens[++i];

                                            // Collect every remaining token on the directive's line.
                                            // Bounds are checked BEFORE indexing — the original did
                                            // sourceTokens[++i] first and could read past the end.
                                            // The loop leaves i on the last collected token, so the
                                            // for-loop's i++ moves to the first token of the next line
                                            // (the original overshot and compensated with --i).
                                            while (i + 1 < sourceTokens.Count && sourceTokens[i + 1].Line == name.Line)
                                            {
                                                ++i;
                                                // NOTE(review): every captured token is stored as
                                                // TokenType.Number, mirroring the original behavior —
                                                // confirm this is intentional for non-numeric defines.
                                                defineTokens.Add(new Token(TokenType.Number, sourceTokens[i].Value, sourceTokens[i].Line));
                                            }

                                            defines.Add(name.Value, defineTokens);
                                            break;
                                        }

                                    default:
                                        throw new AssemblerException(String.Format("Unexpected preprocessor directive \"{0}\".", token.Value));
                                }
                                break;
                            }

                            throw new AssemblerException(String.Format("Unexpected delimiter '{0}'", token.Value));
                        }

                    case BasicTokenType.Number:
                        tokens.Add(new Token(TokenType.Number, token.Value, token.Line));
                        break;

                    case BasicTokenType.String:
                        tokens.Add(new Token(TokenType.String, token.Value, token.Line));
                        break;

                    default:
                        throw new AssemblerException(String.Format("Unhandled BasicToken {0}", token.Type));
                }
            }
        }
Beispiel #2
0
        /// <summary>
        /// Tokenizes the source into assembler tokens: opcodes, registers,
        /// words, labels, delimiters, numbers and strings.
        /// May be called at most once per instance.
        /// </summary>
        /// <exception cref="InvalidOperationException">Thrown if Scan() was already called.</exception>
        /// <exception cref="AssemblerException">
        /// Thrown on an unknown delimiter, a label containing more than one
        /// period, or an unhandled basic token type.
        /// </exception>
        public void Scan()
        {
            if (hasTokenized)
                throw new InvalidOperationException("Scan() has already been called");

            var basicTokenizer = new Tokenizer(source);
            basicTokenizer.Scan();

            var basicTokens = basicTokenizer.Tokens;
            for (var index = 0; basicTokens[index].Type != BasicTokenType.EndOfFile; index++)
            {
                var current = basicTokens[index];

                switch (current.Type)
                {
                    case BasicTokenType.Word:
                    {
                        Opcode opcode;
                        Register register;

                        // Classify the word: opcode first, then register, else a plain word.
                        if (Enum.TryParse(current.Value, true, out opcode) && opcode < Opcode.None)
                            tokens.Add(new Token(TokenType.Opcode, current.Value.ToLower(), current.Line));
                        else if (Enum.TryParse(current.Value, true, out register))
                            tokens.Add(new Token(TokenType.Register, current.Value.ToLower(), current.Line));
                        else
                            tokens.Add(new Token(TokenType.Word, current.Value, current.Line));

                        break;
                    }

                    case BasicTokenType.Delimiter:
                    {
                        // A ':' directly after a word retypes that word as a label.
                        if (current.Value == ":" && tokens.Count > 0)
                        {
                            var previous = tokens[tokens.Count - 1];
                            if (previous.Type == TokenType.Word)
                            {
                                if (previous.Value.Count(c => c == '.') > 1)
                                {
                                    throw new AssemblerException(string.Format("Label with more than one period on line {0}", previous.Line));
                                }

                                tokens.RemoveAt(tokens.Count - 1);
                                tokens.Add(new Token(TokenType.Label, previous.Value, previous.Line));
                                break;
                            }
                        }

                        // Map the remaining recognized delimiters to their token types.
                        TokenType delimiterType;
                        if (current.Value == ",")
                            delimiterType = TokenType.Comma;
                        else if (current.Value == "[")
                            delimiterType = TokenType.OpenBracket;
                        else if (current.Value == "]")
                            delimiterType = TokenType.CloseBracket;
                        else if (current.Value == "+")
                            delimiterType = TokenType.Plus;
                        else
                            throw new AssemblerException(string.Format("Unexpected delimiter '{0}' on line {1}", current.Value, current.Line));

                        tokens.Add(new Token(delimiterType, current.Value, current.Line));
                        break;
                    }

                    case BasicTokenType.Number:
                    {
                        tokens.Add(new Token(TokenType.Number, current.Value, current.Line));
                        break;
                    }

                    case BasicTokenType.String:
                    {
                        tokens.Add(new Token(TokenType.String, current.Value, current.Line));
                        break;
                    }

                    default:
                        throw new AssemblerException(string.Format("Unhandled BasicToken {0} on line {1}", current.Type, current.Line));
                }
            }

            hasTokenized = true;
        }
Beispiel #3
0
        /// <summary>
        /// Tokenizes the source and feeds the resulting basic tokens through
        /// ScanSource. May be called at most once per instance.
        /// </summary>
        /// <exception cref="InvalidOperationException">Thrown if Scan() was already called.</exception>
        public void Scan()
        {
            if (hasTokenized)
                throw new InvalidOperationException("Scan() has already been called");

            var basicTokenizer = new Tokenizer(source);
            basicTokenizer.Scan();
            ScanSource(basicTokenizer.Tokens);

            hasTokenized = true;
        }