Code Example #1
        public void IsEndOfStream_AtBeginningWithoutData_ReturnsTrue()
        {
            MemoryStream    memStream       = new MemoryStream();
            BaseInputStream testInputStream = new BaseInputStream(memStream);

            bool isEnd = testInputStream.IsEndOfStream();

            Assert.IsTrue(isEnd);
        }
Code Example #2
 //
 // Exception audit:
 //
 //  Verdict
 //    Exception wrapping is required.
 //
 //  Rationale
 //    `java.io.InputStream.close()` throws an exception, see:
 //
 //     https://developer.android.com/reference/java/io/InputStream?hl=en
 //
 public override void Close()
 {
     try {
         BaseInputStream.Close();
     } catch (Java.IO.IOException ex) when (JNIEnv.ShouldWrapJavaException(ex)) {
         throw new IOException(ex.Message, ex);
     }
 }
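For context, a minimal caller-side sketch of what the wrapping buys (a hypothetical caller, not from the project): .NET code can catch the BCL System.IO.IOException while the original Java exception stays reachable through InnerException.

 try
 {
     stream.Close(); // `stream` is any wrapper whose Close() uses the pattern above
 }
 catch (System.IO.IOException ex)
 {
     // The Java-side cause, when present, survives as InnerException.
     Console.Error.WriteLine(ex.InnerException?.ToString() ?? ex.ToString());
 }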
Code Example #3
        public void Next_WhenEndStream_ThrowsEndOfStreamException()
        {
            byte[] buff = new byte[3] {
                32, 14, 25
            };
            MemoryStream    memStream       = new MemoryStream(buff);
            BaseInputStream testInputStream = new BaseInputStream(memStream);

            // Three reads consume the stream; the fourth call to Next() is
            // the one expected to raise EndOfStreamException.
            for (int i = 0; i < 4; i++)
            {
                testInputStream.Next();
            }
        }
Code Example #4
 //
 // Exception audit:
 //
 //  Verdict
 //    Exception wrapping is required.
 //
 //  Rationale
 //    `java.io.InputStream.close()` throws an exception, see:
 //
 //     https://developer.android.com/reference/java/io/InputStream?hl=en
 //
 protected override void Dispose(bool disposing)
 {
     if (disposing && BaseInputStream != null)
     {
         try {
             BaseFileChannel = null;
             BaseInputStream.Close();
             BaseInputStream.Dispose();
         } catch (Java.IO.IOException ex) when (JNIEnv.ShouldWrapJavaException(ex)) {
             throw new IOException(ex.Message, ex);
         }
     }
 }
Code Example #5
        public void Next_WhenBuffersAreSwapped_ReturnsCorrectItem()
        {
            byte[] buff = new byte[10] {
                32, 14, 25, 14, 23, 42, 16, 21, 15, 76
            };
            MemoryStream    memStream       = new MemoryStream(buff);
            BaseInputStream testInputStream = new BaseInputStream(memStream, 3);

            for (int i = 0; i < 10; i++)
            {
                char curr = testInputStream.Next();
                Assert.AreEqual((char)buff[i], curr);
            }
        }
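The second constructor argument (3 here) presumably sets the internal buffer size, so the ten-byte input forces several refills and the loop checks that Next() stays correct across each boundary. A minimal sketch of the double-buffering the test name implies; all names and layout below are assumptions, not the project's implementation:

        // Minimal sketch; requires `using System.IO`. Keeping the old
        // buffer around as `previous` is what would let peeking or
        // backtracking work across a refill boundary.
        class DoubleBufferedReader
        {
            private readonly Stream source;
            private byte[] current, previous;
            private int    pos, filled;

            public DoubleBufferedReader(Stream source, int bufferSize)
            {
                this.source = source;
                current     = new byte[bufferSize];
                previous    = new byte[bufferSize];
                filled      = source.Read(current, 0, current.Length);
            }

            public char Next()
            {
                if (pos == filled)
                {
                    // Swap, then refill the (now) current buffer: the
                    // boundary a ten-byte input with a three-byte buffer
                    // crosses three times.
                    (current, previous) = (previous, current);
                    filled = source.Read(current, 0, current.Length);
                    pos    = 0;

                    if (filled == 0)
                    {
                        throw new EndOfStreamException();
                    }
                }
                return (char)current[pos++];
            }
        }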
Code Example #6
        public void Peek_AtPosition_ReturnsCorrectItemFromStream()
        {
            byte[] buff = new byte[8] {
                32, 14, 25, 14, 12, 51, 25, 33
            };
            MemoryStream    memStream       = new MemoryStream(buff);
            BaseInputStream testInputStream = new BaseInputStream(memStream);
            char            item;

            for (int i = 0; i < 5; i++)
            {
                item = testInputStream.Peek();
                Assert.AreEqual((char)buff[i], item);
                testInputStream.Next();
            }
        }
Code Example #7
        public void Line_NewLinesTracking_ReturnsCorrectNumberOfLines()
        {
            byte[] buff = new byte[10] {
                (byte)'\n', 32, 14, 25, 14, (byte)'\n', 42, 16, (byte)'\n', (byte)'\n'
            };
            MemoryStream    memStream       = new MemoryStream(buff);
            BaseInputStream testInputStream = new BaseInputStream(memStream);

            // Consume the entire ten-byte stream so the reader counts
            // every newline it passes.
            for (int i = 0; i < 10; i++)
            {
                testInputStream.Next();
            }

            Assert.AreEqual(4, testInputStream.Line);
        }
Code Example #8
        public void Tokenize_SkipCarriageReturn_ReturnsNextToken()
        {
            byte[]          buff        = Encoding.ASCII.GetBytes("\r\r");
            MemoryStream    memStream   = new MemoryStream(buff);
            BaseInputStream inputStream = new BaseInputStream(memStream);
            Lexer           lexer       = new Lexer(inputStream);

            List <Token> tokens = lexer.Tokenize((line, cause) => Assert.Fail());

            List <Token> expected = new List <Token>
            {
                new Token(TokenType.EOF)
            };

            CollectionAssert.AreEqual(tokens, expected);
        }
Code Example #9
        public void Tokenize_SingleLineCommentStopOnEndStream_ReturnsCommentToken()
        {
            byte[]          buff        = Encoding.ASCII.GetBytes("// some comment here  ");
            MemoryStream    memStream   = new MemoryStream(buff);
            BaseInputStream inputStream = new BaseInputStream(memStream);
            Lexer           lexer       = new Lexer(inputStream);

            List <Token> tokens   = lexer.Tokenize((line, cause) => Assert.Fail());
            List <Token> expected = new List <Token>
            {
                new WordToken("// some comment here  ", TokenType.Comment),
                new Token(TokenType.EOF)
            };

            CollectionAssert.AreEqual(tokens, expected);
        }
Code Example #10
        public void Peek_AtEnd_ReturnsEndItem()
        {
            byte[] buff = new byte[3] {
                32, 14, 25
            };
            MemoryStream    memStream       = new MemoryStream(buff);
            BaseInputStream testInputStream = new BaseInputStream(memStream);

            // Advance past all three bytes to reach the end of the stream.
            for (int i = 0; i < 3; i++)
            {
                testInputStream.Next();
            }
            char last = testInputStream.Peek();

            Assert.AreEqual(0, last);
        }
Code Example #11
        public void IsEndOfStream_WhenEnd_ReturnsTrue()
        {
            byte[] buff = new byte[3] {
                32, 14, 25
            };
            MemoryStream    memStream       = new MemoryStream(buff);
            BaseInputStream testInputStream = new BaseInputStream(memStream);

            // Advance past all three bytes to reach the end of the stream.
            for (int i = 0; i < 3; i++)
            {
                testInputStream.Next();
            }
            bool isEnd = testInputStream.IsEndOfStream();

            Assert.IsTrue(isEnd);
        }
Code Example #12
        public void Tokenize_MultilineCommentUnclosed_ReturnsCommentTokenAndCallsOnError()
        {
            byte[]          buff        = Encoding.ASCII.GetBytes("/* \n\n**\n this is comment ** ////gvj");
            MemoryStream    memStream   = new MemoryStream(buff);
            BaseInputStream inputStream = new BaseInputStream(memStream);
            Lexer           lexer       = new Lexer(inputStream);

            List <Token> tokens   = lexer.Tokenize((line, cause) => Assert.AreEqual(Lexer.UNCLOSED_MULTILINE_COMMENT, cause));
            List <Token> expected = new List <Token>
            {
                new WordToken("/* \n\n**\n this is comment ** ////gvj", TokenType.Comment),
                new Token(TokenType.EOF)
            };

            CollectionAssert.AreEqual(tokens, expected);
        }
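The (line, cause) callback is the lexer's only error channel: the tests assert on cause directly, but a caller can just as well collect diagnostics for later reporting. A hypothetical sketch:

            var errors = new List <string>();
            List <Token> tokens = lexer.Tokenize((line, cause) => errors.Add($"line {line}: {cause}"));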
Code Example #13
        public void Tokenize_Number_ReturnsNumberToken()
        {
            byte[]          buff        = Encoding.ASCII.GetBytes(" 1234");
            MemoryStream    memStream   = new MemoryStream(buff);
            BaseInputStream inputStream = new BaseInputStream(memStream);
            Lexer           lexer       = new Lexer(inputStream);

            List <Token> tokens   = lexer.Tokenize((line, cause) => Assert.Fail());
            List <Token> expected = new List <Token>
            {
                new Token(TokenType.Whitespace),
                new NumToken(1234),
                new Token(TokenType.EOF)
            };

            CollectionAssert.AreEqual(tokens, expected);
        }
Code Example #14
        public void Tokenize_StringSingleQuotesSingleQuotesCombination_ReturnsStringToken()
        {
            byte[]          buff        = Encoding.ASCII.GetBytes(" 'This is string \"inner\"'");
            MemoryStream    memStream   = new MemoryStream(buff);
            BaseInputStream inputStream = new BaseInputStream(memStream);
            Lexer           lexer       = new Lexer(inputStream);

            List <Token> tokens   = lexer.Tokenize((line, cause) => Assert.Fail());
            List <Token> expected = new List <Token>
            {
                new Token(TokenType.Whitespace),
                new WordToken("'This is string \"inner\"'", TokenType.String),
                new Token(TokenType.EOF)
            };

            CollectionAssert.AreEqual(tokens, expected);
        }
Code Example #15
        public void Tokenize_StringSingleQuotesUnclosed_ReturnsStringTokenAndCallsOnError()
        {
            byte[]          buff        = Encoding.ASCII.GetBytes(" 'This is string ");
            MemoryStream    memStream   = new MemoryStream(buff);
            BaseInputStream inputStream = new BaseInputStream(memStream);
            Lexer           lexer       = new Lexer(inputStream);

            List <Token> tokens   = lexer.Tokenize((line, cause) => Assert.AreEqual(cause, Lexer.UNCLOSED_STRING));
            List <Token> expected = new List <Token>
            {
                new Token(TokenType.Whitespace),
                new WordToken("'This is string ", TokenType.Error),
                new Token(TokenType.EOF)
            };

            CollectionAssert.AreEqual(tokens, expected);
        }
Code Example #16
        public void IsEndOfStream_WhenNotEnd_ReturnsFalse()
        {
            byte[] buff = new byte[3] {
                32, 14, 25
            };
            MemoryStream    memStream       = new MemoryStream(buff);
            BaseInputStream testInputStream = new BaseInputStream(memStream);

            bool isEnd;

            for (int i = 0; i < 2; i++)
            {
                isEnd = testInputStream.IsEndOfStream();
                testInputStream.Next();
                Assert.IsFalse(isEnd);
            }
        }
Code Example #17
        //
        // Exception audit:
        //
        //  Verdict
        //    Exception wrapping is required.
        //
        //  Rationale
        //    `java.io.InputStream.read(byte[], int, int)` throws an exception, see:
        //
        //     https://developer.android.com/reference/java/io/InputStream?hl=en#read(byte%5B%5D,%20int,%20int)
        //
        public override int Read(byte[] buffer, int offset, int count)
        {
            int res;

            try {
                res = BaseInputStream.Read(buffer, offset, count);
            } catch (Java.IO.IOException ex) when (JNIEnv.ShouldWrapJavaException(ex)) {
                throw new IOException(ex.Message, ex);
            }

            // Map Java's end-of-stream sentinel (-1) to the .NET
            // convention, where Read returns 0 at end of stream.
            if (res == -1)
            {
                return 0;
            }
            return res;
        }
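The -1 to 0 translation at the end matters because the two APIs use different end-of-stream sentinels: java.io.InputStream.read returns -1 when the stream is exhausted, while System.IO.Stream.Read must return 0. A small consumer sketch (Process is a hypothetical helper) showing the .NET read-loop idiom this preserves:

        // A return value of 0 ends the loop; without the translation
        // above, Java's -1 would leak through and break this idiom.
        byte[] buffer = new byte[4096];
        int    n;
        while ((n = stream.Read(buffer, 0, buffer.Length)) > 0)
        {
            Process(buffer, n); // hypothetical consumer
        }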
Code Example #18
        public void Tokenize_StringDoubleQuotesEscapeBackslash_ReturnsStringToken()
        {
            byte[]          buff        = Encoding.ASCII.GetBytes(" \"This is string \\\\ or this \\ \"");
            MemoryStream    memStream   = new MemoryStream(buff);
            BaseInputStream inputStream = new BaseInputStream(memStream);
            Lexer           lexer       = new Lexer(inputStream);

            List <Token> tokens   = lexer.Tokenize((line, cause) => Assert.Fail());
            List <Token> expected = new List <Token>
            {
                new Token(TokenType.Whitespace),
                new WordToken("\"This is string \\\\ or this \\ \"", TokenType.String),
                new Token(TokenType.EOF)
            };

            CollectionAssert.AreEqual(tokens, expected);
        }
Code Example #19
        public void Tokenize_MultilineComment_ReturnsCommentToken()
        {
            byte[]          buff        = Encoding.ASCII.GetBytes("/* \n\n**\n this is comment */**");
            MemoryStream    memStream   = new MemoryStream(buff);
            BaseInputStream inputStream = new BaseInputStream(memStream);
            Lexer           lexer       = new Lexer(inputStream);

            List <Token> tokens   = lexer.Tokenize((line, cause) => Assert.Fail());
            List <Token> expected = new List <Token>
            {
                new WordToken("/* \n\n**\n this is comment */", TokenType.Comment),
                new Token(TokenType.Star),
                new Token(TokenType.Star),
                new Token(TokenType.EOF)
            };

            CollectionAssert.AreEqual(tokens, expected);
        }
Code Example #20
        public void Tokenize_UnresolvedSymbol_ReturnsUnidentifiedTokenAndCallsOnError()
        {
            byte[]          buff        = Encoding.ASCII.GetBytes(" Object `");
            MemoryStream    memStream   = new MemoryStream(buff);
            BaseInputStream inputStream = new BaseInputStream(memStream);
            Lexer           lexer       = new Lexer(inputStream);

            List <Token> tokens   = lexer.Tokenize((line, cause) => Assert.AreEqual(cause, Lexer.UNIDENTIFIED_SYMBOL));
            List <Token> expected = new List <Token>
            {
                new Token(TokenType.Whitespace),
                new WordToken("Object", TokenType.Object),
                new Token(TokenType.Whitespace),
                new WordToken("`", TokenType.Error),
                new Token(TokenType.EOF)
            };

            CollectionAssert.AreEqual(tokens, expected);
        }
Code Example #21
        public void Tokenize_ReservedOrIdentifier_ReturnsReservedToken()
        {
            byte[]          buff        = Encoding.ASCII.GetBytes(" Object var someIdentifier132 ");
            MemoryStream    memStream   = new MemoryStream(buff);
            BaseInputStream inputStream = new BaseInputStream(memStream);
            Lexer           lexer       = new Lexer(inputStream);

            List <Token> tokens   = lexer.Tokenize((line, cause) => Assert.Fail());
            List <Token> expected = new List <Token>
            {
                new Token(TokenType.Whitespace),
                new WordToken("Object", TokenType.Object),
                new Token(TokenType.Whitespace),
                new WordToken("var", TokenType.Var),
                new Token(TokenType.Whitespace),
                new WordToken("someIdentifier132", TokenType.Identifier),
                new Token(TokenType.Whitespace),
                new Token(TokenType.EOF)
            };

            CollectionAssert.AreEqual(tokens, expected);
        }
Code Example #22
File: Program.cs Project: thetedgt/IPL
        static int Main(string[] args)
        {
            if (args.Length < 2)
            {
                Console.WriteLine("Input and output destinations not specified!");
                return 1;
            }

            try
            {
                BaseInputStream stream = new BaseInputStream(args[0]);
                Lexer lex = new Lexer(stream);
                List<Token> tokens = lex.Tokenize((line, cause) => Console.Out.WriteLine($"On line {line} error: {cause}"));
                SyntaxHighlighter.GenerateHtml(tokens, args[1]);
            }
            catch (Exception e)
            {
                Console.WriteLine(e.Message);
                return 1;
            }

            return 0;
        }
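Usage note: the two positional arguments are the input source path (handed to BaseInputStream) and the HTML output path (handed to SyntaxHighlighter.GenerateHtml). Assuming the project builds to a console binary named, say, IPL.exe (the name is hypothetical), an invocation would look like IPL.exe input.src output.html; missing arguments or any failure in the pipeline yields exit code 1.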
Code Example #23
        public void Tokenize_Operators_ReturnsOperatorTokens()
        {
            byte[]          buff        = Encoding.ASCII.GetBytes("(){},.-+;*/%~&^|?:[]=!><\\=====!==!=>=<=--++>><<&&||*=/=%=+=-=<<=>>=&=^=|=");
            MemoryStream    memStream   = new MemoryStream(buff);
            BaseInputStream inputStream = new BaseInputStream(memStream);
            Lexer           lexer       = new Lexer(inputStream);

            List <Token> tokens   = lexer.Tokenize((line, cause) => Assert.Fail());
            List <Token> expected = new List <Token>
            {
                new Token(TokenType.LeftParen),
                new Token(TokenType.RightParen),
                new Token(TokenType.LeftBrace),
                new Token(TokenType.RightBrace),
                new Token(TokenType.Comma),
                new Token(TokenType.Dot),
                new Token(TokenType.Minus),
                new Token(TokenType.Plus),
                new Token(TokenType.Semicolon),
                new Token(TokenType.Star),
                new Token(TokenType.Division),
                new Token(TokenType.Modulo),
                new Token(TokenType.BitwiseNot),
                new Token(TokenType.BitwiseAnd),
                new Token(TokenType.BitwiseXor),
                new Token(TokenType.BitwiseOr),
                new Token(TokenType.QuestionMark),
                new Token(TokenType.Colon),
                new Token(TokenType.LeftSquareBracket),
                new Token(TokenType.RightSquareBracket),
                new Token(TokenType.Equal),
                new Token(TokenType.Bang),
                new Token(TokenType.Greater),
                new Token(TokenType.Less),
                new Token(TokenType.Backslash),

                new WordToken("===", TokenType.StrictEqual),
                new WordToken("==", TokenType.EqualEqual),
                new WordToken("!==", TokenType.StrictNotEqual),
                new WordToken("!=", TokenType.NotEqual),
                new WordToken(">=", TokenType.GreaterEqual),
                new WordToken("<=", TokenType.LessEqual),
                new WordToken("--", TokenType.MinusMinus),
                new WordToken("++", TokenType.PlusPlus),
                new WordToken(">>", TokenType.RightShift),
                new WordToken("<<", TokenType.LeftShift),
                new WordToken("&&", TokenType.LogicalAnd),
                new WordToken("||", TokenType.LogicalOr),
                new WordToken("*=", TokenType.StarEqual),
                new WordToken("/=", TokenType.DivideEqual),
                new WordToken("%=", TokenType.ModuloEqual),
                new WordToken("+=", TokenType.PlusEqual),
                new WordToken("-=", TokenType.MinusEqual),
                new WordToken("<<=", TokenType.LeftShiftEqual),
                new WordToken(">>=", TokenType.RightShiftEqual),
                new WordToken("&=", TokenType.BitwiseAndEqual),
                new WordToken("^=", TokenType.BitwiseXorEqual),
                new WordToken("|=", TokenType.BitwiseOrEqual),
                new Token(TokenType.EOF)
            };

            CollectionAssert.AreEqual(tokens, expected);
        }
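Note the maximal-munch behavior these expectations encode: the run ===== lexes as === followed by ==, and !==!= as !== followed by !=, i.e. the lexer always takes the longest operator that matches at the current position.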