Пример #1
0
        /// <summary>
        /// Creates a MultiLineString using the next token in the stream.
        /// </summary>
        /// <param name="tokenizer">
        /// Tokenizer over a stream of text in Well-known Text
        /// format. The next tokens must form a &lt;MultiLineString Text&gt;.
        /// </param>
        /// <returns>
        /// A MultiLineString specified by the next token in the stream.
        /// </returns>
        /// <exception cref="IOException">
        /// If an I/O error occurs.
        /// </exception>
        /// <exception cref="GeometryIOException">
        /// If an unexpected token was encountered.
        /// </exception>
        private MultiLineString ReadMultiLineString(StreamTokenizer tokenizer)
        {
            // "EMPTY" yields a multi-linestring with no members.
            if (GetNextEmptyOrOpener(tokenizer).Equals(WktEmpty))
            {
                return m_objFactory.CreateMultiLineString(
                    new LineString[] {});
            }

            GeometryList components = new GeometryList();

            // Collect comma-separated linestrings until the closing ")".
            do
            {
                components.Add(ReadLineString(tokenizer));
            }
            while (GetNextCloserOrComma(tokenizer).Equals(TokenComma));

            return m_objFactory.CreateMultiLineString(
                components.ToLineStringArray());
        }
Пример #2
0
        /// <summary>
        /// Creates a MultiPolygon using the next token in the stream.
        /// </summary>
        /// <param name="tokenizer">
        /// Tokenizer over a stream of text in Well-known Text
        /// format. The next tokens must form a &lt;MultiPolygon Text&gt;.
        /// </param>
        /// <returns>
        /// A MultiPolygon specified by the next token in the stream.
        /// </returns>
        /// <exception cref="IOException">
        /// If an I/O error occurs.
        /// </exception>
        /// <exception cref="GeometryIOException">
        /// If an unexpected token was encountered, or if the coordinates
        /// used to create the Polygon shells and holes do not form closed
        /// linestrings.
        /// </exception>
        private MultiPolygon ReadMultiPolygon(StreamTokenizer tokenizer)
        {
            // "EMPTY" yields a multi-polygon with no members.
            if (GetNextEmptyOrOpener(tokenizer).Equals(WktEmpty))
            {
                return m_objFactory.CreateMultiPolygon(new Polygon[] {});
            }

            GeometryList members = new GeometryList();

            // Collect comma-separated polygons until the closing ")".
            do
            {
                members.Add(ReadPolygon(tokenizer));
            }
            while (GetNextCloserOrComma(tokenizer).Equals(TokenComma));

            return m_objFactory.CreateMultiPolygon(
                members.ToPolygonArray());
        }
Пример #3
0
        /// <summary>
        /// Reads a single coordinate (X Y, plus optional extra ordinates)
        /// from the tokenizer stream.
        /// </summary>
        /// <param name="tokenizer">Tokenizer positioned before the ordinates.</param>
        /// <returns>
        /// In measured mode: a CoordinateM, or a Coordinate3DM when a fourth
        /// number follows. Otherwise: a Coordinate, or a Coordinate3D when a
        /// third number follows.
        /// </returns>
        private Coordinate GetCoordinate(StreamTokenizer tokenizer)
        {
            double x = GetNextNumber(tokenizer);
            double y = GetNextNumber(tokenizer);

            if (m_bMeasured)
            {
                // Measured mode: a third ordinate is always read; an
                // optional fourth makes it a 3D measured coordinate.
                double t = GetNextNumber(tokenizer);
                if (IsNumberNext(tokenizer))
                {
                    return new Coordinate3DM(x, y, t, GetNextNumber(tokenizer));
                }
                return new CoordinateM(x, y, t);
            }

            // Plain mode: an optional third number is the Z ordinate.
            if (IsNumberNext(tokenizer))
            {
                return new Coordinate3D(x, y, GetNextNumber(tokenizer));
            }
            return new Coordinate(x, y);
        }
Пример #4
0
        /// <summary>
        /// Reads the next token and converts it to a double. The token must
        /// be a word that is either the NaN symbol or a number written with
        /// '.' as the decimal separator.
        /// </summary>
        /// <param name="tokenizer">Tokenizer positioned before the number.</param>
        /// <returns>The parsed value, or Double.NaN for the NaN symbol.</returns>
        /// <exception cref="WKTParseException">
        /// If the next token is not a word, or cannot be parsed as a number.
        /// </exception>
        private double GetNextNumber(StreamTokenizer tokenizer)
        {
            int type = tokenizer.NextToken();

            if (type == StreamTokenizer.TT_WORD)
            {
                if (tokenizer.StringValue.Equals(NAN_SYMBOL))
                {
                    return(Double.NaN);
                }

                try
                {
                    // WKT always uses '.' as the decimal separator, so parse
                    // with the invariant culture (equivalent to the original
                    // per-call NumberFormatInfo, without the allocation).
                    return(Double.Parse(tokenizer.StringValue,
                                        CultureInfo.InvariantCulture));
                }
                catch (FormatException ex)
                {
                    throw new WKTParseException(ex);
                }
            }

            // Fixed typo: message previously read "Excepted: number".
            throw new WKTParseException("Expected: number");
        }
Пример #5
0
        /// <summary>
        /// Verifies that GetNumericValue returns the value of a numeric
        /// token and fails for a token that is not a number.
        /// </summary>
        public void TestReadNumber()
        {
            string          test1     = "this is a 123.5 notAnumber";
            TextReader      reader    = new StringReader(test1);
            StreamTokenizer tokenizer = new StreamTokenizer(reader, true);

            tokenizer.NextToken();       // this
            tokenizer.NextToken();       // is
            tokenizer.NextToken();       // a
            tokenizer.NextToken();       // 123.5

            double number = tokenizer.GetNumericValue();

            Assertion.AssertEquals("test1", 123.5, number);

            tokenizer.NextToken();       // notAnumber

            // BUG FIX: the original called Assertion.Fail inside the try
            // block with a bare catch, so the failure exception was
            // swallowed and the test could never fail. Record the outcome
            // and assert after the try/catch instead.
            bool threw = false;
            try
            {
                tokenizer.GetNumericValue();
            }
            catch
            {
                threw = true;
            }
            Assertion.Assert("This should fail because the token is not a number.", threw);
        }
Пример #6
0
        /// <summary>
        /// Parses the key/value pairs of a GML map node until the closing
        /// ']' token is reached.
        /// </summary>
        /// <param name="node">Name of the node being parsed (used in error text).</param>
        /// <param name="st">Tokenizer positioned inside the map.</param>
        /// <returns>The parsed key/value pairs.</returns>
        /// <exception cref="IOException">If the stream ends before ']'.</exception>
        private IDictionary <string, object> ParseMap(string node, StreamTokenizer st)
        {
            if (string.IsNullOrWhiteSpace(node))
            {
                throw new ArgumentNullException(nameof(node));
            }
            if (st == null)
            {
                throw new ArgumentNullException(nameof(st));
            }

            var result = new Dictionary <string, object>();

            while (HasNext(st))
            {
                var tokenType = st.Ttype;
                if (!NotLineBreak(tokenType))
                {
                    continue;       // skip line breaks
                }
                if (tokenType == ']')
                {
                    return(result); // end of map
                }

                // Anything else is a key followed by its value.
                var key = st.StringValue;
                result[key] = ParseValue(key, st);
            }
            throw new IOException(string.Concat(node, " incomplete"));
        }
Пример #7
0
        /// <summary>
        /// Creates a Polygon using the next token in the stream.
        /// </summary>
        /// <param name="tokenizer">
        /// Tokenizer over a stream of text in Well-known Text
        /// format. The next tokens must form a &lt;Polygon Text&gt;.
        /// </param>
        /// <returns>
        /// A Polygon specified by the next token in the stream.
        /// </returns>
        /// <exception cref="GeometryIOException">
        /// If the coordinates used to create the Polygon shell and holes do
        /// not form closed linestrings, or if an unexpected token was
        /// encountered.
        /// </exception>
        /// <exception cref="IOException">
        /// If an I/O error occurs.
        /// </exception>
        private Polygon ReadPolygon(StreamTokenizer tokenizer)
        {
            if (GetNextEmptyOrOpener(tokenizer).Equals(WktEmpty))
            {
                // An empty polygon has an empty shell and no holes.
                LinearRing emptyShell =
                    m_objFactory.CreateLinearRing(new Coordinate[] {});

                return m_objFactory.CreatePolygon(emptyShell, new LinearRing[] {});
            }

            // The first ring is the shell; any further rings are holes.
            LinearRing   shell = ReadLinearRing(tokenizer);
            GeometryList holes = new GeometryList();

            while (GetNextCloserOrComma(tokenizer).Equals(TokenComma))
            {
                holes.Add(ReadLinearRing(tokenizer));
            }

            return m_objFactory.CreatePolygon(shell, holes.ToLinearRingArray());
        }
Пример #8
0
        /// <summary>
        /// Parses a GML document, consuming tokens until the top-level
        /// graph element has been read completely.
        /// </summary>
        /// <param name="st">Tokenizer over the GML text.</param>
        /// <exception cref="IOException">If no complete graph is found.</exception>
        public void Parse(StreamTokenizer st)
        {
            if (st == null)
            {
                throw new ArgumentNullException(nameof(st));
            }

            while (HasNext(st))
            {
                var tokenType = st.Ttype;
                if (!NotLineBreak(tokenType))
                {
                    continue;   // ignore line breaks
                }

                var word = st.StringValue;
                if (GmlTokens.Graph == word)
                {
                    ParseGraph(st);

                    // A graph followed by end-of-stream means we are done.
                    if (!HasNext(st))
                    {
                        return;
                    }
                }
            }
            throw new IOException("Graph not complete");
        }
Пример #9
0
        /// <summary>
        /// Solves all test cases read from <paramref name="input"/>, writing
        /// one "Case #i: ..." line per case to <paramref name="output"/>.
        /// For each case, participants at positions x with speeds v must
        /// reach the barrier b within time t; at most k of them qualify and
        /// the answer counts the swaps needed to move them to the front.
        /// </summary>
        /// <param name="input">Reader supplying the test data.</param>
        /// <param name="output">Writer for the results; closed when done.</param>
        public void Solve(TextReader input, TextWriter output)
        {
            var scanner = new StreamTokenizer(input);
            int testNum = scanner.NextInt();
            // NOTE: the original declared an unused Random instance here;
            // it has been removed.

            foreach (int caseId in Enumerable.Range(1, testNum))
            {
                int n = scanner.NextInt();
                int k = scanner.NextInt();
                int b = scanner.NextInt();
                int t = scanner.NextInt();
                var x = Enumerable.Range(0, n).Select(i => scanner.NextInt()).ToArray();
                var v = Enumerable.Range(0, n).Select(i => scanner.NextInt()).ToArray();

                int answer = 0;
                // Scan right to left; j tracks the slot the next qualifying
                // participant must occupy, so (j - i) is its swap cost.
                for (int i = n - 1, j = n - 1; i >= 0 && k > 0; i--)
                {
                    if (x[i] + v[i] * t >= b)
                    {
                        k--;
                        answer += j - i;
                        j--;
                    }
                }

                if (k > 0)
                {
                    output.WriteLine("Case #{0}: {1}", caseId, "IMPOSSIBLE");
                }
                else
                {
                    output.WriteLine("Case #{0}: {1}", caseId, answer);
                }
            }
            output.Close();
        }
Пример #10
0
 /// <summary>
 /// Creates a fresh tokenizer with a small read buffer before each test.
 /// </summary>
 public void Before_each_test()
 {
     _tokenizer = new StreamTokenizer
     {
         ReadBufferSize = 10
     };
 }
Пример #11
0
        /// <summary>
        /// Tokenizes <paramref name="text"/> and returns the first character
        /// of the first token, i.e. the unescaped character.
        /// </summary>
        private static char Unescape(String text)
        {
            var parser = new StreamTokenizer(new StringReader(text));
            parser.NextToken();

            return parser.StringValue[0];
        }
Пример #12
0
        /// <summary>
        /// Reads the next token and returns it as a word: the token's text
        /// for word tokens, or the canonical shared constant for '(' , ')'
        /// and ','.
        /// </summary>
        /// <param name="tokenizer">Tokenizer positioned before the word.</param>
        /// <returns>The next word, or a shared punctuation constant.</returns>
        /// <exception cref="WKTParseException">
        /// If the token type is unknown or an I/O error occurs.
        /// </exception>
        private static String GetNextWord(StreamTokenizer tokenizer)
        {
            try
            {
                int type = tokenizer.NextToken();

                switch (type)
                {
                case StreamTokenizer.TT_WORD:
                    String word = tokenizer.StringValue;

                    // EMPTY is returned as the shared constant so callers
                    // may compare by reference as well as by value.
                    if (word.Equals(EMPTY))
                    {
                        return(EMPTY);
                    }
                    return(word);

                case '(':
                    return(L_PAREN);

                case ')':
                    return(R_PAREN);

                case ',':
                    return(COMMA);
                }

                // Fixed typo: message previously read "Uknown type".
                throw new WKTParseException($"Unknown type: '{( char )type}'");
            }
            catch (IOException ex)
            {
                throw new WKTParseException(ex);
            }
        }
Пример #13
0
        /// <summary>
        /// Converts a Well-known Text representation to a Geometry.
        /// </summary>
        /// <param name="reader">A reader, which will return a "Geometry Tagged Text"
        /// string (see the OpenGIS Simple Features Specification)
        /// </param>
        /// <returns> A Geometry read from reader. </returns>
        /// <exception cref="GeometryIOException">
        /// If a parsing problem occurs.
        /// </exception>
        public override Geometry Read(TextReader reader)
        {
            // Reuse the tokenizer across calls; allocate it only once.
            if (m_objTokenizer == null)
            {
                m_objTokenizer = new StreamTokenizer(reader);
            }
            else
            {
                m_objTokenizer.Initialize(reader);
            }

            try
            {
                return(ReadGeometry(m_objTokenizer));
            }
            catch (Exception ex)
            {
                // The original had separate IOException and Exception
                // handlers with identical bodies; a single Exception handler
                // is equivalent: publish, then wrap and rethrow.
                ExceptionManager.Publish(ex);

                throw new GeometryIOException(ex.ToString(), ex);
            }
        }
Пример #14
0
        /// <summary>
        /// Creates a Point using the next token in the stream.
        /// </summary>
        /// <param name="tokenizer">
        /// Tokenizer over a stream of text in Well-known Text
        /// format. The next tokens must form a &lt;Point Text&gt;.
        /// </param>
        /// <returns>
        /// A Point specified by the next token in the stream.
        /// </returns>
        /// <exception cref="IOException">
        /// If an I/O error occurs.
        /// </exception>
        /// <exception cref="GeometryIOException">
        /// If an unexpected token was encountered.
        /// </exception>
        private Point ReadPoint(StreamTokenizer tokenizer)
        {
            if (GetNextEmptyOrOpener(tokenizer).Equals(WktEmpty))
            {
                // "EMPTY" maps to a point with no coordinate.
                return m_objFactory.CreatePoint((Coordinate)null);
            }

            // Honour the precision model when requested.
            Coordinate coord = m_bApplyPrecision
                ? GetPreciseCoordinate(tokenizer)
                : GetCoordinate(tokenizer);

            Point point = m_objFactory.CreatePoint(coord);

            GetNextCloser(tokenizer);   // consume the closing ")"

            return point;
        }
Пример #15
0
        /// <summary>
        /// Demonstrates StreamTokenizer usage: tokenizes an in-memory sample
        /// stream and shows how to skip '#' and '//' style comments.
        /// </summary>
        static void Main(string[] args)
        {
            // DEMO BOILERPLATE:
            // create a test stream, with some sample text in it
            MemoryStream buffer = new MemoryStream();
            StreamWriter writer = new StreamWriter(buffer);

            writer.WriteLine("This is a       sample stream");
            writer.WriteLine("with\tsome # this is a comment");
            writer.WriteLine("   (example) text // this is another comment");
            writer.WriteLine("and several /comments");
            writer.Flush();
            buffer.Seek(0, SeekOrigin.Begin);

            // and open a reader on it (a file works the same way):
            //StreamReader reader = new StreamReader("inputfile.txt");
            StreamReader reader = new StreamReader(buffer);

            // DEMO START:
            StreamTokenizer tok = new StreamTokenizer(reader,
                                                      null,                      // whitespace delimiters
                                                      new char [] { '#', '/' }); // comment start chars are tokens too

            for (string s = tok.NextToken(); s != null; s = tok.NextToken())
            {
                Console.WriteLine("line {0} token: '{1}'", tok.Linenum, s);

                if (s == "#")
                {
                    // A '#' comment runs to the end of the line.
                    Console.WriteLine("  (# comment, skipping to end of line)");
                    tok.SkipToEOL();
                }
                else if (s == "/")
                {
                    // Peek at the next token to see if it is a second slash.
                    string peek = tok.NextToken();
                    if (peek == null)
                    {
                        break;
                    }
                    if (peek == "/")
                    {
                        Console.WriteLine("  (// comment, skipping)");
                        tok.SkipToEOL();
                    }
                    else
                    {
                        // Not a comment; push the token back for the next read.
                        tok.Unget(peek);
                    }
                }
            }

            Console.WriteLine("[Hit return to exit]");
            Console.ReadLine();
        }
Пример #16
0
        /// <summary>
        /// Reads every token from <paramref name="reader"/> in one pass.
        /// </summary>
        /// <returns>All tokens produced by the tokenizer.</returns>
        internal IList <Token> Tokenize(TextReader reader)
        {
            IList <Token> tokens = new List <Token>();

            // Read directly all tokens into the list.
            new StreamTokenizer(reader).Tokenize(tokens);

            return(tokens);
        }
Пример #17
0
        /// <summary>
        /// Advances the tokenizer and reports whether a token other than
        /// end-of-file was read. Note that this consumes the token.
        /// </summary>
        private static bool HasNext(StreamTokenizer st)
        {
            if (st == null)
            {
                throw new ArgumentNullException(nameof(st));
            }

            var tokenType = st.NextToken();
            return tokenType != StreamTokenizer.TtEof;
        }
Пример #18
0
        /// <summary>
        /// Parses a GML edge element from the tokenizer.
        /// </summary>
        /// <param name="st">Tokenizer positioned at the edge element.</param>
        /// <returns>The edge's attributes as key/value pairs.</returns>
        private IDictionary <string, object> ParseEdge(StreamTokenizer st)
        {
            if (st == null)
            {
                throw new ArgumentNullException(nameof(st));
            }
            return ParseElement(st, GmlTokens.Edge);
        }
Пример #19
0
        /// <summary>
        /// A numeric token should be returned as its numeric value.
        /// </summary>
        public void NumericTokenTest()
        {
            // Arrange: a tokenizer over a single numeric token.
            Tokenizer = new StreamTokenizer(new StringReader("100"));

            // Act: advance to the token.
            Tokenizer.NextToken();

            // Assert: the numeric value matches.
            Assert.AreEqual(100, Tokenizer.GetNumericValue());
        }
Пример #20
0
        /// <summary>
        /// Verifies that a numeric token yields its numeric value.
        /// </summary>
        public void NumericTokenTest()
        {
            // Tokenizer over the single token "100".
            Tokenizer = new StreamTokenizer(new StringReader("100"));

            Tokenizer.NextToken();      // advance to the token

            // The token's numeric value should be 100.
            Assert.AreEqual(100, Tokenizer.GetNumericValue());
        }
Пример #21
0
 /// <summary>
 /// Configures the tokenizer for Latin-1 input: control characters and
 /// space are whitespace, every printable Latin-1 character is a word
 /// character, '!' starts a comment, '\'' quotes strings, and line ends
 /// are not significant.
 /// </summary>
 private void initTokenizer()
 {
     st = new StreamTokenizer(new StreamReader(i, Encoding.GetEncoding("ISO-8859-1")));

     // Build the syntax table from scratch rather than using defaults.
     st.ResetSyntax();
     st.WhitespaceChars(0, 32);   // control chars and space
     st.WordChars(33, 255);       // all printable Latin-1
     st.CommentChar('!');
     st.QuoteChar('\'');
     st.EolIsSignificant(false);
 }
Пример #22
0
        /// <summary>
        /// Returns the next ";" in the stream.
        /// </summary>
        /// <param name="tokenizer">
        /// Tokenizer over a stream of text in Well-known Text
        /// format. The next token must be ";".
        /// </param>
        /// <returns>The next ";" in the stream.</returns>
        /// <exception cref="GeometryIOException">
        /// If the next token is not ";".
        /// </exception>
        /// <exception cref="IOException">
        /// If an I/O error occurs.
        /// </exception>
        private string GetNextSemi(StreamTokenizer tokenizer)
        {
            string word = GetNextWord(tokenizer);

            if (!word.Equals(TokenSemi))
            {
                throw new GeometryIOException("Expected ';' but encountered '" + word + "'");
            }

            return word;
        }
Пример #23
0
        /// <summary>
        /// Returns the next "EMPTY" or "(" in the stream as uppercase text.
        /// </summary>
        /// <param name="tokenizer">
        /// Tokenizer over a stream of text in Well-known Text
        /// format. The next token must be "EMPTY" or "(".
        /// </param>
        /// <returns>
        /// The next "EMPTY" or "(" in the stream as uppercase text.
        /// </returns>
        /// <exception cref="GeometryIOException">
        /// If the next token is not "EMPTY" or "(".
        /// </exception>
        /// <exception cref="IOException">
        /// If an I/O error occurs.
        /// </exception>
        private string GetNextEmptyOrOpener(StreamTokenizer tokenizer)
        {
            string word = GetNextWord(tokenizer);

            if (!word.Equals(WktEmpty) && !word.Equals(TokenLParan))
            {
                throw new GeometryIOException("Expected 'EMPTY' or '(' but encountered '" + word + "'");
            }

            return word;
        }
Пример #24
0
        /// <summary>
        /// Returns the next ")" or "," in the stream.
        /// </summary>
        /// <param name="tokenizer">
        /// Tokenizer over a stream of text in Well-known Text
        /// format. The next token must be ")" or ",".
        /// </param>
        /// <returns>The next ")" or "," in the stream.</returns>
        /// <exception cref="GeometryIOException">
        /// If the next token is not ")" or ",".
        /// </exception>
        /// <exception cref="IOException">
        /// If an I/O error occurs.
        /// </exception>
        private string GetNextCloserOrComma(StreamTokenizer tokenizer)
        {
            string nextWord = GetNextWord(tokenizer);

            if (nextWord.Equals(TokenComma) || nextWord.Equals(TokenRParan))
            {
                return(nextWord);
            }

            throw new GeometryIOException("Expected ')' or ',' but encountered '" + nextWord + "'");
        }
Пример #25
0
        /// <summary>
        /// Reads a point's coordinate sequence and builds a Point from the
        /// first coordinate pair.
        /// </summary>
        /// <returns>The point, or null when no coordinates were read.</returns>
        private Point ReadPointText(StreamTokenizer tokenizer)
        {
            List <double[]> sequence = GetCoordinateSequence(tokenizer, false);

            if (sequence == null)
            {
                return null;
            }

            double[] first = sequence[0];
            return new Point(this.srs).SetX(first[0]).SetY(first[1]);
        }
Пример #26
0
        /// <summary>
        /// Consumes the next token, which must be ")" or ",".
        /// </summary>
        /// <param name="tokenizer">Tokenizer positioned before the token.</param>
        /// <returns>The ")" or "," token.</returns>
        /// <exception cref="WKTParseException">If the token is neither.</exception>
        private static String GetNextCloserOrComma(StreamTokenizer tokenizer)
        {
            String nextWord = GetNextWord(tokenizer);

            if (nextWord.Equals(COMMA) || nextWord.Equals(R_PAREN))
            {
                return(nextWord);
            }

            // Fixed typo: message previously read "Excepted".
            throw new WKTParseException($"Expected: {COMMA} or {R_PAREN}");
        }
Пример #27
0
 /// <summary>
 /// Builds a tokenizer over <paramref name="s"/> configured for the
 /// lexer's grammar: double-quoted strings, numeric literals, ',' '('
 /// ')' as single-character tokens, and '$'/'_' allowed inside words.
 /// </summary>
 internal Lexer(string s)
 {
     tok = new StreamTokenizer(new CharArrayReader(s.ToCharArray()));

     tok.QuoteChar('"');
     tok.ParseNumbers();
     tok.OrdinaryChar(',');
     tok.OrdinaryChar('(');
     tok.OrdinaryChar(')');
     tok.WordChars('$', '$');
     tok.WordChars('_', '_');
 }
Пример #28
0
        /// <summary>
        /// Consumes the next token, which must be ")".
        /// </summary>
        /// <param name="tokenizer">Tokenizer positioned before the token.</param>
        /// <returns>The ")" token.</returns>
        /// <exception cref="WKTParseException">If the token is not ")".</exception>
        private String GetNextCloser(StreamTokenizer tokenizer)
        {
            String nextWord = GetNextWord(tokenizer);

            if (nextWord.Equals(R_PAREN))
            {
                return(nextWord);
            }

            // Fixed typo: message previously read "Excepted".
            throw new WKTParseException($"Expected: {R_PAREN}");
        }
Пример #29
0
        /// <summary>
        /// Consumes the next token, which must be "EMPTY" or "(".
        /// </summary>
        /// <param name="tokenizer">Tokenizer positioned before the token.</param>
        /// <returns>The "EMPTY" or "(" token.</returns>
        /// <exception cref="WKTParseException">If the token is neither.</exception>
        private static String GetNextEmptyOrOpener(StreamTokenizer tokenizer)
        {
            String nextWord = GetNextWord(tokenizer);

            if (nextWord.Equals(EMPTY) || nextWord.Equals(L_PAREN))
            {
                return(nextWord);
            }

            // Fixed typo: message previously read "Excepted".
            throw new WKTParseException($"Expected: {EMPTY} or {L_PAREN}");
        }
Пример #30
0
        /// <summary>
        /// Returns the next word in the stream as uppercase text.
        /// </summary>
        /// <param name="tokenizer">
        /// Tokenizer over a stream of text in Well-known Text
        /// format. The next token must be a word.
        /// </param>
        /// <returns>
        /// The next word in the stream as uppercase text, or null when the
        /// tokenizer has no further tokens.
        /// </returns>
        /// <exception cref="GeometryIOException">
        /// If the next token is not a word.
        /// </exception>
        /// <exception cref="IOException">
        /// If an I/O error occurs.
        /// </exception>
        private string GetNextWord(StreamTokenizer tokenizer)
        {
            Token token = null;

            if (!tokenizer.NextToken(out token))
            {
                return null;
            }

            TokenType type = token.Type;

            switch (type)
            {
            case TokenType.Eof:
                throw new GeometryIOException("Expected word but encountered end of stream");

            case TokenType.Eol:
                throw new GeometryIOException("Expected word but encountered end of line");

            // Float and Integer tokens raise the same error.
            case TokenType.Float:
                throw new GeometryIOException("Expected word but encountered number: " + token.StringValue);

            case TokenType.Integer:
                throw new GeometryIOException("Expected word but encountered number: " + token.StringValue);

            case TokenType.Word:
                // Words are normalised to uppercase independent of locale.
                return token.StringValue.ToUpper(CultureInfo.InvariantCulture);

            default:
            {
                // Punctuation tokens pass through as their shared constants.
                string text = token.StringValue;
                if (text == TokenLParan)
                {
                    return TokenLParan;
                }
                if (text == TokenRParan)
                {
                    return TokenRParan;
                }
                if (text == TokenComma)
                {
                    return TokenComma;
                }
            }
            break;
            }

            Debug.Assert(false, "Should never reach here: Encountered unexpected StreamTokenizer type: " + type);

            return null;
        }
Пример #31
0
        /// <summary>
        /// Returns the next number in the stream.
        /// </summary>
        /// <param name="tokenizer">
        /// Tokenizer over a stream of text in Well-known Text format.
        /// The next token must be a number.
        /// </param>
        /// <returns>
        /// The next number in the stream, or double.NaN when the tokenizer
        /// has no further tokens.
        /// </returns>
        /// <exception cref="GeometryIOException">
        /// If the next token is not a number.
        /// </exception>
        /// <exception cref="IOException">
        /// If an I/O error occurs.
        /// </exception>
        private double GetNextNumber(StreamTokenizer tokenizer)
        {
            Token token = null;

            if (!tokenizer.NextToken(out token))
            {
                return double.NaN;
            }

            TokenType type = token.Type;

            switch (type)
            {
            case TokenType.Eof:
                throw new GeometryIOException("Expected number but encountered end of stream");

            case TokenType.Eol:
                throw new GeometryIOException("Expected number but encountered end of line");

            // Both numeric token kinds convert via the reader's provider.
            case TokenType.Float:
            case TokenType.Integer:
                return Convert.ToDouble(token.Object, m_objProvider);

            case TokenType.Word:
                throw new GeometryIOException("Expected number but encountered word: " + token.StringValue);

            default:
            {
                string text = token.StringValue;
                if (text == TokenLParan)
                {
                    throw new GeometryIOException("Expected number but encountered '('");
                }
                if (text == TokenRParan)
                {
                    throw new GeometryIOException("Expected number but encountered ')'");
                }
                if (text == TokenComma)
                {
                    throw new GeometryIOException("Expected number but encountered ','");
                }
            }
            break;
            }

            Debug.Assert(false, "Should never reach here: Encountered unexpected StreamTokenizer type: " + type);
            return 0;
        }
Пример #32
0
        /// <summary>
        /// Set the params (analyzerName only),  Comma-separate list of Analyzer class names.  If the Analyzer lives in
        /// Lucene.Net.Analysis, the name can be shortened by dropping the Lucene.Net.Analysis part of the Fully Qualified Class Name.
        /// <para/>
        /// Analyzer names may also refer to previously defined AnalyzerFactory's.
        /// <para/>
        /// Example Declaration:
        /// <code>
        /// {"NewAnalyzer" NewAnalyzer(WhitespaceAnalyzer, SimpleAnalyzer, StopAnalyzer, Standard.StandardAnalyzer) >
        /// </code>
        /// <para/>
        /// Example AnalyzerFactory usage:
        /// <code>
        /// -AnalyzerFactory(name:'whitespace tokenized',WhitespaceTokenizer)
        /// -NewAnalyzer('whitespace tokenized')
        /// </code>
        /// </summary>
        /// <param name="params">analyzerClassName, or empty for the StandardAnalyzer</param>
        public override void SetParams(string @params)
        {
            base.SetParams(@params);
            StreamTokenizer stok = new StreamTokenizer(new StringReader(@params));

            // Treat both quote styles as strings and ',' as an ordinary
            // separator token; line breaks carry no meaning here.
            stok.QuoteChar('"');
            stok.QuoteChar('\'');
            stok.EndOfLineIsSignificant = false;
            stok.OrdinaryChar(',');
            try
            {
                while (stok.NextToken() != StreamTokenizer.TokenType_EndOfStream)
                {
                    switch (stok.TokenType)
                    {
                    case ',':
                    {
                        // Do nothing
                        break;
                    }

                    // Quoted strings and bare words are both analyzer names.
                    case '\'':
                    case '\"':
                    case StreamTokenizer.TokenType_Word:
                    {
                        analyzerNames.Add(stok.StringValue);
                        break;
                    }

                    default:
                    {
                        throw RuntimeException.Create("Unexpected token: " + stok.ToString());
                    }
                    }
                }
            }
            catch (Exception e) when(e.IsRuntimeException())
            {
                // Messages already carrying a line prefix are rethrown as-is;
                // others get the algorithm-file line number prepended.
                if (e.Message.StartsWith("Line #", StringComparison.Ordinal))
                {
                    throw; // LUCENENET: CA2200: Rethrow to preserve stack details (https://docs.microsoft.com/en-us/visualstudio/code-quality/ca2200-rethrow-to-preserve-stack-details)
                }
                else
                {
                    throw RuntimeException.Create("Line #" + (stok.LineNumber + AlgLineNum) + ": ", e);
                }
            }
            catch (Exception t) when(t.IsThrowable())
            {
                throw RuntimeException.Create("Line #" + (stok.LineNumber + AlgLineNum) + ": ", t);
            }
        }
Пример #33
0
 /// <summary>
 /// Creates a new WaebricLexer which tokenizes a given stream.
 /// </summary>
 /// <param name="inputStream">StreamReader to read from</param>
 public WaebricLexer(TextReader inputStream)
 {
     // Keep the stream and build the tokenizer over it.
     Stream = inputStream;
     tokenizer = new StreamTokenizer(Stream);
 }
Пример #34
0
        /// <summary>
        /// Splits <paramref name="s"/> into a flat list of lexemes: numbers,
        /// words, parentheses, commas and single-character operators.
        /// </summary>
        /// <exception cref="System.IO.IOException"></exception>
        public static AList<Lexeme> Tokenize(string s)
        {
            StreamTokenizer tokenizer = new StreamTokenizer(new StringReader(s));
            // Don't parse minus as part of numbers.
            tokenizer.OrdinaryChar('-');

            AList<Lexeme> tokBuf = new AList<Lexeme>();
            while (tokenizer.NextToken() != StreamTokenizer.TT_EOF)
            {
                switch (tokenizer.ttype)
                {
                    case StreamTokenizer.TT_NUMBER:
                    {
                        tokBuf.AddItem(new Lexeme(Lexeme.NUMBER, tokenizer.sval.ToString()));
                        break;
                    }

                    case StreamTokenizer.TT_WORD:
                    {
                        tokBuf.AddItem(new Lexeme(Lexeme.WORD, tokenizer.sval));
                        break;
                    }

                    default:
                    {
                        // BUG FIX: the original wrote (char)tokenizer.ttype.ToString(),
                        // which applies the cast to the *string* result instead of
                        // converting the token type to a character first. Convert
                        // once, then classify. Also removed an unreachable duplicate
                        // "break;".
                        string ch = ((char)tokenizer.ttype).ToString();
                        if (ch.Equals("("))
                        {
                            tokBuf.AddItem(new Lexeme(Lexeme.LPAREN, ch));
                        }
                        else if (ch.Equals(")"))
                        {
                            tokBuf.AddItem(new Lexeme(Lexeme.RPAREN, ch));
                        }
                        else if (ch.Equals(","))
                        {
                            tokBuf.AddItem(new Lexeme(Lexeme.COMMA, ch));
                        }
                        else
                        {
                            // Any other single character is an operator.
                            tokBuf.AddItem(new Lexeme(Lexeme.OPERATOR, ch));
                        }
                        break;
                    }
                }
            }
            return tokBuf;
        }
Пример #35
0
        public void TestWaebricInput()
        {
            //Tokenize a small Waebric module and verify every significant token in order
            Tokenizer = new StreamTokenizer(new StringReader("module test\n\nsite site/index.html : home()\nend"));

            int tokenIndex = 1;
            for (int token = Tokenizer.NextToken(); token != StreamTokenizer.EOF; token = Tokenizer.NextToken())
            {
                if (token == StreamTokenizer.LAYOUT)
                {
                    continue; //layout is ignored and does not advance the token index
                }
                switch (tokenIndex++)
                {
                    case 1: //module
                        Assert.AreEqual("module", Tokenizer.GetTextValue());
                        break;
                    case 2: //test
                        Assert.AreEqual("test", Tokenizer.GetTextValue());
                        break;
                    case 3: //site
                        Assert.AreEqual("site", Tokenizer.GetTextValue());
                        break;
                    case 4: //site
                        Assert.AreEqual("site", Tokenizer.GetTextValue());
                        break;
                    case 5: // /
                        Assert.AreEqual('/', Tokenizer.GetCharacterValue());
                        break;
                    case 6: //index
                        Assert.AreEqual("index", Tokenizer.GetTextValue());
                        break;
                    case 7: //.
                        Assert.AreEqual('.', Tokenizer.GetCharacterValue());
                        break;
                    case 8: //html
                        Assert.AreEqual("html", Tokenizer.GetTextValue());
                        break;
                    case 9: //:
                        Assert.AreEqual(':', Tokenizer.GetCharacterValue());
                        break;
                    case 10: //home
                        Assert.AreEqual("home", Tokenizer.GetTextValue());
                        break;
                    case 11: //(
                        Assert.AreEqual('(', Tokenizer.GetCharacterValue());
                        break;
                    case 12: //)
                        Assert.AreEqual(')', Tokenizer.GetCharacterValue());
                        break;
                    case 13: //end
                        Assert.AreEqual("end", Tokenizer.GetTextValue());
                        break;
                }
            }
        }