Example #1
        public void TestReadNumber()
        {
            string          test1      = "this is a 123.5 notAnumber";
            TextReader      reader     = new StringReader(test1);
            StreamTokenizer tokenizer  = new StreamTokenizer(reader, true);
            TokenType       tokentype1 = tokenizer.NextToken();       //this
            TokenType       tokentype2 = tokenizer.NextToken();       //is
            TokenType       tokentype3 = tokenizer.NextToken();       //a

            tokenizer.NextToken();

            double number = tokenizer.GetNumericValue();

            Assertion.AssertEquals("test1", 123.5, number);

            tokenizer.NextToken();

            try
            {
                double number2 = tokenizer.GetNumericValue();
                Assertion.Fail("This should fail because the token is not a number.");
            }
            catch
            {
            }
        }
Example #2
        static void Main(string[] args)
        {
            // DEMO BOILERPLATE:
            // create a test stream, with some sample text in it
            MemoryStream ms = new MemoryStream();
            StreamWriter sw = new StreamWriter(ms);

            sw.WriteLine("This is a       sample stream");
            sw.WriteLine("with\tsome # this is a comment");
            sw.WriteLine("   (example) text // this is another comment");
            sw.WriteLine("and several /comments");
            sw.Flush();
            ms.Seek(0, SeekOrigin.Begin);

            // and open a reader on it:
            StreamReader sr = new StreamReader(ms);
            // could also just open a file:
            //StreamReader sr = new StreamReader("inputfile.txt");

            // DEMO START:
            StreamTokenizer tok = new StreamTokenizer(sr,
                                                      null,                      // whitespace delimiters
                                                      new char [] { '#', '/' }); // comment start chars are tokens too

            string s;

            while ((s = tok.NextToken()) != null)
            {
                Console.WriteLine("line {0} token: '{1}'", tok.Linenum, s);
                if (s == "#")
                {
                    Console.WriteLine("  (# comment, skipping to end of line)");
                    tok.SkipToEOL();
                }
                if (s == "/")
                {
                    // peek at the next token to see if it is our
                    // second slash:
                    s = tok.NextToken();
                    if (s == null)
                    {
                        break;
                    }
                    if (s == "/")
                    {
                        Console.WriteLine("  (// comment, skipping)");
                        tok.SkipToEOL();
                    }
                    else
                    {
                        // no, so push the token back into the tokenizer
                        tok.Unget(s);
                    }
                }
            }

            Console.WriteLine("[Hit return to exit]");
            Console.ReadLine();
        }
Example #3
        private bool ParseSentencesFromKIFFile()
        {
            using (var sr = new StreamReader(kifFilePath))
            {
                try
                {
                    var st = new StreamTokenizer(sr)
                    {
                        EOLIsSignificant = true
                    };
                    Token nextToken = null;
                    int   parenCount;

                    while ((nextToken = st.NextToken()).TheTokenType != TokenType.EndOfFile)
                    {
                        var input = new List <Token>();
                        parenCount = 0;

                        while (nextToken.TheTokenType != TokenType.EndOfFile)
                        {
                            input.Add(nextToken);

                            if (nextToken.TheTokenType == TokenType.LeftParen)
                            {
                                parenCount++;
                            }
                            else if (nextToken.TheTokenType == TokenType.RightParen)
                            {
                                if (--parenCount == 0)
                                {
                                    goto EndOfSentence;
                                }
                            }

                            nextToken = st.NextToken();
                        }

EndOfSentence:
                        sentences.Add(input);
                    }
                }
                catch (Exception)
                {
                    throw;
                }
            }

            return(true);
        }
Example #4
        private double GetNextNumber(StreamTokenizer tokenizer)
        {
            int type = tokenizer.NextToken();

            if (type == StreamTokenizer.TT_WORD)
            {
                if (tokenizer.StringValue.Equals(NAN_SYMBOL))
                {
                    return(Double.NaN);
                }
                else
                {
                    try
                    {
                        NumberFormatInfo NFI = new NumberFormatInfo()
                        {
                            NumberDecimalSeparator = "."
                        };
                        return(Double.Parse(tokenizer.StringValue, NFI));
                    }
                    catch (FormatException ex)
                    {
                        throw new WKTParseException(ex);
                    }
                }
            }

            throw new WKTParseException("Excepted: number");
        }
Example #5
        private static String GetNextWord(StreamTokenizer tokenizer)
        {
            try
            {
                int type = tokenizer.NextToken();

                switch (type)
                {
                case StreamTokenizer.TT_WORD:
                    String word = tokenizer.StringValue;

                    if (word.Equals(EMPTY))
                    {
                        return(EMPTY);
                    }
                    return(word);

                case '(':
                    return(L_PAREN);

                case ')':
                    return(R_PAREN);

                case ',':
                    return(COMMA);
                }

                throw new WKTParseException($"Uknown type: '{( char )type}'");
            }
            catch (IOException ex)
            {
                throw new WKTParseException(ex);
            }
        }
Example #6
 /// <summary>
 /// Gets the next word from the tokenizer
 /// @throws StreamCorruptedException if the word does not match
 /// @throws IOException              if an error occurs while loading the data
 /// </summary>
 /// <returns>the next word</returns>
 public string GetString()
 {
     if (_tokenizer == null)
     {
         return(null);
     }
     if (_putbackList.Count != 0)
     {
         var retVal = _putbackList[_putbackList.Count - 1];
         _putbackList.RemoveAt(_putbackList.Count - 1);
         return(retVal);
     }
     _tokenizer.NextToken();
     if (_tokenizer.Ttype == StreamTokenizer.TtEOF)
     {
         _atEof = true;
     }
     if (_tokenizer.Ttype != StreamTokenizer.TtWord && _tokenizer.Ttype != StreamTokenizer.TtEol &&
         _tokenizer.Ttype != StreamTokenizer.TtEOF)
     {
         Corrupt("word expected but not found");
     }
     if (_tokenizer.Ttype == StreamTokenizer.TtEol || _tokenizer.Ttype == StreamTokenizer.TtEOF)
     {
         return(null);
     }
     return(_tokenizer.StringValue);
 }
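A minimal usage sketch for the method above, assuming loader is an instance of the (hypothetical) class that owns GetString(): the method returns null at end of line or end of file, so a simple loop collects the remaining words on the current line.

 // Usage sketch; 'loader' is a hypothetical instance of the surrounding class.
 var words = new System.Collections.Generic.List<string>();
 string word;
 while ((word = loader.GetString()) != null)
 {
     words.Add(word);   // GetString() returns null at EOL/EOF
 }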
Example #7
        private static char Unescape(String text)
        {
            StreamTokenizer parser = new StreamTokenizer(new StringReader(text));

            parser.NextToken();
            return(parser.StringValue[0]);
        }
Example #8
        public void TestTokenize5()
        {
            string          test1       = "-2.5 -2 4";
            TextReader      reader      = new StringReader(test1);
            StreamTokenizer tokenizer   = new StreamTokenizer(reader, true);
            TokenType       tokentype   = tokenizer.NextToken();
            int             iTokenCount = 0;

            while (tokentype != TokenType.Eof)
            {
                tokentype = tokenizer.NextToken();
                iTokenCount++;
            }
            // first token will be -2.5
            Assertion.AssertEquals("token count", 3, iTokenCount);
        }
Example #9
        public void TestTokenize3()
        {
            string          test1       = "this is a very_long_word and long123 long123longer ok";
            TextReader      reader      = new StringReader(test1);
            StreamTokenizer tokenizer   = new StreamTokenizer(reader, true);
            TokenType       tokentype   = tokenizer.NextToken();
            int             iTokenCount = 0;

            while (tokentype != TokenType.Eof)
            {
                //Console.WriteLine("token:"+tokentype+"("+tokenizer.GetStringValue()+")");
                tokentype = tokenizer.NextToken();
                iTokenCount++;
            }
            //Console.WriteLine("token count ignore=false="+iTokenCount);
            Assertion.AssertEquals("token count", 8, iTokenCount);
        }
Example #10
        public void TestTokenize2()
        {
            string          test1       = "this,.is  123 test 456.789 test123 123.2 12*2 /* hello */ \n hello";
            TextReader      reader      = new StringReader(test1);
            StreamTokenizer tokenizer   = new StreamTokenizer(reader, false);
            TokenType       tokentype   = tokenizer.NextToken();
            int             iTokenCount = 0;

            while (tokentype != TokenType.Eof)
            {
                //Console.WriteLine("token:"+tokentype+"("+tokenizer.GetStringValue()+")");
                tokentype = tokenizer.NextToken();
                iTokenCount++;
            }
            //Console.WriteLine("token count ignore=false="+iTokenCount);
            Assertion.AssertEquals("token count", 30, iTokenCount);
        }
Example #11
        private static bool HasNext(StreamTokenizer st)
        {
            if (st == null)
            {
                throw new ArgumentNullException(nameof(st));
            }

            return(st.NextToken() != StreamTokenizer.TtEof);
        }
Example #12
        public void NumericTokenTest()
        {
            //Set up tokenizer
            Tokenizer = new StreamTokenizer(new StringReader("100"));

            Tokenizer.NextToken();

            //Test token
            Assert.AreEqual(100, Tokenizer.GetNumericValue());
        }
Example #13
        public void NumericTokenTest()
        {
            //Set up tokenizer
            Tokenizer = new StreamTokenizer(new StringReader("100"));

            Tokenizer.NextToken();

            //Test token
            Assert.AreEqual(100, Tokenizer.GetNumericValue());
        }
Example #14
            /// <exception cref="System.IO.IOException"/>
            internal virtual Parser.Token Next()
            {
                int type = tok.NextToken();

                switch (type)
                {
                case StreamTokenizer.TtEof:
                case StreamTokenizer.TtEol:
                {
                    return(null);
                }

                case StreamTokenizer.TtNumber:
                {
                    return(new Parser.NumToken(tok.nval));
                }

                case StreamTokenizer.TtWord:
                {
                    return(new Parser.StrToken(Parser.TType.Ident, tok.sval));
                }

                case '"':
                {
                    return(new Parser.StrToken(Parser.TType.Quot, tok.sval));
                }

                default:
                {
                    switch (type)
                    {
                    case ',':
                    {
                        return(new Parser.Token(Parser.TType.Comma));
                    }

                    case '(':
                    {
                        return(new Parser.Token(Parser.TType.Lparen));
                    }

                    case ')':
                    {
                        return(new Parser.Token(Parser.TType.Rparen));
                    }

                    default:
                    {
                        throw new IOException("Unexpected: " + type);
                    }
                    }
                    break;
                }
                }
            }
Example #15
        /// <summary>
        /// Reads partial atomic charges and adds them to the given ChemModel.
        /// </summary>
        /// <param name="model"></param>
        private void ReadPartialCharges(IChemModel model)
        {
            Trace.TraceInformation("Reading partial atomic charges");
            var            moleculeSet = model.MoleculeSet;
            IAtomContainer molecule    = moleculeSet[0];
            string         line        = input.ReadLine();

            // skip first line after "Total atomic charges"
            while (true)
            {
                line = input.ReadLine();
                Debug.WriteLine($"Read charge block line: {line}");
                if ((line == null) || line.Contains("Sum of Mulliken charges"))
                {
                    Debug.WriteLine("End of charge block found");
                    break;
                }
                StringReader    sr        = new StringReader(line);
                StreamTokenizer tokenizer = new StreamTokenizer(sr);
                if (tokenizer.NextToken() == StreamTokenizer.TTypeNumber)
                {
                    int atomCounter = (int)tokenizer.NumberValue;

                    tokenizer.NextToken();
                    // ignore the symbol

                    double charge;
                    if (tokenizer.NextToken() == StreamTokenizer.TTypeNumber)
                    {
                        charge = tokenizer.NumberValue;
                        Debug.WriteLine("Found charge for atom " + atomCounter + ": " + charge);
                    }
                    else
                    {
                        throw new CDKException("Error while reading charge: expected double.");
                    }
                    IAtom atom = molecule.Atoms[atomCounter - 1];
                    atom.Charge = charge;
                }
            }
        }
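The per-line pattern above (integer atom counter, element symbol, floating-point charge) can be lifted into a small helper. This is a sketch only, assuming the same StreamTokenizer API this reader uses (NextToken(), TTypeNumber, NumberValue); the helper itself is not part of the original source.

        // Hypothetical helper: parse one "  1  C   -0.123456"-style line into (index, charge).
        private static bool TryParseChargeLine(string line, out int atomIndex, out double charge)
        {
            atomIndex = 0;
            charge = 0;
            var tokenizer = new StreamTokenizer(new StringReader(line));
            if (tokenizer.NextToken() != StreamTokenizer.TTypeNumber)
            {
                return false;              // not a data line
            }
            atomIndex = (int)tokenizer.NumberValue;
            tokenizer.NextToken();         // skip the element symbol
            if (tokenizer.NextToken() != StreamTokenizer.TTypeNumber)
            {
                return false;              // malformed charge column
            }
            charge = tokenizer.NumberValue;
            return true;
        }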
Example #16
        /// <summary>
        /// Returns the next word in the stream as uppercase text.
        /// </summary>
        /// <param name="tokenizer">
        /// Tokenizer over a stream of text in Well-known Text
        /// format. The next token must be a word.
        /// </param>
        /// <returns>
        /// The next word in the stream as uppercase text.
        /// </returns>
        /// <exception cref="GeometryIOException">
        /// If the next token is not a word.
        /// </exception>
        /// <exception cref="IOException">
        /// If an I/O error occurs.
        /// </exception>
        private string GetNextWord(StreamTokenizer tokenizer)
        {
            Token token = null;

            if (!tokenizer.NextToken(out token))
            {
                return(null);
            }

            TokenType type = token.Type;

            switch (type)
            {
            case TokenType.Eof:
                throw new GeometryIOException("Expected word but encountered end of stream");

            case TokenType.Eol:
                throw new GeometryIOException("Expected word but encountered end of line");

            case TokenType.Float:
                throw new GeometryIOException("Expected word but encountered number: " + token.StringValue);

            case TokenType.Integer:
                throw new GeometryIOException("Expected word but encountered number: " + token.StringValue);

            case TokenType.Word:
                return(token.StringValue.ToUpper(CultureInfo.InvariantCulture));

            default:
            {
                string sVal = token.StringValue;
                if (sVal == TokenLParan)
                {
                    return(TokenLParan);
                }

                if (sVal == TokenRParan)
                {
                    return(TokenRParan);
                }

                if (sVal == TokenComma)
                {
                    return(TokenComma);
                }
            }
            break;
            }

            Debug.Assert(false, "Should never reach here: Encountered unexpected StreamTokenizer type: " + type);

            return(null);
        }
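A sketch of how a helper like GetNextWord is typically driven: read the geometry tag and dispatch on it. The Read*Text methods are assumptions for illustration; only GetNextWord and GeometryIOException come from the code above.

        // Hypothetical dispatcher built on GetNextWord above.
        private void ReadGeometryTaggedText(StreamTokenizer tokenizer)
        {
            string tag = GetNextWord(tokenizer);   // uppercased word, e.g. "POINT"
            switch (tag)
            {
            case "POINT":
                ReadPointText(tokenizer);          // assumed to exist elsewhere in the class
                break;

            case "LINESTRING":
                ReadLineStringText(tokenizer);     // assumed to exist elsewhere in the class
                break;

            default:
                throw new GeometryIOException("Unsupported geometry tag: " + tag);
            }
        }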
Example #17
        /// <summary>
        /// Set the params (analyzerName only),  Comma-separate list of Analyzer class names.  If the Analyzer lives in
        /// Lucene.Net.Analysis, the name can be shortened by dropping the Lucene.Net.Analysis part of the Fully Qualified Class Name.
        /// <para/>
        /// Analyzer names may also refer to previously defined AnalyzerFactory's.
        /// <para/>
        /// Example Declaration:
        /// <code>
        /// {"NewAnalyzer" NewAnalyzer(WhitespaceAnalyzer, SimpleAnalyzer, StopAnalyzer, Standard.StandardAnalyzer) >
        /// </code>
        /// <para/>
        /// Example AnalyzerFactory usage:
        /// <code>
        /// -AnalyzerFactory(name:'whitespace tokenized',WhitespaceTokenizer)
        /// -NewAnalyzer('whitespace tokenized')
        /// </code>
        /// </summary>
        /// <param name="params">analyzerClassName, or empty for the StandardAnalyzer</param>
        public override void SetParams(string @params)
        {
            base.SetParams(@params);
            StreamTokenizer stok = new StreamTokenizer(new StringReader(@params));

            stok.QuoteChar('"');
            stok.QuoteChar('\'');
            stok.EndOfLineIsSignificant = false;
            stok.OrdinaryChar(',');
            try
            {
                while (stok.NextToken() != StreamTokenizer.TokenType_EndOfStream)
                {
                    switch (stok.TokenType)
                    {
                    case ',':
                    {
                        // Do nothing
                        break;
                    }

                    case '\'':
                    case '\"':
                    case StreamTokenizer.TokenType_Word:
                    {
                        analyzerNames.Add(stok.StringValue);
                        break;
                    }

                    default:
                    {
                        throw RuntimeException.Create("Unexpected token: " + stok.ToString());
                    }
                    }
                }
            }
            catch (Exception e) when(e.IsRuntimeException())
            {
                if (e.Message.StartsWith("Line #", StringComparison.Ordinal))
                {
                    throw; // LUCENENET: CA2200: Rethrow to preserve stack details (https://docs.microsoft.com/en-us/visualstudio/code-quality/ca2200-rethrow-to-preserve-stack-details)
                }
                else
                {
                    throw RuntimeException.Create("Line #" + (stok.LineNumber + AlgLineNum) + ": ", e);
                }
            }
            catch (Exception t) when(t.IsThrowable())
            {
                throw RuntimeException.Create("Line #" + (stok.LineNumber + AlgLineNum) + ": ", t);
            }
        }
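The tokenizer setup above (both quote characters registered, ',' made ordinary) is what allows analyzer names to be either bare words or quoted strings. A standalone sketch of the same splitting step, assuming the same Lucene.Net support StreamTokenizer used in the method above:

        // Sketch: split "WhitespaceAnalyzer,'my custom analyzer',SimpleAnalyzer" into names.
        private static IList<string> SplitAnalyzerNames(string @params)
        {
            var names = new List<string>();
            StreamTokenizer stok = new StreamTokenizer(new StringReader(@params));
            stok.QuoteChar('"');
            stok.QuoteChar('\'');
            stok.EndOfLineIsSignificant = false;
            stok.OrdinaryChar(',');
            while (stok.NextToken() != StreamTokenizer.TokenType_EndOfStream)
            {
                if (stok.TokenType == ',')
                {
                    continue;                      // separator
                }
                names.Add(stok.StringValue);       // quoted strings and bare words both land in StringValue
            }
            return names;
        }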
Example #18
        /// <summary>
        /// Returns the next number in the stream.
        /// </summary>
        /// <param name="tokenizer">
        /// Tokenizer over a stream of text in Well-known Text format.
        /// The next token must be a number.
        /// </param>
        /// <returns>The next number in the stream.</returns>
        /// <exception cref="GeometryIOException">
        /// If the next token is not a number.
        /// </exception>
        /// <exception cref="IOException">
        /// If an I/O error occurs.
        /// </exception>
        private double GetNextNumber(StreamTokenizer tokenizer)
        {
            Token token = null;

            if (!tokenizer.NextToken(out token))
            {
                return(double.NaN);
            }

            TokenType type = token.Type;

            switch (type)
            {
            case TokenType.Eof:
                throw new GeometryIOException("Expected number but encountered end of stream");

            case TokenType.Eol:
                throw new GeometryIOException("Expected number but encountered end of line");

            case TokenType.Float:
                return(Convert.ToDouble(token.Object, m_objProvider));

            case TokenType.Integer:
                return(Convert.ToDouble(token.Object, m_objProvider));

            case TokenType.Word:
                throw new GeometryIOException("Expected number but encountered word: " + token.StringValue);

            default:
            {
                string sVal = token.StringValue;
                if (sVal == TokenLParan)
                {
                    throw new GeometryIOException("Expected number but encountered '('");
                }

                if (sVal == TokenRParan)
                {
                    throw new GeometryIOException("Expected number but encountered ')'");
                }

                if (sVal == TokenComma)
                {
                    throw new GeometryIOException("Expected number but encountered ','");
                }
            }
            break;
            }

            Debug.Assert(false, "Should never reach here: Encountered unexpected StreamTokenizer type: " + type);
            return(0);
        }
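Combined with GetNextWord above, this is enough to read a parenthesized coordinate pair. A sketch only, reusing the two private helpers and the Token* constants from the same class:

        // Hypothetical: read "( x y )" the way WKT "POINT (30 10)" is consumed.
        private double[] GetCoordinatePair(StreamTokenizer tokenizer)
        {
            if (GetNextWord(tokenizer) != TokenLParan)
            {
                throw new GeometryIOException("Expected '('");
            }
            double x = GetNextNumber(tokenizer);
            double y = GetNextNumber(tokenizer);
            if (GetNextWord(tokenizer) != TokenRParan)
            {
                throw new GeometryIOException("Expected ')'");
            }
            return new double[] { x, y };
        }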
Example #19
 private static bool IsNumberText(StreamTokenizer tokenizer)
 {
     try
     {
         int type = tokenizer.NextToken();
         tokenizer.PushBack();
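         // in this WKT grammar numbers are scanned as word tokens (GetNextNumber above
         // parses them from StringValue), so a TT_WORD here means "numeric text"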
         return(type == StreamTokenizer.TT_WORD);
     }
     catch (IOException ex)
     {
         throw new WKTParseException(ex);
     }
 }
Example #20
 private static bool IsOpenerNext(StreamTokenizer tokenizer)
 {
     try
     {
         int type = tokenizer.NextToken();
         tokenizer.PushBack();
         return(type == '(');
     }
     catch (IOException ex)
     {
         throw new WKTParseException(ex);
     }
 }
Example #21
        /// <summary>
        /// Set the params (analyzerName only),  Comma-separate list of Analyzer class names.  If the Analyzer lives in
        /// Lucene.Net.Analysis, the name can be shortened by dropping the Lucene.Net.Analysis part of the Fully Qualified Class Name.
        /// <para/>
        /// Analyzer names may also refer to previously defined AnalyzerFactory's.
        /// <para/>
        /// Example Declaration:
        /// <code>
        /// {"NewAnalyzer" NewAnalyzer(WhitespaceAnalyzer, SimpleAnalyzer, StopAnalyzer, Standard.StandardAnalyzer) >
        /// </code>
        /// <para/>
        /// Example AnalyzerFactory usage:
        /// <code>
        /// -AnalyzerFactory(name:'whitespace tokenized',WhitespaceTokenizer)
        /// -NewAnalyzer('whitespace tokenized')
        /// </code>
        /// </summary>
        /// <param name="params">analyzerClassName, or empty for the StandardAnalyzer</param>
        public override void SetParams(string @params)
        {
            base.SetParams(@params);
            StreamTokenizer stok = new StreamTokenizer(new StringReader(@params));

            stok.QuoteChar('"');
            stok.QuoteChar('\'');
            stok.IsEOLSignificant = false;
            stok.OrdinaryChar(',');
            try
            {
                while (stok.NextToken() != StreamTokenizer.TT_EOF)
                {
                    switch (stok.TokenType)
                    {
                    case ',':
                    {
                        // Do nothing
                        break;
                    }

                    case '\'':
                    case '\"':
                    case StreamTokenizer.TT_WORD:
                    {
                        analyzerNames.Add(stok.StringValue);
                        break;
                    }

                    default:
                    {
                        //throw new RuntimeException("Unexpected token: " + stok.ToString());
                        throw new Exception("Unexpected token: " + stok.ToString());
                    }
                    }
                }
            }
            catch (Exception e)
            {
                if (e.Message.StartsWith("Line #", StringComparison.Ordinal))
                {
                    throw; // rethrow to preserve the original stack trace
                }
                else
                {
                    throw new Exception("Line #" + (stok.LineNumber + AlgLineNum) + ": ", e);
                }
            }
        }
Example #22
        /// <summary>
        /// Tests if reader is at EOF.
        /// </summary>
        private static bool IsAtEndOfFile(StreamReader bufferedReader)
        {
            var position = bufferedReader.BaseStream.Position;

            var   tokenizer = new StreamTokenizer(bufferedReader);
            Token t;

            if (!tokenizer.NextToken(out t) || t is EofToken)
            {
                return(true);
            }

            bufferedReader.BaseStream.Seek(position, SeekOrigin.Begin);
            bufferedReader.DiscardBufferedData(); // drop stale buffered data so the next read starts at the restored position
            return(false);
        }
Example #23
        private static void CheckValid(StreamTokenizer st, string token)
        {
            if (st == null)
            {
                throw new ArgumentNullException(nameof(st));
            }
            if (token == null)
            {
                throw new ArgumentNullException(nameof(token));
            }

            if (st.NextToken() != '[')
            {
                throw new IOException(string.Concat(token, " not followed by ["));
            }
        }
Example #24
        private bool IsNumberNext(StreamTokenizer tokenizer)
        {
            try
            {
                Token token = null;
                if (!tokenizer.NextToken(out token))
                {
                    return(false);
                }

                return(token.Type == TokenType.Float ||
                       token.Type == TokenType.Integer);
            }
            finally
            {
                tokenizer.PushBack();
            }
        }
Example #25
 public String readNextToken()
 {
     try
     {
         if (st.NextToken() == StreamTokenizer.TT_EOF)
         {
             return(null);
         }
         else
         {
             return(st.StringValue);
         }
     }
     catch (Exception e)
     {
         string ex = e.ToString();
         return(null);
     }
 }
Example #26
        private double[] GetCoordinate(StreamTokenizer tokenizer, bool tryParen)
        {
            bool opened;

            if (opened = tryParen && IsOpenerNext(tokenizer))
            {
                tokenizer.NextToken();
            }

            double[] sequence = new double[2];
            sequence[0] = GetNextNumber(tokenizer);
            sequence[1] = GetNextNumber(tokenizer);

            if (opened)
            {
                GetNextCloser(tokenizer);
            }

            return(sequence);
        }
Example #27
        /// <summary>Internally fetches the next token.</summary>
        /// <returns>the next token in the token stream, or null if none exists.</returns>
        protected internal override string GetNext()
        {
            try
            {
                int nextTok = st.NextToken();
                switch (nextTok)
                {
                case StreamTokenizer.TtEol:
                {
                    return(eolString);
                }

                case StreamTokenizer.TtEof:
                {
                    return(null);
                }

                case StreamTokenizer.TtWord:
                {
                    return(st.sval);
                }

                case StreamTokenizer.TtNumber:
                {
                    return(st.nval.ToString());
                }

                default:
                {
                    char[] t = new char[] { (char)nextTok };
                    // (array initialization)
                    return(new string(t));
                }
                }
            }
            catch (IOException)
            {
                // do nothing, return null
                return(null);
            }
        }
Example #28
        /// <summary>
        /// Read algorithm from file.
        /// Property examined: alt.tasks.packages == comma separated list of
        /// alternate Assembly names where tasks would be searched for, when not found
        /// in the default Assembly (that of <see cref="PerfTask"/>).
        /// If the same task class appears in more than one Assembly, the Assembly
        /// indicated first in this list will be used.
        /// <para/>
        /// The Lucene.Net implementation differs from Lucene in that all
        /// referenced assemblies are also scanned for the type. However,
        /// alt.tasks.packages may be included for assemblies that are
        /// not referenced in your project.
        /// </summary>
        /// <param name="runData">perf-run-data used at running the tasks.</param>
        /// <exception cref="Exception">if errors while parsing the algorithm.</exception>
        public Algorithm(PerfRunData runData)
        {
            Config config = runData.Config;

            taskPackages = InitTasksPackages(config);
            string algTxt = config.AlgorithmText;

            sequence = new TaskSequence(runData, null, null, false);
            TaskSequence    currSequence = sequence;
            PerfTask        prevTask     = null;
            StreamTokenizer stok         = new StreamTokenizer(new StringReader(algTxt));

            stok.CommentChar('#');
            stok.IsEOLSignificant = false;
            stok.QuoteChar('"');
            stok.QuoteChar('\'');
            stok.OrdinaryChar('/');
            stok.OrdinaryChar('(');
            stok.OrdinaryChar(')');
            bool colonOk = false;
            bool isDisableCountNextTask = false; // only for primitive tasks

            currSequence.Depth = 0;

            while (stok.NextToken() != StreamTokenizer.TT_EOF)
            {
                switch (stok.TokenType)
                {
                case StreamTokenizer.TT_WORD:
                    string   s    = stok.StringValue;
                    PerfTask task = (PerfTask)Activator.CreateInstance(TaskClass(config, s), runData);
                    task.AlgLineNum        = stok.LineNumber;
                    task.DisableCounting   = isDisableCountNextTask;
                    isDisableCountNextTask = false;
                    currSequence.AddTask(task);
                    if (task is RepSumByPrefTask)
                    {
                        stok.NextToken();
                        string prefix = stok.StringValue;
                        if (prefix == null || prefix.Length == 0)
                        {
                            throw new Exception("named report prefix problem - " + stok.ToString());
                        }
                        ((RepSumByPrefTask)task).SetPrefix(prefix);
                    }
                    // check for task param: '(' someParam ')'
                    stok.NextToken();
                    if (stok.TokenType != '(')
                    {
                        stok.PushBack();
                    }
                    else
                    {
                        // get params, for tasks that support them - allow recursive parenthetical expressions
                        stok.IsEOLSignificant = true;      // Allow params tokenizer to keep track of line number
                        StringBuilder @params = new StringBuilder();
                        stok.NextToken();
                        if (stok.TokenType != ')')
                        {
                            int count = 1;
                            while (true)
                            {
                                switch (stok.TokenType)
                                {
                                case StreamTokenizer.TT_NUMBER:
                                {
                                    @params.Append(stok.NumberValue);
                                    break;
                                }

                                case StreamTokenizer.TT_WORD:
                                {
                                    @params.Append(stok.StringValue);
                                    break;
                                }

                                case StreamTokenizer.TT_EOF:
                                {
                                    throw new Exception("Unexpexted EOF: - " + stok.ToString());
                                }

                                case '"':
                                case '\'':
                                {
                                    @params.Append((char)stok.TokenType);
                                    // re-escape delimiters, if any
                                    @params.Append(stok.StringValue.Replace("" + (char)stok.TokenType, @"\" + (char)stok.TokenType));
                                    @params.Append((char)stok.TokenType);
                                    break;
                                }

                                case '(':
                                {
                                    @params.Append((char)stok.TokenType);
                                    ++count;
                                    break;
                                }

                                case ')':
                                {
                                    if (--count >= 1)
                                    {              // exclude final closing parenthesis
                                        @params.Append((char)stok.TokenType);
                                    }
                                    else
                                    {
                                        goto BALANCED_PARENS_BREAK;
                                    }
                                    break;
                                }

                                default:
                                {
                                    @params.Append((char)stok.TokenType);
                                    break;
                                }
                                }
                                stok.NextToken();
                            }
                            BALANCED_PARENS_BREAK : { }
                        }
                        stok.IsEOLSignificant = false;
                        string prm = @params.ToString().Trim();
                        if (prm.Length > 0)
                        {
                            task.SetParams(prm);
                        }
                    }

                    // ---------------------------------------
                    colonOk = false; prevTask = task;
                    break;

                default:
                    char c = (char)stok.TokenType;

                    switch (c)
                    {
                    case ':':
                        if (!colonOk)
                        {
                            throw new Exception("colon unexpexted: - " + stok.ToString());
                        }
                        colonOk = false;
                        // get repetitions number
                        stok.NextToken();
                        if ((char)stok.TokenType == '*')
                        {
                            ((TaskSequence)prevTask).SetRepetitions(TaskSequence.REPEAT_EXHAUST);
                        }
                        else
                        {
                            if (stok.TokenType != StreamTokenizer.TT_NUMBER)
                            {
                                throw new Exception("expected repetitions number or XXXs: - " + stok.ToString());
                            }
                            else
                            {
                                double num = stok.NumberValue;
                                stok.NextToken();
                                if (stok.TokenType == StreamTokenizer.TT_WORD && stok.StringValue.Equals("s", StringComparison.Ordinal))
                                {
                                    ((TaskSequence)prevTask).SetRunTime(num);
                                }
                                else
                                {
                                    stok.PushBack();
                                    ((TaskSequence)prevTask).SetRepetitions((int)num);
                                }
                            }
                        }
                        // check for rate specification (ops/min)
                        stok.NextToken();
                        if (stok.TokenType != ':')
                        {
                            stok.PushBack();
                        }
                        else
                        {
                            // get rate number
                            stok.NextToken();
                            if (stok.TokenType != StreamTokenizer.TT_NUMBER)
                            {
                                throw new Exception("expected rate number: - " + stok.ToString());
                            }
                            // check for unit - min or sec, sec is default
                            stok.NextToken();
                            if (stok.TokenType != '/')
                            {
                                stok.PushBack();
                                ((TaskSequence)prevTask).SetRate((int)stok.NumberValue, false);         // set rate per sec
                            }
                            else
                            {
                                stok.NextToken();
                                if (stok.TokenType != StreamTokenizer.TT_WORD)
                                {
                                    throw new Exception("expected rate unit: 'min' or 'sec' - " + stok.ToString());
                                }
                                string unit = stok.StringValue.ToLowerInvariant();
                                if ("min".Equals(unit, StringComparison.Ordinal))
                                {
                                    ((TaskSequence)prevTask).SetRate((int)stok.NumberValue, true);         // set rate per min
                                }
                                else if ("sec".Equals(unit, StringComparison.Ordinal))
                                {
                                    ((TaskSequence)prevTask).SetRate((int)stok.NumberValue, false);         // set rate per sec
                                }
                                else
                                {
                                    throw new Exception("expected rate unit: 'min' or 'sec' - " + stok.ToString());
                                }
                            }
                        }
                        colonOk = false;
                        break;

                    case '{':
                    case '[':
                        // a sequence
                        // check for sequence name
                        string name = null;
                        stok.NextToken();
                        if (stok.TokenType != '"')
                        {
                            stok.PushBack();
                        }
                        else
                        {
                            name = stok.StringValue;
                            if (stok.TokenType != '"' || name == null || name.Length == 0)
                            {
                                throw new Exception("sequence name problem - " + stok.ToString());
                            }
                        }
                        // start the sequence
                        TaskSequence seq2 = new TaskSequence(runData, name, currSequence, c == '[');
                        currSequence.AddTask(seq2);
                        currSequence = seq2;
                        colonOk      = false;
                        break;

                    case '&':
                        if (currSequence.IsParallel)
                        {
                            throw new Exception("Can only create background tasks within a serial task");
                        }
                        stok.NextToken();
                        int deltaPri;
                        if (stok.TokenType != StreamTokenizer.TT_NUMBER)
                        {
                            stok.PushBack();
                            deltaPri = 0;
                        }
                        else
                        {
                            // priority
                            deltaPri = (int)stok.NumberValue;
                        }

                        if (prevTask == null)
                        {
                            throw new Exception("& was unexpected");
                        }
                        else if (prevTask.RunInBackground)
                        {
                            throw new Exception("double & was unexpected");
                        }
                        else
                        {
                            prevTask.SetRunInBackground(deltaPri);
                        }
                        break;

                    case '>':
                        currSequence.SetNoChildReport();         // then close the sequence, exactly as '}' / ']' below
                        // end sequence
                        colonOk      = true; prevTask = currSequence;
                        currSequence = currSequence.Parent;
                        break;

                    case '}':
                    case ']':
                        // end sequence
                        colonOk      = true; prevTask = currSequence;
                        currSequence = currSequence.Parent;
                        break;

                    case '-':
                        isDisableCountNextTask = true;
                        break;
                    }     //switch(c)
                    break;
                } //switch(stok.ttype)
            }

            if (sequence != currSequence)
            {
                throw new Exception("Unmatched sequences");
            }

            // remove redundant top level enclosing sequences
            while (sequence.IsCollapsable && sequence.Repetitions == 1 && sequence.GetRate() == 0)
            {
                IList <PerfTask> t = sequence.Tasks;
                if (t != null && t.Count == 1)
                {
                    PerfTask p = t[0];
                    if (p is TaskSequence)
                    {
                        sequence = (TaskSequence)p;
                        continue;
                    }
                }
                break;
            }
        }
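For reference, a hypothetical algorithm text of the kind this constructor parses, exercising the tokens handled above: '#' comments, a quoted sequence name, '-' to disable counting, '( )' task params, '>' to close a sequence without a child report, and ':' repetitions. The task names and values are illustrative only.

            // Hypothetical .alg snippet (not from the Lucene benchmark distribution):
            string algTxt =
                "# populate a small index\n" +
                "{ \"Populate\"\n" +
                "    -CreateIndex\n" +
                "    { AddDoc(2000) > : 100\n" +
                "    CloseIndex\n" +
                "} : 1\n";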
Example #29
        /// <exception cref="System.IO.IOException"></exception>
        public static AList<Lexeme> Tokenize(string s)
        {
            StreamTokenizer tokenizer = new StreamTokenizer(new StringReader(s));
            tokenizer.OrdinaryChar('-');
            // Don't parse minus as part of numbers.
            AList<Lexeme> tokBuf = new AList<Lexeme>();
            while (tokenizer.NextToken() != StreamTokenizer.TT_EOF)
            {
                switch (tokenizer.ttype)
                {
                    case StreamTokenizer.TT_NUMBER:
                    {
                        // numbers are carried in nval, not sval, for TT_NUMBER tokens
                        tokBuf.AddItem(new Lexeme(Lexeme.NUMBER, tokenizer.nval.ToString()));
                        break;
                    }

                    case StreamTokenizer.TT_WORD:
                    {
                        tokBuf.AddItem(new Lexeme(Lexeme.WORD, tokenizer.sval));
                        break;
                    }

                    default:
                    {
                        // operator or punctuation: for ordinary characters ttype holds the character itself
                        string text = ((char)tokenizer.ttype).ToString();
                        if (text.Equals("("))
                        {
                            tokBuf.AddItem(new Lexeme(Lexeme.LPAREN, text));
                        }
                        else if (text.Equals(")"))
                        {
                            tokBuf.AddItem(new Lexeme(Lexeme.RPAREN, text));
                        }
                        else if (text.Equals(","))
                        {
                            tokBuf.AddItem(new Lexeme(Lexeme.COMMA, text));
                        }
                        else
                        {
                            tokBuf.AddItem(new Lexeme(Lexeme.OPERATOR, text));
                        }
                        break;
                    }
                }
            }
            return tokBuf;
        }
Example #30
        /// <summary>
        /// Reads a set of coordinates into ChemFrame.
        /// </summary>
        /// <param name="model"></param>
        private void ReadCoordinates(IChemModel model)
        {
            var            moleculeSet = model.Builder.NewAtomContainerSet();
            IAtomContainer molecule    = model.Builder.NewAtomContainer();
            string         line        = input.ReadLine();

            line = input.ReadLine();
            line = input.ReadLine();
            line = input.ReadLine();
            while (true)
            {
                line = input.ReadLine();
                if ((line == null) || (line.Contains("-----")))
                {
                    break;
                }
                int             atomicNumber;
                StringReader    sr    = new StringReader(line);
                StreamTokenizer token = new StreamTokenizer(sr);
                token.NextToken(); // ignore first token

                if (token.NextToken() == StreamTokenizer.TTypeNumber)
                {
                    atomicNumber = (int)token.NumberValue;
                    if (atomicNumber == 0)
                    {
                        // Skip dummy atoms. Dummy atoms must be skipped
                        // if frequencies are to be read because Gaussian
                        // does not report dummy atoms in frequencies, and
                        // the number of atoms is used for reading frequencies.
                        continue;
                    }
                }
                else
                {
                    throw new CDKException("Error while reading coordinates: expected integer.");
                }
                token.NextToken(); // ignore third token

                double x;
                double y;
                double z;
                if (token.NextToken() == StreamTokenizer.TTypeNumber)
                {
                    x = token.NumberValue;
                }
                else
                {
                    throw new IOException("Error reading x coordinate");
                }
                if (token.NextToken() == StreamTokenizer.TTypeNumber)
                {
                    y = token.NumberValue;
                }
                else
                {
                    throw new IOException("Error reading y coordinate");
                }
                if (token.NextToken() == StreamTokenizer.TTypeNumber)
                {
                    z = token.NumberValue;
                }
                else
                {
                    throw new IOException("Error reading z coordinate");
                }
                string symbol = "Du";
                symbol = PeriodicTable.GetSymbol(atomicNumber);
                IAtom atom = model.Builder.NewAtom(symbol);
                atom.Point3D = new Vector3(x, y, z);
                molecule.Atoms.Add(atom);
            }

            // this is the place where we store the atomcount to be used as a
            // counter in the nmr reading
            atomCount = molecule.Atoms.Count;
            moleculeSet.Add(molecule);
            model.MoleculeSet = moleculeSet;
        }
Example #31
        /// <summary>
        /// Instantiates the given analysis factory class after pulling params from
        /// the given stream tokenizer, then stores the result in the appropriate
        /// pipeline component list.
        /// </summary>
        /// <param name="stok">Stream tokenizer from which to draw analysis factory params.</param>
        /// <param name="clazz">Analysis factory class to instantiate.</param>
        private void CreateAnalysisPipelineComponent(StreamTokenizer stok, Type clazz)
        {
            IDictionary <string, string> argMap = new Dictionary <string, string>();
            bool parenthetical = false;

            try
            {
                while (stok.NextToken() != StreamTokenizer.TokenType_EndOfStream)
                {
                    switch (stok.TokenType)
                    {
                    case ',':
                    {
                        if (parenthetical)
                        {
                            // Do nothing
                            break;
                        }
                        else
                        {
                            // Finished reading this analysis factory configuration
                            goto WHILE_LOOP_BREAK;
                        }
                    }

                    case '(':
                    {
                        if (parenthetical)
                        {
                            throw RuntimeException.Create
                                      ("Line #" + GetLineNumber(stok) + ": Unexpected opening parenthesis.");
                        }
                        parenthetical = true;
                        break;
                    }

                    case ')':
                    {
                        if (parenthetical)
                        {
                            parenthetical = false;
                        }
                        else
                        {
                            throw RuntimeException.Create
                                      ("Line #" + GetLineNumber(stok) + ": Unexpected closing parenthesis.");
                        }
                        break;
                    }

                    case StreamTokenizer.TokenType_Word:
                    {
                        if (!parenthetical)
                        {
                            throw RuntimeException.Create("Line #" + GetLineNumber(stok) + ": Unexpected token '" + stok.StringValue + "'");
                        }
                        string argName = stok.StringValue;
                        stok.NextToken();
                        if (stok.TokenType != ':')
                        {
                            throw RuntimeException.Create
                                      ("Line #" + GetLineNumber(stok) + ": Missing ':' after '" + argName + "' param to " + clazz.Name);
                        }
                        stok.NextToken();
                        string argValue = stok.StringValue;
                        switch (stok.TokenType)
                        {
                        case StreamTokenizer.TokenType_Number:
                        {
                            argValue = stok.NumberValue.ToString(CultureInfo.InvariantCulture);
                            // Drop the ".0" from numbers, for integer arguments
                            argValue = TRAILING_DOT_ZERO_PATTERN.Replace(argValue, "", 1);
                            // store the value exactly like the word/quoted-string cases below
                            // (the Java original fell through; C# switch sections cannot)
                            argMap[argName] = argValue;
                            break;
                        }

                        case '"':
                        case '\'':
                        case StreamTokenizer.TokenType_Word:
                        {
                            argMap[argName] = argValue;
                            break;
                        }

                        case StreamTokenizer.TokenType_EndOfStream:
                        {
                            throw RuntimeException.Create("Unexpected EOF: " + stok.ToString());
                        }

                        default:
                        {
                            throw RuntimeException.Create
                                      ("Line #" + GetLineNumber(stok) + ": Unexpected token: " + stok.ToString());
                        }
                        }
                        break;
                    }
                    }
                }
                WHILE_LOOP_BREAK : { }

                if (!argMap.ContainsKey("luceneMatchVersion"))
                {
#pragma warning disable 612, 618
                    argMap["luceneMatchVersion"] = LuceneVersion.LUCENE_CURRENT.ToString();
#pragma warning restore 612, 618
                }
                AbstractAnalysisFactory instance;
                try
                {
                    instance = (AbstractAnalysisFactory)Activator.CreateInstance(clazz, argMap);
                }
                catch (Exception e) when(e.IsException())
                {
                    throw RuntimeException.Create("Line #" + GetLineNumber(stok) + ": ", e);
                }
                if (instance is IResourceLoaderAware resourceLoaderAware)
                {
                    DirectoryInfo baseDir = new DirectoryInfo(RunData.Config.Get("work.dir", "work"));
                    resourceLoaderAware.Inform(new FilesystemResourceLoader(baseDir));
                }
                if (typeof(CharFilterFactory).IsAssignableFrom(clazz))
                {
                    charFilterFactories.Add((CharFilterFactory)instance);
                }
                else if (typeof(TokenizerFactory).IsAssignableFrom(clazz))
                {
                    tokenizerFactory = (TokenizerFactory)instance;
                }
                else if (typeof(TokenFilterFactory).IsAssignableFrom(clazz))
                {
                    tokenFilterFactories.Add((TokenFilterFactory)instance);
                }
            }
            catch (Exception e) when(e.IsRuntimeException())
            {
                if (e.Message.StartsWith("Line #", StringComparison.Ordinal))
                {
                    throw; // LUCENENET: CA2200: Rethrow to preserve stack details (https://docs.microsoft.com/en-us/visualstudio/code-quality/ca2200-rethrow-to-preserve-stack-details)
                }
                else
                {
                    throw RuntimeException.Create("Line #" + GetLineNumber(stok) + ": ", e);
                }
            }
            catch (Exception t) when(t.IsThrowable())
            {
                throw RuntimeException.Create("Line #" + GetLineNumber(stok) + ": ", t);
            }
        }
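For reference, the argument syntax this method consumes is a list of 'name:value' pairs inside parentheses after a factory name, with values given as bare words, numbers, or quoted strings. A hypothetical declaration of that shape (factory and argument names are illustrative only, not confirmed Lucene factories):

            // -AnalyzerFactory(name:'shingled text',
            //                  StandardTokenizer,
            //                  LowerCaseFilter,
            //                  ShingleFilter(maxShingleSize:3, outputUnigrams:true))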
Example #32
        public void TestWaebricInput()
        {
            //Set up tokenizer and input for tokenizer
            Tokenizer = new StreamTokenizer(new StringReader("module test\n\nsite site/index.html : home()\nend"));

            //Tokenize stream and do some test with it
            int current = Tokenizer.NextToken();
            int position = 1;
            while(current != StreamTokenizer.EOF)
            {
                if (current == StreamTokenizer.LAYOUT)
                {
                    current = Tokenizer.NextToken();
                    continue; //ignore layout
                }
                switch (position)
                {
                    case 1: //module
                        Assert.AreEqual("module", Tokenizer.GetTextValue());
                        break;
                    case 2: //test
                        Assert.AreEqual("test", Tokenizer.GetTextValue());
                        break;
                    case 3: //site
                        Assert.AreEqual("site", Tokenizer.GetTextValue());
                        break;
                    case 4: //site
                        Assert.AreEqual("site", Tokenizer.GetTextValue());
                        break;
                    case 5: // /
                        Assert.AreEqual('/', Tokenizer.GetCharacterValue());
                        break;
                    case 6: //index
                        Assert.AreEqual("index", Tokenizer.GetTextValue());
                        break;
                    case 7: //.
                        Assert.AreEqual('.', Tokenizer.GetCharacterValue());
                        break;
                    case 8: //html
                        Assert.AreEqual("html", Tokenizer.GetTextValue());
                        break;
                    case 9: //:
                        Assert.AreEqual(':', Tokenizer.GetCharacterValue());
                        break;
                    case 10: //home
                        Assert.AreEqual("home", Tokenizer.GetTextValue());
                        break;
                    case 11: //(
                        Assert.AreEqual('(', Tokenizer.GetCharacterValue());
                        break;
                    case 12: //)
                        Assert.AreEqual(')', Tokenizer.GetCharacterValue());
                        break;
                    case 13: //end
                        Assert.AreEqual("end", Tokenizer.GetTextValue());
                        break;
                }
                position++;
                current = Tokenizer.NextToken();
            }
        }
Example #33
        public void TestWaebricInput()
        {
            //Set up tokenizer and input for tokenizer
            Tokenizer = new StreamTokenizer(new StringReader("module test\n\nsite site/index.html : home()\nend"));

            //Tokenize stream and do some test with it
            int current  = Tokenizer.NextToken();
            int position = 1;

            while (current != StreamTokenizer.EOF)
            {
                if (current == StreamTokenizer.LAYOUT)
                {
                    current = Tokenizer.NextToken();
                    continue; //ignore layout
                }
                switch (position)
                {
                case 1:     //module
                    Assert.AreEqual("module", Tokenizer.GetTextValue());
                    break;

                case 2:     //test
                    Assert.AreEqual("test", Tokenizer.GetTextValue());
                    break;

                case 3:     //site
                    Assert.AreEqual("site", Tokenizer.GetTextValue());
                    break;

                case 4:     //site
                    Assert.AreEqual("site", Tokenizer.GetTextValue());
                    break;

                case 5:     // /
                    Assert.AreEqual('/', Tokenizer.GetCharacterValue());
                    break;

                case 6:     //index
                    Assert.AreEqual("index", Tokenizer.GetTextValue());
                    break;

                case 7:     //.
                    Assert.AreEqual('.', Tokenizer.GetCharacterValue());
                    break;

                case 8:     //html
                    Assert.AreEqual("html", Tokenizer.GetTextValue());
                    break;

                case 9:     //:
                    Assert.AreEqual(':', Tokenizer.GetCharacterValue());
                    break;

                case 10:     //home
                    Assert.AreEqual("home", Tokenizer.GetTextValue());
                    break;

                case 11:     //(
                    Assert.AreEqual('(', Tokenizer.GetCharacterValue());
                    break;

                case 12:     //)
                    Assert.AreEqual(')', Tokenizer.GetCharacterValue());
                    break;

                case 13:     //end
                    Assert.AreEqual("end", Tokenizer.GetTextValue());
                    break;
                }
                position++;
                current = Tokenizer.NextToken();
            }
        }