Example #1
0
 public void BlankStringReturnsEof()
 {
     // A whitespace-only input must yield Eof on every call, not just the first.
     var sut = new Tokenizer("    ");

     foreach (var label in new[] { "First Call", "Second Call", "Third Call" })
     {
         Assert.That(sut.NextToken(), Is.EqualTo(new Token(TokenKind.Eof)), label);
     }
 }
Example #2
0
 public void WordsInUnicode()
 {
     // Word tokens are not limited to ASCII; Cyrillic text must tokenize as a Word.
     var sut = new Tokenizer("method == Здравствуйте");

     var expected = new[]
     {
         new Token(TokenKind.Word, "method"),
         new Token(TokenKind.Symbol, "=="),
         new Token(TokenKind.Word, "Здравствуйте"),
         new Token(TokenKind.Eof),
     };

     foreach (var token in expected)
     {
         Assert.That(sut.NextToken(), Is.EqualTo(token));
     }
 }
Example #3
0
 public void StringWithDoubleQuotes()
 {
     // Double-quoted strings may appear anywhere in the input and may freely
     // contain single-quote characters.
     var sut = new Tokenizer("\"string at start\" \"may contain ' char\" \"string at end\"");

     foreach (var text in new[] { "string at start", "may contain ' char", "string at end" })
     {
         Assert.That(sut.NextToken(), Is.EqualTo(new Token(TokenKind.String, text)));
     }

     Assert.That(sut.NextToken(), Is.EqualTo(new Token(TokenKind.Eof)));
 }
Example #4
0
 public void StringWithSingleQuotes()
 {
     // Single-quoted strings may appear anywhere in the input and may freely
     // contain double-quote characters.
     var sut = new Tokenizer("'string at start' 'may contain \" char' 'string at end'");

     foreach (var text in new[] { "string at start", "may contain \" char", "string at end" })
     {
         Assert.That(sut.NextToken(), Is.EqualTo(new Token(TokenKind.String, text)));
     }

     Assert.That(sut.NextToken(), Is.EqualTo(new Token(TokenKind.Eof)));
 }
Example #5
0
 public void IdentifierTokens()
 {
     // Surrounding whitespace is skipped; each identifier comes back as a Word.
     var sut = new Tokenizer("  Identifiers x abc123 a1x  ");

     foreach (var word in new[] { "Identifiers", "x", "abc123", "a1x" })
     {
         Assert.That(sut.NextToken(), Is.EqualTo(new Token(TokenKind.Word, word)));
     }

     Assert.That(sut.NextToken(), Is.EqualTo(new Token(TokenKind.Eof)));
 }
		public void LineCommentEof() {
			// Block-comment markers nested inside a "--" line comment must not open a
			// real block; the sequence is terminal, noise, block, then end-of-input.
			using (TestStringReader input = new TestStringReader("0 -- /* don't /*** 'do ***/ this */ 0")) {
				Tokenizer sut = new Tokenizer(input, grammar);
				Token current;
				Assert.Equal(ParseMessage.TokenRead, sut.NextToken(out current));
				Assert.Equal(SymbolKind.Terminal, current.Symbol.Kind);
				Assert.Equal(ParseMessage.TokenRead, sut.NextToken(out current));
				Assert.Equal(SymbolKind.Noise, current.Symbol.Kind);
				Assert.Equal(ParseMessage.BlockRead, sut.NextToken(out current));
				Assert.Equal(ParseMessage.TokenRead, sut.NextToken(out current));
				Assert.Equal(SymbolKind.End, current.Symbol.Kind);
			}
		}
		public void BlockCommentWithUnclosedString() {
			// The apostrophe inside the block comment must not start a string literal;
			// expected sequence: block, noise, terminal, then end-of-input.
			using (TestStringReader input = new TestStringReader("/* don't */ 'do this'")) {
				Tokenizer sut = new Tokenizer(input, grammar);
				Token current;
				Assert.Equal(ParseMessage.BlockRead, sut.NextToken(out current));
				Assert.Equal(ParseMessage.TokenRead, sut.NextToken(out current));
				Assert.Equal(SymbolKind.Noise, current.Symbol.Kind);
				Assert.Equal(ParseMessage.TokenRead, sut.NextToken(out current));
				Assert.Equal(SymbolKind.Terminal, current.Symbol.Kind);
				Assert.Equal(ParseMessage.TokenRead, sut.NextToken(out current));
				Assert.Equal(SymbolKind.End, current.Symbol.Kind);
			}
		}
Example #8
0
        private void ClosingBrace(Tokenizer.Tokenizer tokenizer)
        {
            // Consume the next token and require it to be exactly a ClosingBrace
            // (exact-type comparison, deliberately not an 'is' subtype check).
            AbstractToken next = tokenizer.NextToken();

            if (next.GetType() != typeof(ClosingBrace))
            {
                throw new ParseException("Expected type ClosingBrace but found: " + next.GetType() + " after " + tokenizer.GetPreviousCharacters(25));
            }
        }
Example #9
0
 public bool MoveNext()
 {
     // Standard enumerator contract: advance and report whether a token exists.
     if (Tokenizer.EndOfStream)
     {
         return false;
     }

     _current = Tokenizer.NextToken();
     return true;
 }
Example #10
0
        private void TokenizerOutput()
        {
            // Reads a sample file, tokenizes it, and dumps each token (type + text)
            // to the console until the "$" end-of-input marker token is reached.
            // Path.Combine replaces the original hand-built @"\..." concatenation,
            // which was platform-dependent.
            string testString = File.ReadAllText(Path.Combine(Environment.CurrentDirectory, "ExampleTextFiles", "SampleInputFileForTokenization.txt"));
            if (string.IsNullOrEmpty(testString))
                return;

            using (var t = new StringReader(testString))
            {
                var tokenizer = new Tokenizer(new ParseReader(t));
                var currentToken = tokenizer.NextToken();

                // "$" is the sentinel marking end of input.
                while (currentToken.GetText() != "$")
                {
                    Console.WriteLine(currentToken.GetType() + " " + currentToken.GetText());
                    currentToken = tokenizer.NextToken();
                }
            }

            Console.ReadKey();
        }
        public void TheNextTokenMethodShouldReturnTokenValueComma()
        {
            // Arrange: a lone comma must come back as one token whose Value is ",".
            // The StringReader is now disposed (the original leaked it).
            using (var reader = new StringReader(","))
            {
                var tokenizer = new Tokenizer(reader);

                // Act
                var token = tokenizer.NextToken();

                // Assert
                Assert.Equal(",", token.Value);
            }
        }
Example #12
0
        public void Test4()
        {
            // "3xy" must tokenize as the number 3 followed by variables x^1 and y^1.
            var sut = new Tokenizer(new StringReader("3xy"));

            Assert.Equal(Token.Number, sut.Token);
            Assert.Equal(3, sut.Number);

            sut.NextToken();
            AssertVariable('x');

            sut.NextToken();
            AssertVariable('y');

            // Shared check: current token is a variable with the given name, power 1.
            void AssertVariable(char name)
            {
                Assert.Equal(Token.Variable, sut.Token);
                Assert.Equal(name, sut.Variable.Name);
                Assert.Equal(1, sut.Variable.Power);
            }
        }
Example #13
0
        /// <summary> Sets the string value of the field specified.  See class docs for location spec syntax.</summary>
        public virtual void  Set(System.String spec, System.String value_Renamed)
        {
            // The first "-"-delimited token of the spec names the target segment.
            Tokenizer pathTokens = new Tokenizer(spec, "-", false);
            Segment   target     = getSegment(pathTokens.NextToken());

            // Field / repetition / component / subcomponent indices from the spec.
            int[] ind = getIndices(spec);
            if (log.DebugEnabled)
            {
                log.debug("Setting " + spec + " seg: " + target.getStructureName() + " ind: " + ind[0] + " " + ind[1] + " " + ind[2] + " " + ind[3]);
            }
            Set(target, ind[0], ind[1], ind[2], ind[3], value_Renamed);
        }
Example #14
0
        /// <summary> Returns the segment specified in the given segment_path_spec. </summary>
        public virtual Segment getSegment(System.String segSpec)
        {
            Segment result = null;

            // A leading "/" makes the path absolute: restart the finder at the root.
            if (segSpec.Substring(0, (1) - (0)).Equals("/"))
            {
                Finder.reset();
            }

            Tokenizer     pathTokens = new Tokenizer(segSpec, "/", false);
            SegmentFinder finder     = Finder;

            while (pathTokens.HasMoreTokens)
            {
                Terser.PathSpec ps = parsePathSpec(pathTokens.NextToken());

                // Every element except the last names a group; the last one names
                // the segment itself.
                ps.isGroup = pathTokens.HasMoreTokens;

                if (ps.isGroup)
                {
                    // Descend into the group and continue from there.
                    Group g = ps.find
                        ? finder.findGroup(ps.pattern, ps.rep)
                        : finder.getGroup(ps.pattern, ps.rep);
                    finder = new SegmentFinder(g);
                }
                else
                {
                    result = ps.find
                        ? finder.findSegment(ps.pattern, ps.rep)
                        : finder.getSegment(ps.pattern, ps.rep);
                }
            }

            return result;
        }
Example #15
0
        public DayTrigger(String triggerText, String name)
        {
            _name = name;

            // Expected form: "<day> [from <time> [to <time>]]", space-delimited.
            Tokenizer parts = new Tokenizer(triggerText, ' ');

            // Day name is parsed case-insensitively.
            _day = (DayOfWeek)Enum.Parse(typeof(DayOfWeek), parts.NextToken(), true);

            // The time range is optional; a bare day name is valid.
            if (!parts.TokenAvailable)
            {
                return;
            }

            String keyword = parts.NextToken();
            if (!keyword.Equals("from"))
            {
                throw new ArgumentException(String.Format("Token '{0}' in line '{1}'", keyword, triggerText));
            }
            _fromTime = ParseTime(parts.NextToken(), triggerText);

            // The "to <time>" part is likewise optional.
            if (!parts.TokenAvailable)
            {
                return;
            }

            keyword = parts.NextToken();
            if (!keyword.Equals("to"))
            {
                throw new ArgumentException(String.Format("Token '{0}' in line '{1}'", keyword, triggerText));
            }
            _toTime = ParseTime(parts.NextToken(), triggerText);
        }
Example #16
0
        /// <summary>
        /// Parses a comma-separated list of "host[port]" entries into ServerNode
        /// objects.  A node is flagged active when its host equals <paramref name="an"/>.
        /// </summary>
        /// <param name="l">Comma-separated "host[port]" list.</param>
        /// <param name="an">Address name marking the active node (may be null/empty).</param>
        /// <returns>ArrayList of ServerNode instances, in input order.</returns>
        private static ArrayList createServers(string l, string an)
        {
            Tokenizer tok    = new Tokenizer(l, ",");
            ArrayList retval = new ArrayList();

            while (tok.HasMoreTokens())
            {
                string t    = tok.NextToken();
                string host = t.Substring(0, t.IndexOf('[')).Trim();

                // Parse the bracketed port purely for validation; a malformed value
                // still throws FormatException as before.  The original wrapped this
                // in catch blocks that only did "throw ex", which destroyed the
                // stack trace and added nothing - they have been removed.
                Convert.ToInt32(t.Substring(t.IndexOf('[') + 1, t.IndexOf(']') - (t.IndexOf('[') + 1)));

                // Only the node whose host matches the local address name is active.
                bool isLocal = an != null && an.Length > 0 && an.Equals(host);
                retval.Add(new ServerNode(host, isLocal));
            }

            return retval;
        }
Example #17
0
        public void NextToken_AllKeywords_Found()
        {
            // All language keywords joined by spaces must come back, in order,
            // as Keyword tokens with their original text.
            var tokenStream = new Tokenizer().Parse(String.Join(" ", Language.Keywords));

            for (int index = 0; index < Language.Keywords.Length; index++)
            {
                var token = tokenStream.NextToken();

                Assert.IsNotNull(token, "Null token");
                Assert.IsTrue(token.Type == TokenType.Keyword, $"{index}: {token.Type.ToString()}");
                Assert.IsTrue(token.TokenContent == Language.Keywords[index], $"{index}: {token.TokenContent}");
            }
        }
Example #18
0
        /// <summary>Gets path information from a path spec. </summary>
        private PathSpec parsePathSpec(System.String spec)
        {
            PathSpec ps = new PathSpec(this);

            // A leading dot selects "find" (search) semantics instead of direct lookup.
            ps.find = spec.StartsWith(".");
            if (ps.find)
            {
                spec = spec.Substring(1);
            }

            if (spec.Length == 0)
            {
                throw new HL7Exception("Invalid path (some path element is either empty or contains only a dot)");
            }

            // The element looks like "name(rep)"; "()" splits it into name and rep.
            Tokenizer tok = new Tokenizer(spec, "()", false);
            ps.pattern = tok.NextToken();

            if (!tok.HasMoreTokens)
            {
                // No "(rep)" suffix: default to the first repetition.
                ps.rep = 0;
                return ps;
            }

            System.String repString = tok.NextToken();
            try
            {
                ps.rep = System.Int32.Parse(repString);
            }
            catch (System.FormatException)
            {
                throw new HL7Exception(repString + " is not a valid rep #", HL7Exception.APPLICATION_INTERNAL_ERROR);
            }
            return ps;
        }
Example #19
0
    /// <summary>
    /// Parses a primary expression: a parenthesized expression, a magnitude
    /// expression |x|, a function call, or an atomic value.
    /// </summary>
    /// <param name="tokens">Token stream positioned at the primary.</param>
    /// <returns>Value of the parsed primary expression.</returns>
    private object ParsePrimary(Tokenizer tokens)
    {
        var nextToken = tokens.NextToken();

        switch (nextToken)
        {
        case "-":
            throw new Exception("Unary 1 not yet implemented");

        case "(":
        {
            var result = this.ParseExpression(tokens);
            if (tokens.PeekTokenOrEOT() != ")")
            {
                throw new Exception("Parse error - expected ')', but got " + tokens.PeekTokenOrEOT());
            }
            tokens.NextToken();             // consume ")"
            return result;
        }

        case "|":
        {
            var result = this.ParseExpression(tokens);
            if (tokens.PeekTokenOrEOT() != "|")
            {
                // Fixed typo in the error message: "bot got" -> "but got".
                throw new Exception("Parse error - expected '|' but got " + tokens.PeekTokenOrEOT());
            }
            tokens.NextToken();             // consume closing "|"
            return ApplyMagnitude(result);
        }

        default:
            // An identifier followed by "(" is a function call; otherwise atomic.
            if (tokens.PeekTokenOrEOT() == "(")
            {
                return ParseFunctionCall(nextToken, tokens);
            }
            return this.ResolveAtomic(nextToken);
        }
    }
        public static StringComparisonExpression Create(string expression)
        {
            // Tokenize the raw text, prime the tokenizer with the first token, then
            // parse it into an Expression wrapped by a StringComparisonExpression.
            Tokenizer tokenizer = new Tokenizer(expression);
            tokenizer.NextToken();

            Expression parsed = new Expression();
            parsed.Parse(tokenizer);

            return new StringComparisonExpression { m_Expression = parsed };
        }
Example #21
0
        public void NextToken_ForGivenValidBasicText_ReturnsExpectedTokens(string givenText, string expectedIdentifier, string expectedOpearator,
                                                                           TokenType expectedOperatorType, string expectedValue, TokenType expectedValueType)
        {
            // The input must hold exactly three tokens - identifier, operator,
            // value - after which the tokenizer is exhausted.
            using (var reader = new StringReader(givenText))
                using (var sut = new Tokenizer(reader))
                {
                    var identifier = sut.NextToken();
                    var op         = sut.NextToken();
                    var value      = sut.NextToken();

                    Assert.Equal(expectedIdentifier, identifier.Value);
                    Assert.Equal(TokenType.Identifier, identifier.Type);

                    Assert.Equal(expectedOpearator, op.Value);
                    Assert.Equal(expectedOperatorType, op.Type);

                    Assert.Equal(expectedValue, value.Value);
                    Assert.Equal(expectedValueType, value.Type);

                    Assert.False(sut.CanRead());
                }
        }
        public void SymbolTokens_SingleChar()
        {
            // Each single-character operator must be produced as its own Symbol token.
            var sut = new Tokenizer("=!&|()");

            foreach (var symbol in new[] { "=", "!", "&", "|", "(", ")" })
            {
                Assert.That(sut.NextToken(), Is.EqualTo(new Token(TokenKind.Symbol, symbol)));
            }

            Assert.That(sut.NextToken(), Is.EqualTo(new Token(TokenKind.Eof)));
        }
Example #23
0
        public void SymbolTokens_DoubleChar()
        {
            // Adjacent operator pairs must be grouped into two-character Symbol tokens.
            var sut = new Tokenizer("==&&||!==~!~");

            foreach (var symbol in new[] { "==", "&&", "||", "!=", "=~", "!~" })
            {
                Assert.That(sut.NextToken(), Is.EqualTo(new Token(TokenKind.Symbol, symbol)));
            }

            Assert.That(sut.NextToken(), Is.EqualTo(new Token(TokenKind.Eof)));
        }
Example #24
0
    private MdfMaterial ParseClipper()
    {
        // Clipper materials write neither depth nor color by default; tokens in
        // the body selectively re-enable those.
        var material = new MdfMaterial(MdfType.Clipper);

        material.enableZWrite     = false;
        material.enableColorWrite = false;

        var tokenizer = new Tokenizer(mContent);
        tokenizer.NextToken();         // Skip material type

        while (tokenizer.NextToken())
        {
            if (tokenizer.IsNamedIdentifier("wire"))
            {
                // Wireframe rendering needs color output back on.
                material.wireframe        = true;
                material.enableColorWrite = true;
            }
            else if (tokenizer.IsNamedIdentifier("zfill"))
            {
                material.enableZWrite = true;
            }
            else if (tokenizer.IsNamedIdentifier("outline"))
            {
                // Outlines are drawn to the color buffer as well.
                material.outline          = true;
                material.enableColorWrite = true;
            }
            else if (mStrict)
            {
                // Unknown tokens are fatal only in strict mode.
                throw CreateError("Unrecognized token '{0}'", tokenizer.TokenText.ToString());
            }
        }

        return material;
    }
Example #25
0
        public void NextToken_ZerosThenInteger_Found()
        {
            // Leading zeros are emitted as individual "0" integer tokens; the
            // remaining digits form a single big integer token.
            const string zeros  = "0000";
            const string number = "1029381092840293480923";
            var tokenStream = new Tokenizer().Parse(zeros + number);

            for (int i = 0; i < zeros.Length; i++)
            {
                Token zeroToken = tokenStream.NextToken();

                Assert.IsNotNull(zeroToken);
                Assert.IsTrue(zeroToken.Type == TokenType.Integer);
                Assert.IsTrue(zeroToken.TokenContent == "0");
            }

            Token numberToken = tokenStream.NextToken();

            Assert.IsNotNull(numberToken);
            Assert.IsTrue(numberToken.Type == TokenType.Integer);
            Assert.IsTrue(numberToken.TokenContent == number);
        }
Example #26
0
        public void NextToken_ConsecutiveZeros_Found()
        {
            // A run of zeros must yield one "0" integer token per input character.
            const string input = "000000000000000000000000";
            var tokenStream = new Tokenizer().Parse(input);

            foreach (var _ in input)
            {
                var token = tokenStream.NextToken();

                Assert.IsNotNull(token);
                Assert.IsTrue(token.Type == TokenType.Integer);
                Assert.IsTrue(token.TokenContent == "0");
            }
        }
Example #27
0
        /// <summary> Get a string of the form "P1(config_str1):P2:P3(config_str3)" and return
        /// ProtocolConfigurations for it. That means, parse "P1(config_str1)", "P2" and
        /// "P3(config_str3)"
        /// </summary>
        /// <param name="config_str">Configuration string
        /// </param>
        /// <returns> Vector of ProtocolConfigurations
        /// </returns>
        public virtual System.Collections.ArrayList parseComponentStrings(string config_str, string delimiter)
        {
            // Synchronized list mirrors the java.util.Vector this code was ported from.
            System.Collections.ArrayList components = System.Collections.ArrayList.Synchronized(new System.Collections.ArrayList(10));

            Tokenizer tok = new Tokenizer(config_str, delimiter);
            while (tok.HasMoreTokens())
            {
                components.Add(tok.NextToken());
            }

            return components;
        }
        public void NextToken_DoubleNumbersString_ReturnsDoubleNumber()
        {
            // Arrange - "10.5 .5": Init() loads the first token, NextToken() advances
            // to ".5", which must be read as the double 0.5.
            using var reader = new StringReader("10.5 .5");
            var sut = new Tokenizer(reader);

            // Act
            sut.Init();
            sut.NextToken();

            // Assert
            sut.CurrentToken.Number.Should().Be(0.5);
        }
Example #29
0
        public void ShouldProcessPunctuations()
        {
            // Every punctuation character must become exactly one single-character
            // Punctuation token, in input order.
            const string punct = "().|[]";
            Tokenizer sut = new Tokenizer(punct);

            foreach (char expected in punct)
            {
                Token token = sut.NextToken();

                Assert.IsNotNull(token);
                Assert.AreEqual(expected, token.Value[0]);
                Assert.AreEqual(1, token.Value.Length);
                Assert.AreEqual(TokenType.Punctuation, token.Type);
            }
        }
        public void NextToken_SymbolsString_ReturnsSymbol()
        {
            // Arrange - "- +": Init() loads the first token, NextToken() advances to
            // the second symbol, '+'.
            using var reader = new StringReader("- +");
            var sut = new Tokenizer(reader);

            // Act
            sut.Init();
            sut.NextToken();

            // Assert
            sut.CurrentToken.Symbols.First().Should().Be('+');
        }
Example #31
0
        public void TokenizerTest()
        {
            // "2       + 3 - 4.5" must tokenize as: 2, +, 3, -, 4.5 (runs of
            // whitespace are ignored).
            var sut = new Tokenizer(new StringReader("2       + 3 - 4.5"));

            ExpectNumber(2);
            ExpectToken(Token.Plus);
            ExpectNumber(3);
            ExpectToken(Token.Minus);
            ExpectNumber(4.5);

            // Assert the current token kind, then advance.
            void ExpectToken(Token expected)
            {
                Assert.AreEqual(sut.Token, expected);
                sut.NextToken();
            }

            // Assert the current token is a number with the given value, then advance.
            void ExpectNumber(double expected)
            {
                Assert.AreEqual(sut.Token, Token.Number);
                Assert.AreEqual(sut.Number, expected);
                sut.NextToken();
            }
        }
Example #32
0
 /// <summary>
 /// Handles infix operators.  Should not be called from anyplace but ParseExpression.
 /// Does not currently handle unary or right-associative operators.
 /// Implements precedence climbing: keeps consuming operators whose precedence is
 /// at least minPrecedence, recursing so tighter-binding operators claim the rhs first.
 /// </summary>
 /// <param name="lhs">Value of the left-hand side argument to the operator.</param>
 /// <param name="tokens">Token stream</param>
 /// <param name="minPrecedence">The precedence of any infix operator of which this is the rhs.</param>
 /// <returns>Value of the parsed expression</returns>
 private object ParseOperatorExpression(object lhs, Tokenizer tokens, int minPrecedence)
 {
     while (!tokens.EndOfTokens && IsBinaryOperator(tokens.PeekToken()) && Precedence(tokens.PeekToken()) >= minPrecedence)
     {
         string op  = tokens.NextToken();
         object rhs = this.ParsePrimary(tokens);
         // Any upcoming operator binding tighter than `op` takes rhs as its own lhs.
         while (!tokens.EndOfTokens && IsBinaryOperator(tokens.PeekToken()) &&
                Precedence(tokens.PeekToken()) > Precedence(op))
         {
             rhs = this.ParseOperatorExpression(rhs, tokens, Precedence(tokens.PeekToken()));
         }
         lhs = ApplyOperator(op, lhs, rhs);
     }
     return(lhs);
 }
        public void NextToken_IntegerNumbersString_ReturnsIntegerNumber()
        {
            // Arrange - "10 20": Init() loads the first token, NextToken() advances
            // to the second integer, 20.
            using var reader = new StringReader("10 20");
            var sut = new Tokenizer(reader);

            // Act
            sut.Init();
            sut.NextToken();

            // Assert
            sut.CurrentToken.Number.Should().Be(20);
        }
Example #34
0
        /// <summary>
        /// Compiles a Forth-style "begin ... (again | until | while ... repeat)" loop.
        /// Reads words from the tokenizer until a loop terminator is found, then adds
        /// a BeginPrim for the collected body to <paramref name="wlbParent"/>.
        /// </summary>
        /// <param name="tokenizer">Source of the words forming the loop body.</param>
        /// <param name="wlbParent">Word list that receives the compiled loop.</param>
        internal static void Begin(Tokenizer tokenizer, WordListBuilder wlbParent)
        {
            var       wlbBegin = new WordListBuilder();
            // Condition captured by a "while" clause; stays null until one is seen.
            Evaluable evalCond = null;

            while (true)
            {
                // Loop keywords are matched case-insensitively.
                var word = tokenizer.NextToken().ToLower();

                switch (word)
                {
                case "again":
                    // Unconditional loop: begin ... again
                    wlbParent.Add(new BeginPrim(wlbBegin.Realize()));
                    return;

                case "until":
                    // Post-tested loop: begin ... until
                    wlbParent.Add(new BeginPrim(wlbBegin.Realize(), true));
                    return;

                case "while":
                    // Everything collected so far becomes the loop condition;
                    // restart collection for the body.
                    evalCond = wlbBegin.Realize();
                    wlbBegin.Clear();
                    continue;

                case "repeat" when evalCond == null:
                    // "repeat" is only legal after a "while".
                    throw new NfException("\"begin...repeat\" with no while");

                case "repeat":
                    // Pre-tested loop: begin <cond> while <body> repeat
                    wlbParent.Add(new BeginPrim(wlbBegin.Realize(), false, evalCond));
                    return;
                }

                // Not a loop keyword: compile the word into the current body.
                var evaluable = Interpreter.ParseWord(word);
                if (evaluable == null)
                {
                    // TODO: get more robust error handling
                    throw new NfException($"Couldn't locate word {word}");
                }

                if (evaluable.IsImmediate)
                {
                    // Only runs at compile time so no need to supply a parent here.
                    evaluable.Eval(tokenizer, wlbBegin);
                    continue;
                }
                wlbBegin.Add(evaluable);
            }
        }
Example #35
0
        /// <summary>
        /// Inserts a chain of tree nodes for a dot-delimited parameter key under
        /// <paramref name="root"/>, reusing existing path segments and creating any
        /// that are missing.  Keys containing "parent." are ignored entirely.
        /// </summary>
        /// <param name="model">Tree node consulted for child counts when it is a ParameterDatabaseTreeModel.</param>
        /// <param name="root">Node under which the key's path is built.</param>
        /// <param name="key">Dot-delimited parameter key, e.g. "a.b.c".</param>
        private void _addNodeForParameter(TreeNode model, TreeNode root, string key)
        {
            if (key.IndexOf("parent.") == -1)
            {
                // Split the key into its dot-separated path segments.
                var tok  = new Tokenizer(key, ".");
                var path = new string[tok.Count];
                var t    = 0;
                while (tok.HasMoreTokens())
                {
                    path[t++] = tok.NextToken();
                }
                var parent = root;

                // Walk (or create) one tree level per path segment.
                for (var i = 0; i < path.Length; ++i)
                {
                    // NOTE(review): 'model' is typed TreeNode but tested against
                    // ParameterDatabaseTreeModel - presumably the model supplies a more
                    // accurate child count; confirm against the caller.
                    var children = model is ParameterDatabaseTreeModel
                        ? ((ParameterDatabaseTreeModel)model).GetChildCount(parent)
                        : parent.Nodes.Count;
                    if (children > 0)
                    {
                        // Look for an existing child whose Tag matches this segment.
                        var c = 0;
                        for (; c < children; ++c)
                        {
                            var child = parent.Nodes[c];
                            if (child.Tag.Equals(path[i]))
                            {
                                parent = child;
                                break;
                            }
                        }

                        // No match found (loop ran to completion): append a new node.
                        if (c == children)
                        {
                            TreeNode child = new ParameterDatabaseTreeNode(path[i]);
                            parent.Nodes.Insert(parent.Nodes.Count, child);
                            parent = child;
                        }
                    }
                    // If the parent has no children, just add the node.
                    else
                    {
                        TreeNode child = new ParameterDatabaseTreeNode(path[i]);
                        parent.Nodes.Insert(0, child);
                        parent = child;
                    }
                }
            }
        }
        /// <summary>Returns the value of the <code>name</code> property as an array of
        /// strings.  If no such property is specified, then <code>null</code>
        /// is returned.  Values are whitespace or comma delimited.
        /// </summary>
        public virtual System.String[] GetStrings(System.String name)
        {
            System.String valueString = GetPoperty(name);
            if (valueString == null)
            {
                return null;
            }

            // Split on commas and any whitespace characters.
            Tokenizer tokenizer = new Tokenizer(valueString, ", \t\n\r\f");
            System.Collections.IList values = new System.Collections.ArrayList();
            while (tokenizer.HasMoreTokens())
            {
                values.Add(tokenizer.NextToken());
            }

            return (System.String[])Support.ICollectionSupport.ToArray(values, new System.String[values.Count]);
        }
Example #37
0
        private void Comma(Tokenizer.Tokenizer tokenizer, Boolean optional = false)
        {
            // Peek first so a non-comma token stays in the stream when optional.
            AbstractToken next = tokenizer.Peek();

            if (next.GetType() == typeof(Comma))
            {
                tokenizer.NextToken();     // consume the comma
                return;
            }

            if (!optional)
            {
                throw new ParseException("Expected type Comma but found: " + next.GetType() + " after " + tokenizer.GetPreviousCharacters(25));
            }
        }
Example #38
0
        /// <summary> Loads scoring matrix from a file.</summary>
        /// <param name="matrixPath">Path of the matrix file to read.</param>
        /// <returns> loaded matrix </returns>
        /// <exception cref="FormatException">The file contains only comment lines.</exception>
        public static Matrix Load(string matrixPath)
        {
            char[] acids = new char[SIZE];

            // Initialize the acids array to null values (ascii = 0)
            for (int i = 0; i < SIZE; i++)
            {
                acids[i] = (char)(0);
            }

            float[,] scores = new float[SIZE, SIZE];

            // 'using' guarantees the file handle is released; the original code
            // never disposed the StreamReader.
            using (StreamReader reader = new StreamReader(matrixPath))
            {
                // Skip the comment lines
                string line;
                while ((line = reader.ReadLine()) != null && line.Trim()[0] == COMMENT_STARTER)
                {
                }

                // A file consisting solely of comments has no header line; the
                // original dereferenced null here and threw NullReferenceException.
                if (line == null)
                {
                    throw new FormatException("Scoring matrix file contains no header line: " + matrixPath);
                }

                // Read the headers line (the letters of the acids)
                Tokenizer tokenizer = new Tokenizer(line.Trim());
                for (int j = 0; tokenizer.HasMoreTokens(); j++)
                {
                    acids[j] = tokenizer.NextToken()[0];
                }

                // Read the scores: each row starts with its acid letter, followed by
                // one score per header column.
                while ((line = reader.ReadLine()) != null)
                {
                    tokenizer = new Tokenizer(line.Trim());
                    char acid = tokenizer.NextToken()[0];
                    for (int i = 0; i < SIZE; i++)
                    {
                        if (acids[i] != 0)
                        {
                            scores[acid, acids[i]] = System.Single.Parse(tokenizer.NextToken());
                        }
                    }
                }
            }

            return new Matrix(matrixPath, scores);
        }
        public void TestTokenizerMultiLine()
        {
            // Tokens may be separated by spaces, tabs, or newlines; all are skipped,
            // and a null token marks end of input.
            var sut = new Tokenizer(new StringReader("p1\n2C 3C \n 4C 5C \t 6C "));

            foreach (var expected in new[] { "p1", "2C", "3C", "4C", "5C", "6C" })
            {
                Assert.AreEqual(expected, sut.NextToken());
            }
            Assert.AreEqual(null, sut.NextToken());

            // Input that is nothing but line breaks yields no tokens at all.
            sut = new Tokenizer(new StringReader("\r\n\r\n"));
            Assert.AreEqual(null, sut.NextToken());
        }
Example #40
0
        /// <summary>Tokenizes a single input line and feeds every token to the builder stages.</summary>
        /// <param name="line">Raw input line to parse.</param>
        /// <returns>The model assembled by the builder.</returns>
        public InputLineModel Parse(string line)
        {
            using (var reader = new StringReader(line))
            {
                var tokenizer = new Tokenizer(reader);

                // Each iteration offers the current token to every builder stage,
                // then advances; stop once the tokenizer reports end of line.
                for (; tokenizer.Token != Token.EOL; tokenizer.NextToken())
                {
                    _builder.BuildComment(tokenizer);
                    _builder.BuildId(tokenizer);
                    _builder.BuildName(tokenizer);
                    _builder.BuildQuantities(tokenizer);
                }
            }

            return _builder.Build();
        }
Example #41
0
 /// <summary>
 /// Parses prefix formal function calls, e.g. f(arg).
 /// Called after function name read but before ( is read.
 /// </summary>
 /// <param name="functionName">Name of function being called</param>
 /// <param name="tokens">Tokenizer</param>
 /// <returns>Variable representing the result of the function.</returns>
 private object ParseFunctionCall(string functionName, Tokenizer tokens)
 {
     tokens.NextToken();  // consume the '('

     // First argument, then zero or more comma-separated arguments.
     var arguments = new List<object>();
     arguments.Add(this.ParseExpression(tokens));
     while (tokens.PeekToken() == ",")
     {
         tokens.NextToken();   // consume the ','
         arguments.Add(this.ParseExpression(tokens));
     }

     // Guard clause: anything other than ')' here is a parse error.
     if (tokens.PeekToken() != ")")
     {
         throw new Exception("Expected ) but got "+tokens.PeekToken());
     }
     tokens.NextToken();   // consume the ')'
     return this.ApplyFunction(functionName, arguments);
 }
Example #42
0
        /// <summary> Returns a String representing the encoding of the given message, if
        /// the encoding is recognized.  For example if the given message appears
        /// to be encoded using HL7 2.x XML rules then "XML" would be returned.
        /// If the encoding is not recognized then null is returned.  That this
        /// method returns a specific encoding does not guarantee that the
        /// message is correctly encoded (e.g. well formed XML) - just that
        /// it is not encoded using any other encoding than the one returned.
        /// </summary>
        /// <param name="message">Raw message text to inspect.</param>
        /// <returns>"VB" when the message looks vertical-bar encoded; otherwise null.</returns>
        public override System.String getEncoding(System.String message)
        {
            System.String encoding = null;

            // Quit if the string is too short to hold "MSH" plus a delimiter.
            if (message.Length < 4)
                return null;

            // See if it looks like this message is | encoded ...
            // NOTE(review): 'ok' is never set to false anywhere below, so the
            // final 'if (ok)' always fires once we reach it; kept as-is.
            bool ok = true;

            // String should start with "MSH".
            if (!message.StartsWith("MSH"))
                return null;

            // 4th character of each segment should be the field delimiter.
            char fourthChar = message[3];
            char[] fourthCharArray = fourthChar.ToString().ToCharArray();
            // segDelim is declared elsewhere in this class - presumably the HL7
            // segment delimiter ("\r"); confirm against the class definition.
            Tokenizer st = new Tokenizer(message, segDelim, false);
            while (st.HasMoreTokens)
            {
                System.String x = st.NextToken();
                if (x.Length > 0)
                {
                    if (System.Char.IsWhiteSpace(x[0]))
                        x = stripLeadingWhitespace(x);

                    if (x.Length >= 4)
                    {
                        // NOTE(review): Split with count=1 returns the whole
                        // segment as parts[0], so parts[0].Length equals x.Length
                        // and the two length checks below appear unreachable for
                        // x.Length >= 4 - the count may have been meant to be 2;
                        // verify before changing.
                        string[] parts = x.Split(fourthCharArray,1);
                        if(parts.Length==0)
                            return null;

                        // A 3-letter segment name must be followed by the delimiter.
                        if(parts[0].Length==3 && x[3] !=fourthChar)
                        {
                            return null;
                        }
                        //CDC: If a segment is a non-standard segment and there are multiple, the segment name might by 4 char long
                        if(parts[0].Length != x.Length && parts[0].Length==4 && x[4] !=fourthChar)
                        {
                            return null;
                        }
                    }
                }
            }

            // Should be at least 11 field delimiters (because MSH-12 is required).
            int nextFieldDelimLoc = 0;
            for (int i = 0; i < 11; i++)
            {
                nextFieldDelimLoc = message.IndexOf((System.Char) fourthChar, nextFieldDelimLoc + 1);
                if (nextFieldDelimLoc < 0)
                    return null;
            }

            if (ok)
                encoding = "VB";

            return encoding;
        }
Example #43
0
        /// <summary> Splits the given composite string into an array of components using the given
        /// delimiter.  Adjacent delimiters yield a null component, matching the
        /// original tokenizer-based behaviour.
        /// </summary>
        /// <param name="composite">String to split; null is treated as empty.</param>
        /// <param name="delim">Delimiter; null is treated as empty.</param>
        /// <returns>Array of components (null entries mark empty fields).</returns>
        public static System.String[] split(System.String composite, System.String delim)
        {
            //defend against evil nulls
            if (composite == null)
                composite = "";
            if (delim == null)
                delim = "";

            // List<string> replaces the non-generic ArrayList: no boxing/casts
            // and ToArray() replaces the manual element-by-element copy loop.
            System.Collections.Generic.List<System.String> components =
                new System.Collections.Generic.List<System.String>();

            Tokenizer tok = new Tokenizer(composite, delim, false);
            bool previousTokenWasDelim = true;
            while (tok.HasMoreTokens)
            {
                System.String thisTok = tok.NextToken();
                if (thisTok.Equals(delim))
                {
                    // Two delimiters in a row mean an empty field -> null entry.
                    if (previousTokenWasDelim)
                        components.Add(null);
                    previousTokenWasDelim = true;
                }
                else
                {
                    components.Add(thisTok);
                    previousTokenWasDelim = false;
                }
            }

            return components.ToArray();
        }
		/*
		doctype: omit | auto | strict | loose | <fpi>
		
		where the fpi is a string similar to
		
		"-//ACME//DTD HTML 3.14159//EN"
		*/
		/* protected internal */ 
		/// <summary>
		/// Parses the doctype option value: omit | auto | strict | loose | &lt;fpi&gt;.
		/// A quoted FPI string selects the user doctype and is returned verbatim.
		/// </summary>
		/// <param name="s">Raw option value.</param>
		/// <param name="option">Option name, used for error reporting.</param>
		/// <returns>The FPI string for a user doctype; otherwise null.</returns>
		internal string ParseDocType(string s, string option)
		{
			s = s.Trim();

			/* "-//ACME//DTD HTML 3.14159//EN" or similar */
			if (s.StartsWith("\""))
			{
				DocType = TidyNet.DocType.User;
				return s;
			}

			/* read first word */
			string word = "";
			Tokenizer t = new Tokenizer(s, " \t\n\r,");
			if (t.HasMoreTokens())
			{
				word = t.NextToken();
			}

			// switch compares strings ordinally, replacing the original chain of
			// culture-sensitive String.Compare calls (CA1310); for these ASCII
			// keywords the accepted spellings are unchanged.
			switch (word)
			{
				case "omit":
					DocType = TidyNet.DocType.Omit;
					break;
				case "strict":
					DocType = TidyNet.DocType.Strict;
					break;
				case "loose":
				case "transitional":
					DocType = TidyNet.DocType.Loose;
					break;
				case "auto":
					DocType = TidyNet.DocType.Auto;
					break;
				default:
					// Unrecognized word: fall back to auto and report the bad option.
					DocType = TidyNet.DocType.Auto;
					Report.BadArgument(option);
					break;
			}
			return null;
		}
		/// <summary>Registers each whitespace- or comma-separated name in s as a pre tag.</summary>
		/// <param name="s">List of tag names to define.</param>
		/// <param name="option">Option name (unused here, kept for signature parity).</param>
		private void ParsePreTagNames(string s, string option)
		{
			for (Tokenizer tokenizer = new Tokenizer(s, " \t\n\r,"); tokenizer.HasMoreTokens(); )
			{
				tt.DefinePreTag(tokenizer.NextToken());
			}
		}
Example #46
0
 /// <summary>A word/symbol/word run with no whitespace still splits into three tokens.</summary>
 public void MixedTokens_Simple()
 {
     var tokenizer = new Tokenizer("id=123");
     var expected = new[]
     {
         new Token(TokenKind.Word, "id"),
         new Token(TokenKind.Symbol, "="),
         new Token(TokenKind.Word, "123"),
     };
     foreach (var token in expected)
         Assert.That(tokenizer.NextToken(), Is.EqualTo(token));
     Assert.That(tokenizer.NextToken(), Is.EqualTo(new Token(TokenKind.Eof)));
 }
		/// <summary>
		/// Builds the register set by parsing the comment header emitted by the
		/// D3DX shader compiler: the "Parameters:" section supplies names and
		/// types, the "Registers:" table supplies register assignment and size.
		/// </summary>
		/// <param name="header">Compiler output header text (example below).</param>
		public RegisterSet(string header)
		{
			//extract the registers used...
			/*
			 * Example header: 
			 * 
			//
            // Generated by Microsoft (R) D3DX9 Shader Compiler 9.15.779.0000
            //
            // Parameters:
            //
            //   float2 shadowCameraNearFar;
            //
            //
            // Registers:
            //
            //   Name                Reg   Size
            //   ------------------- ----- ----
            //   shadowCameraNearFar c0       1
            //
			 * 
			 */

			// Registers keyed by parameter name; created from the Parameters
			// section, completed from the Registers table.
			Dictionary<string, Register> registers = new Dictionary<string, Register>();

			// NOTE(review): the three bool Tokenizer arguments are defined
			// elsewhere in the project - presumably comment/newline handling
			// flags; confirm against the Tokenizer declaration.
			Tokenizer tokenizer = new Tokenizer(header, false, true, true);
			string[] lines = header.Split('\n');
			// state: 0 = searching, 1 = inside Parameters, >=2 = inside Registers table.
			int state = 0;

			while (tokenizer.NextToken())
			{
				switch (tokenizer.Token)
				{
					case ":":
						break;
					case "Parameters":
						state = 1;
						break;

					case "//":

						//determine if the line has content...
						if (lines[tokenizer.Line].Trim().Length > 2)
						{

							if (state == 1)
							{
								//try and extract something

								//   float2 shadowCameraNearFar;

								tokenizer.NextToken();

								string type = tokenizer.Token;
								tokenizer.NextToken();

								if (type == "Registers")
								{
									state = 2; //done, go to registers
									break;
								}

								if (type == "struct")
								{
									// Structs cannot be mapped; the name and contents
									// are gathered purely for the error message.
									string structContents = "";
									string structName = "";
									try
									{
										while (tokenizer.Token != "{")
											tokenizer.NextToken();
										tokenizer.ReadBlock();
										structContents = tokenizer.Token;
										tokenizer.NextToken();
										structName = tokenizer.Token;
									}
									catch
									{
									}
									finally
									{
										// Thrown from 'finally' so the exception fires even
										// when reading the struct text itself failed above.
										throw new CompileException(string.Format("Shader compiler cannot map the custom constant structure '{0} {1}' into a compatible XNA data structure", structName, structContents.Replace(Environment.NewLine, "").Replace("//","")));
									}
								}

								string name = tokenizer.Token;

								//possible array, or ;
								tokenizer.NextToken();
								string token = tokenizer.Token;
								int array = -1; // -1 means "not an array"

								if (token == "[")
								{
									tokenizer.NextToken();
									array = int.Parse(tokenizer.Token);
									tokenizer.NextToken(); //eat the ]
									tokenizer.NextToken();
								}

								//should be a ;
								if (tokenizer.Token != ";")
									throw new CompileException("Expected ';' in shader header");

								Register reg = new Register();
								reg.ArraySize = array;
								reg.Name = name;
								reg.Type = type;

								registers.Add(name, reg);
							}

							if (state == 2 || state == 3 || state == 4)
							{
								//   Name                Reg   Size
								//   ------------------- ----- ----
								//   shadowCameraNearFar c0       1

								string name, register, size;

								tokenizer.NextToken();
								name = tokenizer.Token;

								tokenizer.NextToken();
								register = tokenizer.Token;

								tokenizer.NextToken();
								size = tokenizer.Token;

								bool skip = false;

								// Skip the column-header row and the dashed separator row.
								if (name == "Name" && register == "Reg" && size == "Size")
									skip = true;
								if (name.Replace("-","").Length == 0 &&
									register.Replace("-","").Length == 0 &&
									size.Replace("-","").Length == 0)
									skip = true;

								if (!skip)
								{
									Register reg;
									if (registers.TryGetValue(name, out reg))
									{
										reg.Size = int.Parse(size);
										// First letter of the register name encodes its category.
										switch (register[0])
										{
											case 'c':
												reg.Category = RegisterCategory.Float4;
												break;
											case 'i':
												reg.Category = RegisterCategory.Integer4;
												break;
											case 'b':
												reg.Category = RegisterCategory.Boolean;
												break;
											case 't':
												reg.Category = RegisterCategory.Texture;
												break;
											case 's':
												reg.Category = RegisterCategory.Sampler;
												break;
											case 'r':
												reg.Category = RegisterCategory.Temp;
												break;
											default:
												throw new CompileException(string.Format("Unexpected constant type '{0}'", register[0]));
										}
										// Remainder of the register name is its numeric index.
										reg.Index = int.Parse(register.Substring(1));
										reg.Rank = ExtractRank(reg.Type, reg.Category, reg.ArraySize, reg.Size);
										registers[name] = reg;
									}
								}
							}
						}
						
						break;
				}
			}

			List<Register> registerList = new List<Register>();
			foreach (Register register in registers.Values)
				registerList.Add(register);

			this.registers = registerList.ToArray();
		}
Example #48
0
 /// <summary>Each single-character symbol is returned as its own token.</summary>
 public void SymbolTokens_SingleChar()
 {
     var tokenizer = new Tokenizer("=!&|()");
     foreach (var symbol in new[] { "=", "!", "&", "|", "(", ")" })
         Assert.That(tokenizer.NextToken(), Is.EqualTo(new Token(TokenKind.Symbol, symbol)));
     Assert.That(tokenizer.NextToken(), Is.EqualTo(new Token(TokenKind.Eof)));
 }
		/// <summary>A lone quote is reported as a lexical error, followed by end-of-input.</summary>
		public void CheckLexicalErrorOnEnd() {
			using (TestStringReader reader = new TestStringReader("'")) {
				Tokenizer tokenizer = new Tokenizer(reader, grammar);
				Token current;
				// The dangling quote comes back as an error symbol carrying its text.
				ParseMessage message = tokenizer.NextToken(out current);
				Assert.Equal(ParseMessage.LexicalError, message);
				Assert.Equal(SymbolKind.Error, current.Symbol.Kind);
				Assert.Equal("'", current.Text);
				// After the error the tokenizer still reaches a clean end symbol.
				message = tokenizer.NextToken(out current);
				Assert.Equal(ParseMessage.TokenRead, message);
				Assert.Equal(SymbolKind.End, current.Symbol.Kind);
			}
		}
		/// <summary>An unterminated quoted terminal at end of data yields a lexical error.</summary>
		public void EndOfDataWithUnfinishedTerminal() {
			using (TestStringReader reader = new TestStringReader("0 'zero")) {
				Tokenizer tokenizer = new Tokenizer(reader, grammar);
				Token current;
				// "0" reads as a normal terminal and the space as noise...
				Assert.Equal(ParseMessage.TokenRead, tokenizer.NextToken(out current));
				Assert.Equal(SymbolKind.Terminal, current.Symbol.Kind);
				Assert.Equal(ParseMessage.TokenRead, tokenizer.NextToken(out current));
				Assert.Equal(SymbolKind.Noise, current.Symbol.Kind);
				// ...then the unfinished 'zero literal is a lexical error.
				Assert.Equal(ParseMessage.LexicalError, tokenizer.NextToken(out current));
			}
		}
Example #51
0
 /// <summary>Two-character operators are recognized as single symbol tokens.</summary>
 public void SymbolTokens_DoubleChar()
 {
     var tokenizer = new Tokenizer("==&&||!==~!~");
     foreach (var symbol in new[] { "==", "&&", "||", "!=", "=~", "!~" })
         Assert.That(tokenizer.NextToken(), Is.EqualTo(new Token(TokenKind.Symbol, symbol)));
     Assert.That(tokenizer.NextToken(), Is.EqualTo(new Token(TokenKind.Eof)));
 }
		/// <summary>Unknown characters are reported one at a time, not merged into one error.</summary>
		public void CheckUnmergedLexicalError() {
			using (TestStringReader reader = new TestStringReader("1+Nx*200")) {
				Tokenizer tokenizer = new Tokenizer(reader, grammar);
				Token token;
				// "1" and "+" read as ordinary terminals.
				Assert.Equal(ParseMessage.TokenRead, tokenizer.NextToken(out token));
				Assert.Equal(SymbolKind.Terminal, token.Symbol.Kind);
				Assert.Equal(ParseMessage.TokenRead, tokenizer.NextToken(out token));
				Assert.Equal(SymbolKind.Terminal, token.Symbol.Kind);
				// 'N' and 'x' each surface as their own single-character error token.
				foreach (string errorText in new[] { "N", "x" }) {
					Assert.Equal(ParseMessage.LexicalError, tokenizer.NextToken(out token));
					Assert.Equal(SymbolKind.Error, token.Symbol.Kind);
					Assert.Equal(errorText, token.Text);
				}
				// "*" and "200" read normally, then end of input.
				Assert.Equal(ParseMessage.TokenRead, tokenizer.NextToken(out token));
				Assert.Equal(SymbolKind.Terminal, token.Symbol.Kind);
				Assert.Equal(ParseMessage.TokenRead, tokenizer.NextToken(out token));
				Assert.Equal(SymbolKind.Terminal, token.Symbol.Kind);
				Assert.Equal(ParseMessage.TokenRead, tokenizer.NextToken(out token));
				Assert.Equal(SymbolKind.End, token.Symbol.Kind);
			}
		}
		/// <summary>Walks the full token stream of the shared test input, checking text, position and symbol.</summary>
		public void CheckTokens() {
			using (TestStringReader reader = GetReader()) {
				Tokenizer tokenizer = new Tokenizer(reader, grammar);
				Token token;
				AssertTokenRead(tokenizer, "0", 0, "Integer");
				AssertTokenRead(tokenizer, "*", 1, "*");
				AssertTokenRead(tokenizer, "(", 2, "(");
				AssertTokenRead(tokenizer, "3", 3, "Integer");
				AssertTokenRead(tokenizer, "-", 4, "-");
				AssertTokenRead(tokenizer, "5", 5, "Integer");
				AssertTokenRead(tokenizer, ")", 6, ")");
				AssertTokenRead(tokenizer, "/", 7, "/");
				AssertTokenRead(tokenizer, "-", 8, "-");
				AssertTokenRead(tokenizer, "9", 9, "Integer");
				// Whitespace noise, then a line comment delivered as a block.
				Assert.Equal(ParseMessage.TokenRead, tokenizer.NextToken(out token));
				Assert.Equal(" ", token.Text);
				Assert.Equal(10, token.Position.Index);
				Assert.Equal(SymbolKind.Noise, token.Symbol.Kind);
				Assert.Equal(ParseMessage.BlockRead, tokenizer.NextToken(out token));
				Assert.Equal("-- line comment", token.Text);
				Assert.Equal(11, token.Position.Index);
				Assert.Equal(ParseMessage.TokenRead, tokenizer.NextToken(out token));
				Assert.Equal(SymbolKind.Noise, token.Symbol.Kind);
				AssertTokenRead(tokenizer, "+", 28, "+");
				AssertTokenRead(tokenizer, "1.0", 29, "Float");
				// Noise, a comment block, more noise...
				Assert.Equal(ParseMessage.TokenRead, tokenizer.NextToken(out token));
				Assert.Equal(SymbolKind.Noise, token.Symbol.Kind);
				Assert.Equal(ParseMessage.BlockRead, tokenizer.NextToken(out token));
				Assert.Equal(ParseMessage.TokenRead, tokenizer.NextToken(out token));
				Assert.Equal(SymbolKind.Noise, token.Symbol.Kind);
				AssertTokenRead(tokenizer, "*", 54, "*");
				AssertTokenRead(tokenizer, ".0e2", 55, "Float");
				// ...and finally end-of-input.
				Assert.Equal(ParseMessage.TokenRead, tokenizer.NextToken(out token));
				Assert.Equal(SymbolKind.End, token.Symbol.Kind);
			}
		}

		/// <summary>Reads one token and asserts its text, position index and symbol name.</summary>
		private static void AssertTokenRead(Tokenizer tokenizer, string text, int index, string symbolName) {
			Token token;
			Assert.Equal(ParseMessage.TokenRead, tokenizer.NextToken(out token));
			Assert.Equal(text, token.Text);
			Assert.Equal(index, token.Position.Index);
			Assert.Equal(symbolName, token.Symbol.Name);
		}
Example #54
0
 /// <summary>Slash-delimited strings behave like quoted strings.</summary>
 public void StringWithSlashes()
 {
     var tokenizer = new Tokenizer("/string at start/ /may contain \" char/ /string at end/");
     foreach (var expected in new[] { "string at start", "may contain \" char", "string at end" })
         Assert.That(tokenizer.NextToken(), Is.EqualTo(new Token(TokenKind.String, expected)));
     Assert.That(tokenizer.NextToken(), Is.EqualTo(new Token(TokenKind.Eof)));
 }
Example #55
0
 /// <summary>
 /// Handles infix operators (precedence-climbing).  Should not be called
 /// from anyplace but ParseExpression.
 /// Does not currently handle unary or right-associative operators.
 /// </summary>
 /// <param name="lhs">Value of the left-hand side argument to the operator.</param>
 /// <param name="tokens">Token stream</param>
 /// <param name="minPrecedence">The precedence of any infix operator of which this is the rhs.</param>
 /// <returns>Value of the parsed expression</returns>
 private object ParseOperatorExpression(object lhs, Tokenizer tokens, int minPrecedence)
 {
     // Consume operators at or above minPrecedence, folding left-to-right.
     while (!tokens.EndOfTokens && IsBinaryOperator(tokens.PeekToken()) && Precedence(tokens.PeekToken()) >= minPrecedence)
     {
         string op = tokens.NextToken();
         object rhs = this.ParsePrimary(tokens);
         // A following operator that binds tighter than 'op' claims rhs first;
         // strict '>' keeps equal-precedence operators left-associative.
         while (!tokens.EndOfTokens && IsBinaryOperator(tokens.PeekToken())
                && Precedence(tokens.PeekToken()) > Precedence(op))
         {
             rhs = this.ParseOperatorExpression(rhs, tokens, Precedence(tokens.PeekToken()));
         }
         lhs = ApplyOperator(op, lhs, rhs);
     }
     return lhs;
 }
Example #56
0
 /// <summary>A backslash-escaped delimiter inside a string is unescaped in the token value.</summary>
 public void StringsMayContainEscapedQuoteChar()
 {
     var tokenizer = new Tokenizer("/abc\\/xyz/   'abc\\'xyz'  \"abc\\\"xyz\"");
     foreach (var expected in new[] { "abc/xyz", "abc'xyz", "abc\"xyz" })
         Assert.That(tokenizer.NextToken(), Is.EqualTo(new Token(TokenKind.String, expected)));
     Assert.That(tokenizer.NextToken(), Is.EqualTo(new Token(TokenKind.Eof)));
 }
Example #57
0
    /// <summary>
    /// Parses a primary expression: a parenthesized expression, a magnitude
    /// (|expr|), a function call, or an atomic value.
    /// </summary>
    /// <param name="tokens">Token stream positioned at the primary.</param>
    /// <returns>The value of the parsed primary.</returns>
    private object ParsePrimary(Tokenizer tokens)
    {
        var token = tokens.NextToken();

        if (token == "-")
            throw new Exception("Unary 1 not yet implemented");

        if (token == "(")
        {
            var inner = this.ParseExpression(tokens);
            if (tokens.PeekTokenOrEOT() != ")")
                throw new Exception("Parse error - expected ')', but got "+tokens.PeekTokenOrEOT());
            tokens.NextToken();
            return inner;
        }

        if (token == "|")
        {
            var inner = this.ParseExpression(tokens);
            if (tokens.PeekTokenOrEOT() != "|")
                throw new Exception("Parse error - expected '|' bot got "+tokens.PeekTokenOrEOT());
            tokens.NextToken();
            return ApplyMagnitude(inner);
        }

        // A name followed by '(' is a function call; anything else is atomic.
        if (tokens.PeekTokenOrEOT() == "(")
            return ParseFunctionCall(token, tokens);
        return this.ResolveAtomic(token);
    }
Example #58
0
 /// <summary>A realistic filter expression mixing words, symbols and a quoted string.</summary>
 public void MixedTokens_Complex()
 {
     var tokenizer = new Tokenizer("name =~ '*DataBase*' && (category = Urgent || Priority = High)");
     var expected = new[]
     {
         new Token(TokenKind.Word, "name"),
         new Token(TokenKind.Symbol, "=~"),
         new Token(TokenKind.String, "*DataBase*"),
         new Token(TokenKind.Symbol, "&&"),
         new Token(TokenKind.Symbol, "("),
         new Token(TokenKind.Word, "category"),
         new Token(TokenKind.Symbol, "="),
         new Token(TokenKind.Word, "Urgent"),
         new Token(TokenKind.Symbol, "||"),
         new Token(TokenKind.Word, "Priority"),
         new Token(TokenKind.Symbol, "="),
         new Token(TokenKind.Word, "High"),
         new Token(TokenKind.Symbol, ")"),
     };
     foreach (var token in expected)
         Assert.That(tokenizer.NextToken(), Is.EqualTo(token));
     Assert.That(tokenizer.NextToken(), Is.EqualTo(new Token(TokenKind.Eof)));
 }
		/// <summary>
		/// Recursively parses the token stream into HlslStructure nodes, one
		/// recursion level per brace/bracket nesting level.  Statements at this
		/// level are buffered until a ';', a nested block, or (for preprocessor
		/// lines) a newline completes them.
		/// </summary>
		/// <param name="tokenizer">Token source, shared across recursion levels.</param>
		/// <param name="list">Output list for structures found at this level.</param>
		private static void Parse(Tokenizer tokenizer, List<HlslStructure> list)
		{
			// Depths at entry: an increase means a child block starts; a drop
			// below these values means this level has closed.
			int braceDepth = tokenizer.BraceDepth;
			int bracketDepth = tokenizer.BracketDepth;
			List<ShaderStatement> buffer = new List<ShaderStatement>();

			bool breakOnNewLine = false;
			bool isNewLine = false;

			//parsing a top level type declaration, eg, float4 test = ...;
			//will be set to false when hitting the '=' or a brace.
			//this is used to detect annotations on types (and ignore them!)
			bool processingTypeDeclaration = braceDepth == 0;

			while (tokenizer.NextToken())
			{
				if (isNewLine)
				{
					if (tokenizer.Token.Length == 1 &&
						tokenizer.Token[0] == '#') //#include, #if, etc.
					{
						// Preprocessor directives end at the newline, not at ';'.
						breakOnNewLine = true;
						processingTypeDeclaration = false;
					}
				}

				if (processingTypeDeclaration && (tokenizer.Token == "=" || tokenizer.BraceDepth > 0))
					processingTypeDeclaration = false;


				//detect annotations (in a <> block)
				if (processingTypeDeclaration && tokenizer.Token == "<")
				{
					//skip the annotation entirely.

					int blockDepth = 1;
					//parse until the matching '>'
					while (tokenizer.NextToken() && blockDepth != 0)
					{
						if (tokenizer.Token.Length == 1)
						{
							if (tokenizer.Token[0] == '<')
								blockDepth++;
							else
							if (tokenizer.Token[0] == '>')
								blockDepth--;
						}
					}
				}


				// Remember whether this token was a newline for the next iteration.
				if (tokenizer.TokenIsNewLine)
					isNewLine = true;
				else
					isNewLine = false;

				// A '[' opened: recurse to collect the bracketed children.
				if (bracketDepth < tokenizer.BracketDepth && !processingTypeDeclaration)
				{
					List<HlslStructure> nodes = new List<HlslStructure>();

					while (bracketDepth < tokenizer.BracketDepth)
						Parse(tokenizer, nodes);

					list.Add(new HlslStructure(buffer.ToArray(), nodes.ToArray(), false, true));

					buffer.Clear();
					continue;
				}

				// A '{' opened: recurse to collect the braced children.
				if (braceDepth < tokenizer.BraceDepth)
				{
					List<HlslStructure> nodes = new List<HlslStructure>();

					while (braceDepth < tokenizer.BraceDepth)
						Parse(tokenizer, nodes);

					list.Add(new HlslStructure(buffer.ToArray(), nodes.ToArray(), true, false));

					buffer.Clear();
					processingTypeDeclaration = tokenizer.BraceDepth == 0;

					continue;
				}

				// Depth dropped below this level's entry depth: block closed.
				if (braceDepth > tokenizer.BraceDepth)
					break;
				
				if (bracketDepth > tokenizer.BracketDepth && !processingTypeDeclaration)
					break;

				if (!tokenizer.TokenIsNewLine)
					buffer.Add(new ShaderStatement(tokenizer.Token, tokenizer.Line));

				// ';' (or a newline for preprocessor lines) terminates a statement.
				if (tokenizer.Token == ";" || (breakOnNewLine && tokenizer.TokenIsNewLine))
					break;
			}

			// Flush any trailing statements that had no child block.
			if (buffer.Count > 0)
				list.Add(new HlslStructure(buffer.ToArray(), new HlslStructure[0], false, false));
		}
Example #60
0
        /// <summary>
        /// Reads the next token and requires it to terminate a value:
        /// either a closing brace or a closing value quote.
        /// </summary>
        /// <param name="tokenizer">Source of tokens; also supplies error context.</param>
        /// <returns>The terminating token.</returns>
        private AbstractToken ValueStop(Tokenizer.Tokenizer tokenizer)
        {
            AbstractToken token = tokenizer.NextToken();

            // Exact-type comparison is preserved deliberately: subclasses of
            // ClosingBrace/ValueQuote must not match here.
            if (token.GetType() != typeof(ClosingBrace) && token.GetType() != typeof(ValueQuote))
            {
                throw new ParseException("Expected type ClosingBrace or ValueQuote but found: " + token.GetType() + " after " + tokenizer.GetPreviousCharacters(25));
            }

            return token;
        }