Example #1
        /// <summary>
        /// Parses the source lines into tokens using the given rule.
        /// </summary>
        /// <param name="rule">Rule to use</param>
        /// <param name="source">Source lines</param>
        /// <returns>Tokens</returns>
        public static IEnumerable<Token> Parse(this ITokenDescriptionRules rule, IEnumerable<string> source)
        {
            if (null == rule)
            {
                throw new ArgumentNullException(nameof(rule));
            }

            return Tokenizer.Parse(source, rule);
        }
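
A minimal usage sketch for this overload. The CSharpRules type and the file name are assumptions introduced for illustration; only Token members that appear in the examples below (StartLine, StartColumn, Text) are used.

        // Usage sketch (CSharpRules and "program.src" are assumptions, not part of the library)
        ITokenDescriptionRules rule = new CSharpRules();

        foreach (Token token in rule.Parse(File.ReadLines("program.src")))
        {
            Console.WriteLine($"{token.StartLine + 1}:{token.StartColumn + 1} {token.Text}");
        }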
Example #2
        /// <summary>
        /// Parses the source lines into tokens using the given rules.
        /// </summary>
        /// <param name="source">Lines to parse</param>
        /// <param name="rules">Rules to use; when null, the registered service is used instead</param>
        /// <returns>Tokens</returns>
        public static IEnumerable<Token> Parse(this IEnumerable<string> source,
                                               ITokenDescriptionRules rules)
        {
            if (null == source)
            {
                throw new ArgumentNullException(nameof(source));
            }

            // Fall back to the rules registered with the dependency container
            if (null == rules)
            {
                rules = Dependencies.GetService<ITokenDescriptionRules>();
            }

            if (null == rules)
            {
                throw new ArgumentNullException(nameof(rules));
            }

            return CoreParse(source, rules);
        }
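
This overload accepts a null rules argument and falls back to the service container. A sketch of both call shapes, assuming myRules is some ITokenDescriptionRules instance and that an implementation has been registered with Dependencies beforehand:

        // Usage sketch (myRules and the service registration are assumptions)
        IEnumerable<string> lines = File.ReadLines("program.src");

        // Explicit rules
        IEnumerable<Token> tokens = lines.Parse(myRules);

        // Null falls back to Dependencies.GetService<ITokenDescriptionRules>()
        IEnumerable<Token> fallbackTokens = lines.Parse(null);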
Example #3
        // Core tokenizer: walks the source line by line, emitting single-line tokens as they
        // complete and stitching multi-line tokens together across lines.
        private static IEnumerable<Token> CoreParse(IEnumerable<string> source, ITokenDescriptionRules rule)
        {
            Token  current = null;   // multi-line token currently being assembled, if any
            string prefix  = null;   // opening lexeme of the pending multi-line token

            int line = -1;

            List<Token> context = new List<Token>();

            TokenDescription.TokenDescriptionMatch match;

            StringBuilder sb = new StringBuilder();

            foreach (var lineOfSource in source)
            {
                line += 1;
                int column = 0;

                while (column < lineOfSource.Length)
                {
                    // A multi-line token is open: look for its stop marker on this line
                    if (current != null)
                    {
                        if (current.Description.TryMatchStop(lineOfSource, column, context, out match, prefix))
                        {
                            current.StopLine   = line;
                            current.StopColumn = match.To;

                            sb.AppendLine();
                            sb.Append(lineOfSource.Substring(0, match.To));
                            //sb.Append(match.Extract(lineOfSource));

                            current.Text = sb.ToString();

                            yield return current;

                            current = null;
                            sb.Clear();

                            column = match.To;
                            prefix = null;

                            continue;
                        }
                        else
                        {
                            sb.AppendLine();
                            sb.Append(lineOfSource);

                            break;
                        }
                    }

                    // Skip white space
                    if (char.IsWhiteSpace(lineOfSource[column]))
                    {
                        column += 1;

                        continue;
                    }

                    // Test for lexeme
                    match = rule.Match(lineOfSource, column, context);

                    if (!match.IsMatch)
                    {
                        throw new TokenSyntaxException($"Syntax error at {line + 1:00000} : {column + 1:000}", line, column);
                    }

                    current = new Token(match.Description, line, match.From);

                    if (match.Kind == TokenMatchKind.Entire)
                    {
                        current.StopLine   = line;
                        current.StopColumn = match.To;
                        current.Text       = match.Extract(lineOfSource);

                        context.Add(current);

                        yield return current;

                        current = null;
                        sb.Clear();

                        column = match.To;
                        prefix = null;

                        continue;
                    }

                    // Start of a multi-line token: keep the opening lexeme and buffer the rest of the line
                    prefix = match.Extract(lineOfSource);
                    sb.Append(lineOfSource.Substring(match.From));

                    break;
                }
            }

            if (null != current)
            {
                throw new TokenSyntaxException($"Dangling token at {current.StartLine + 1:00000} : {current.StartColumn + 1:000}",
                                               current.StartLine,
                                               current.StartColumn);
            }

            yield break;
        }
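
CoreParse is a lazy iterator, so TokenSyntaxException surfaces while the result is being enumerated, either for unmatched input or for a multi-line token whose stop marker never appears. A sketch of how a caller might handle that, with sourceLines and rules assumed to already be in scope:

        // Error-handling sketch (sourceLines and rules are assumptions)
        try
        {
            foreach (Token token in sourceLines.Parse(rules))
            {
                // Multi-line tokens report both their start and stop positions
                Console.WriteLine($"{token.StartLine + 1}:{token.StartColumn + 1} -> {token.StopLine + 1}:{token.StopColumn + 1}");
            }
        }
        catch (TokenSyntaxException error)
        {
            Console.WriteLine(error.Message);
        }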