Example No. 1
        private static ArraySegment<TokenData<MetaToken>> LexArguments<T>(string src, TokenData<MetaToken> context, ArraySegment<TokenData<T>> tokens, int layer = 0) where T : Enum
        {
            var parts = new List<ArraySegment<TokenData<T>>>();
            var start = 0;
            var depth = 0;

            for (int i = 0; i < tokens.Count; i++)
            {
                var token = tokens[i];

                // Only plain Token values are expected here; anything else is a context error.
                if (!(token.Type is Token type))
                {
                    throw new SourceError(token.GetSourceIndex(), token.GetSourceLength(), SourceError.WrongContext);
                }

                if (type == Token.LeftBracket)
                {
                    depth++;

                    // The outermost opening bracket starts the argument list.
                    if (depth == 1)
                    {
                        start = i + 1;
                    }
                }
                else if (type == Token.RightBracket)
                {
                    // The matching closing bracket finishes the last argument.
                    if (depth == 1)
                    {
                        parts.Add(tokens.Slice(start, i - start));
                        start = i + 1;
                    }

                    depth--;
                }
                else if (type == Token.Seperator && depth == 1)
                {
                    // A separator at the top nesting level splits off the next argument.
                    parts.Add(tokens.Slice(start, i - start));
                    start = i + 1;
                }
            }

            // Unbalanced brackets leave a non-zero depth behind.
            if (depth != 0)
            {
                throw new SourceError(tokens.First().GetSourceIndex(), tokens.First().GetSourceLength(), SourceError.InvalidSyntax);
            }

            return parts.Select(part => new TokenData<MetaToken>(MetaToken.Argument, context.Start, context.Length, tokens.Select(t => (TokenData<Enum>)t).ToArray(), Trim(LexMeta(src, part, layer)))).ToArray();
        }
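A minimal, self-contained sketch of the same depth-tracked splitting idea, using plain characters in place of the lexer's TokenData types; every name below is illustrative and not taken from the original source.

        // Splits "f(a, g(b, c), d)"-style input into top-level arguments,
        // ignoring separators that sit inside nested brackets.
        public static List<string> SplitArguments(string src)
        {
            var parts = new List<string>();
            var start = 0;
            var depth = 0;

            for (int i = 0; i < src.Length; i++)
            {
                var c = src[i];

                if (c == '(')
                {
                    depth++;
                    if (depth == 1) start = i + 1;              // outermost bracket opens the list
                }
                else if (c == ')')
                {
                    if (depth == 1) parts.Add(src.Substring(start, i - start));
                    depth--;
                }
                else if (c == ',' && depth == 1)
                {
                    parts.Add(src.Substring(start, i - start)); // top-level separator
                    start = i + 1;
                }
            }

            if (depth != 0)
            {
                throw new FormatException("Unbalanced brackets.");
            }

            return parts;
        }

Calling SplitArguments("f(a, g(b, c), d)") yields "a", " g(b, c)" and " d"; the comma nested inside g(b, c) is left alone, which is exactly what the depth check in LexArguments achieves for tokens.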
Example No. 2
 private void MeasureTimerPrecision<TTimer>(TTimer timer)
     where TTimer : ITimer
 {
     for (int i = 0; i < 10; i++)
     {
         // Gather precision samples, then report the min/median/max in ticks.
         TimerMeasurer.MeasurePrecision(timer, m_precisionMeasurements);
         m_precisionMeasurements.Sort();
         var min    = m_precisionMeasurements.First();
         var median = m_precisionMeasurements.Middle();
         var max    = m_precisionMeasurements.Last();
         _output.WriteLine($"Precision {min} - {median} - {max} ticks");
     }
 }
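Middle() is not a standard LINQ operator, so it is presumably a project-specific extension; a plausible sketch of what it could look like, assuming the list has already been sorted as in the loop above (a guess, not the original implementation):

     // Returns the element at the midpoint of a list; on a sorted list this is an approximate median.
     public static T Middle<T>(this IList<T> items)
     {
         if (items == null || items.Count == 0)
         {
             throw new InvalidOperationException("Sequence contains no elements.");
         }

         return items[items.Count / 2];
     }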
Example No. 3
        protected virtual StringContent SerializeToString(ContentType contentType, ArraySegment<object> args)
        {
            // Default to JSON unless XML was explicitly requested.
            var mediaType = "application/json";

            if (contentType == ContentType.Xml)
            {
                mediaType = "application/xml";
            }

            // A single argument is serialized on its own; multiple arguments are serialized as a collection.
            if (args.Count == 1)
            {
                return new StringContent(ModelSerializer.Serialize(args.First()), Encoding.UTF8, mediaType);
            }

            return new StringContent(ModelSerializer.Serialize(args), Encoding.UTF8, mediaType);
        }
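A hedged illustration of why the single-argument branch matters, with System.Text.Json's JsonSerializer standing in for ModelSerializer (an assumption; the real serializer is not shown on this page): one argument is sent as the object itself, several arguments as an array.

        object[] one  = { new { Id = 1, Name = "alpha" } };
        object[] many = { new { Id = 1 }, new { Id = 2 } };

        // One argument: the payload is the object itself -> {"Id":1,"Name":"alpha"}
        Console.WriteLine(JsonSerializer.Serialize(one[0]));

        // Several arguments: the payload is an array -> [{"Id":1},{"Id":2}]
        Console.WriteLine(JsonSerializer.Serialize(many));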
Example No. 4
        void GetNodes(JsonElement current, ArraySegment<string> path, List<int> nodes)
        {
            // End of the path: record the length of the array we landed on.
            if (!path.Any())
            {
                nodes.Add(current.GetArrayLength());
                return;
            }

            var split = path.First();

            if (split[0] == '[')
            {
                // Array step: either a wildcard over every element or a specific index.
                var splitValue = split.Substring(1, split.Length - 2);
                if (splitValue == "*")
                {
                    foreach (var next in current.EnumerateArray())
                    {
                        GetNodes(next, path.Slice(1), nodes);
                    }
                }
                else
                {
                    var splitIndex = int.Parse(splitValue);
                    var next       = current.EnumerateArray().Skip(splitIndex).First();
                    GetNodes(next, path.Slice(1), nodes);
                }
            }
            else
            {
                // Object step: descend into the named property if it exists.
                foreach (var kv in current.EnumerateObject())
                {
                    if (kv.Name == split)
                    {
                        var next = current.GetProperty(split);
                        GetNodes(next, path.Slice(1), nodes);
                        return;
                    }
                }

                // Property not found: record -1 as a sentinel.
                nodes.Add(-1);
            }
        }
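A hedged usage sketch of the traversal above, assuming the path convention the method parses ('[*]' for every array element, '[index]' for one element, anything else for a property name); the JSON document and path here are illustrative.

        // From inside the class that defines GetNodes:
        var json = "{\"items\":[{\"tags\":[\"a\",\"b\"]},{\"tags\":[\"c\"]},{}]}";
        using var doc = JsonDocument.Parse(json);

        var nodes = new List<int>();
        GetNodes(doc.RootElement, new ArraySegment<string>(new[] { "items", "[*]", "tags" }), nodes);

        // nodes is now 2, 1, -1: two tags, one tag, and -1 for the element without a "tags" property.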
Example No. 5
        /// <summary>
        /// Generate regular expressions to match word sequences in original string.
        /// </summary>
        /// <param name="phrase">Original string to be processed.</param>
        /// <param name="maxLength">Maximum phrase length to support.</param>
        /// <returns>Array of regular expressions to match subsequences in input.</returns>
        /// <remarks>
        /// This function will call <see cref="CamelCase(string)"/> and then will generate sub-phrases up to maxLength.
        /// For example an enumeration of AngusBeefAndGarlicPizza would generate: 'angus?', 'beefs?', 'garlics?', 'pizzas?', 'angus? beefs?', 'garlics? pizzas?' and 'angus beef and garlic pizza'.
        /// You can call it directly, or it is used when <see cref="FieldReflector{T}"/> generates terms or when <see cref="TermsAttribute"/> is used with a <see cref="TermsAttribute.MaxPhrase"/> argument.
        /// </remarks>
        public static string[] GenerateTerms(string phrase, int maxLength)
        {
            var words = (from word in phrase.Split(' ') select word.ToLower()).ToArray();
            var terms = new List<string>();

            for (var length = 1; length <= Math.Min(words.Length, maxLength); ++length)
            {
                for (var start = 0; start <= words.Length - length; ++start)
                {
                    var ngram = new ArraySegment<string>(words, start, length);
                    if (!ArticleOrNone(ngram.First()) && !ArticleOrNone(ngram.Last()))
                    {
                        terms.Add(string.Join(" ", OptionalPlurals(ngram)));
                    }
                }
            }
            if (words.Length > maxLength)
            {
                terms.Add(string.Join(" ", words));
            }
            return terms.ToArray();
        }
Example No. 6
 /// <summary>
 /// Generate regular expressions to match word sequences in original string.
 /// </summary>
 /// <param name="phrase">Original string to be processed.</param>
 /// <param name="maxLength">Maximum phrase length to support.</param>
 /// <returns>Array of regular expressions to match subsequences in input.</returns>
 /// <remarks>
 /// This function will call <see cref="CamelCase(string)"/> and then will generate sub-phrases up to maxLength.  
 /// For example an enumeration of AngusBeefAndGarlicPizza would generate: 'angus?', 'beefs?', 'garlics?', 'pizzas?', 'angus? beefs?', 'garlics? pizzas?' and 'angus beef and garlic pizza'.
 /// You can call it directly, or it is used when <see cref="FieldReflector{T}"/> generates terms or when <see cref="TermsAttribute"/> is used with a <see cref="TermsAttribute.MaxPhrase"/> argument.
 /// </remarks>
 public static string[] GenerateTerms(string phrase, int maxLength)
 {
     var words = (from word in phrase.Split(' ') where word.Length > 0 select word.ToLower()).ToArray();
     var terms = new List<string>();
     for (var length = 1; length <= Math.Min(words.Length, maxLength); ++length)
     {
         for (var start = 0; start <= words.Length - length; ++start)
         {
             var ngram = new ArraySegment<string>(words, start, length);
             if (!ArticleOrNone(ngram.First()) && !ArticleOrNone(ngram.Last()))
             {
                 terms.Add(string.Join(" ", OptionalPlurals(ngram)));
             }
         }
     }
     if (words.Length > maxLength)
     {
         terms.Add(string.Join(" ", words));
     }
     return terms.ToArray();
 }
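Per the summary, each generated term is a regular-expression fragment (note the 's?' suffixes); a hedged sketch of how such fragments could be alternated into a single pattern and matched, with every name below an illustration rather than part of the library:

     // Fragments like those GenerateTerms produces for "angus beef".
     var terms = new[] { "angus?", "beefs?", "angus? beefs?" };

     // Alternate the fragments and anchor them on word boundaries.
     var regex = new Regex(@"\b(" + string.Join("|", terms) + @")\b", RegexOptions.IgnoreCase);

     Console.WriteLine(regex.IsMatch("I ordered the angus beef pizza")); // True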