Example no. 1
0
        /// <summary>
        /// Builds the tokenization pipeline that splits the given string into
        /// token terms (i.e. words).
        /// </summary>
        /// <param name="fieldName">
        ///            name of the field being tokenized (currently unused). </param>
        /// <param name="reader">
        ///            reader (e.g. charfilter) over the original text; may be null. </param>
        /// <param name="text">
        ///            the string to tokenize </param>
        /// <returns> a new token stream </returns>
        public TokenStreamComponents CreateComponents(string fieldName, TextReader reader, string text)
        {
            // Ideally the Analyzer superclass should have a method with the same signature,
            // with a default impl that simply delegates to the StringReader flavour.
            if (reader == null)
            {
                reader = new FastStringReader(text);
            }

            // Fast paths for the two built-in patterns: a specialized tokenizer
            // that receives the lower-casing flag and stop set directly.
            bool isNonWord = pattern == NON_WORD_PATTERN;
            if (isNonWord || pattern == WHITESPACE_PATTERN)
            {
                return new TokenStreamComponents(new FastStringTokenizer(reader, isNonWord, toLowerCase, stopWords));
            }

            // General case: regex-driven tokenizer, optionally wrapped in a stop filter.
            Tokenizer source = new PatternTokenizer(reader, pattern, toLowerCase);
            if (stopWords == null)
            {
                return new TokenStreamComponents(source, source);
            }
            return new TokenStreamComponents(source, new StopFilter(matchVersion, source, stopWords));
        }
Example no. 2
0
        /// <summary>
        /// Creates a token stream that tokenizes the given string into token terms
        /// (aka words).
        /// </summary>
        /// <param name="fieldName">
        ///            the name of the field to tokenize (currently ignored). </param>
        /// <param name="reader">
        ///            reader (e.g. charfilter) of the original text. can be null. </param>
        /// <param name="text">
        ///            the string to tokenize </param>
        /// <returns> a new token stream </returns>
        public TokenStreamComponents createComponents(string fieldName, TextReader reader, string text)
        {
            // Ideally the Analyzer superclass should have a method with the same signature,
            // with a default impl that simply delegates to the StringReader flavour.
            if (reader == null)
            {
                reader = new FastStringReader(text);
            }

            if (pattern == NON_WORD_PATTERN) // fast path
            {
                return new TokenStreamComponents(new FastStringTokenizer(reader, true, toLowerCase, stopWords));
            } // fast path
            else if (pattern == WHITESPACE_PATTERN)
            {
                return new TokenStreamComponents(new FastStringTokenizer(reader, false, toLowerCase, stopWords));
            }

            Tokenizer tokenizer = new PatternTokenizer(reader, pattern, toLowerCase);
            // FIX: the conditional needs an explicit common type. StopFilter and
            // Tokenizer have no implicit conversion to each other, so without the
            // (TokenStream) cast this expression does not compile on pre-C# 9
            // compilers (no target-typed conditional). Matches the sibling overload.
            TokenStream result = (stopWords != null) ? (TokenStream)new StopFilter(matchVersion, tokenizer, stopWords) : tokenizer;
            return new TokenStreamComponents(tokenizer, result);
        }
Example no. 3
0
    /// <summary>
    /// Loads all levels from the "leveldata" text asset into <c>_levels</c>.
    /// Each level is a block of <c>G.rowsInData</c> lines followed by one blank
    /// separator line; cells not covered by the data default to '-'.
    /// No-op if the levels have already been loaded.
    /// </summary>
    public void Load()
    {
        if (_loaded)
        {
            return;
        }

        _levels = new List<LevelData>();

        if (data == null)
        {
            data = Resources.Load("leveldata", typeof(TextAsset)) as TextAsset;

            if (data == null)
            {
                D.log("[Levels] level data is null!");
                return;
            }
        }

        using (FastStringReader reader = new FastStringReader(data.text))
        {
            bool skip = false;
            while (!skip)
            {
                // Start every level as a full grid of '-' so short or missing
                // lines simply leave the default cell value in place.
                LevelData one = new LevelData();
                one.data = new char[G.rows, G.cols];
                for (int i = 0; i < G.rows; i++)
                {
                    for (int j = 0; j < G.cols; j++)
                    {
                        one.data[i, j] = '-';
                    }
                }

                // Read the level body line by line.
                // NOTE(review): assumes G.rowsInData <= G.rows — confirm, or
                // rows beyond the grid would index out of range below.
                for (int i = 0; i < G.rowsInData; i++)
                {
                    string line;

                    line = reader.ReadLine();

                    if (line == null)
                    {
                        D.log("[Levels] The data format is corrupted!");
                        skip = true;
                        break;
                    }

                    line = line.Trim().Replace(" ", "");

                    // FIX: clamp to the grid width. A data line longer than
                    // G.cols previously threw IndexOutOfRangeException; extra
                    // trailing characters are now ignored instead.
                    int cols = (line.Length < G.cols) ? line.Length : G.cols;
                    for (int j = 0; j < cols; j++)
                    {
                        one.data[i, j] = line[j];
                    }
                }

                if (!skip)
                {
                    _levels.Add(one);

                    // Consume the blank separator line; hitting end-of-data here
                    // just ends the loop (the last level is already stored).
                    string line = reader.ReadLine();
                    if (line == null)
                    {
                        skip = true;
                    }
                }
            }
        }

        D.log("[Levels] {0} levels found.", _levels.Count);

        _loaded = true;
    }