Example No. 1
 internal Lexer(string s)
 {
     tok = new StreamTokenizer(new CharArrayReader(s.ToCharArray()));
     tok.QuoteChar('"');       // double-quoted text comes back as a single string token
     tok.ParseNumbers();       // numeric literals are parsed into number tokens
     tok.OrdinaryChar(',');    // ',', '(' and ')' are returned as individual character tokens
     tok.OrdinaryChar('(');
     tok.OrdinaryChar(')');
     tok.WordChars('$', '$');  // allow '$' and '_' inside identifiers
     tok.WordChars('_', '_');
 }
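
The constructor only configures the tokenizer; the rest of the Lexer presumably drains it elsewhere. As a minimal sketch of such a read loop, assuming this port keeps the java.io.StreamTokenizer surface (a NextToken() method returning the token type, StringValue/NumberValue fields and TT_EOF/TT_WORD/TT_NUMBER constants; those member names are assumptions, not confirmed by the example):

 // Hypothetical read loop; NextToken, StringValue, NumberValue and the TT_* constants
 // are assumed from java.io.StreamTokenizer and may be named differently in this port.
 int type;
 while ((type = tok.NextToken()) != StreamTokenizer.TT_EOF)
 {
     if (type == StreamTokenizer.TT_WORD)
         Console.WriteLine("word:   " + tok.StringValue);   // identifiers, including $name and _name forms
     else if (type == StreamTokenizer.TT_NUMBER)
         Console.WriteLine("number: " + tok.NumberValue);   // enabled by ParseNumbers()
     else if (type == '"')
         Console.WriteLine("string: " + tok.StringValue);   // QuoteChar('"') returns quoted text as one token
     else
         Console.WriteLine("symbol: " + (char)type);        // ',', '(' and ')' arrive as single characters
 }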
Example No. 2
 private void initTokenizer()
 {
     st = new StreamTokenizer(new StreamReader(i, Encoding.GetEncoding("ISO-8859-1")));
     st.ResetSyntax();              // discard the default syntax table
     st.WhitespaceChars(0, 32);     // control characters and space are whitespace
     st.WordChars(33, 255);         // every printable Latin-1 character can form a word
     st.CommentChar('!');           // '!' starts a comment that runs to end of line
     st.QuoteChar('\'');            // single-quoted text becomes one string token
     st.EolIsSignificant(false);    // line breaks are treated as ordinary whitespace
 }
Example No. 3
        public static void InputGraph(IGraph inputGraph, Stream inputStream, int bufferSize,
                                      string defaultEdgeLabel, string vertexIdKey, string edgeIdKey,
                                      string edgeLabelKey)
        {
            if (inputGraph == null)
            {
                throw new ArgumentNullException(nameof(inputGraph));
            }
            if (inputStream == null)
            {
                throw new ArgumentNullException(nameof(inputStream));
            }
            if (bufferSize <= 0)
            {
                throw new ArgumentException("bufferSize must be greater than zero");
            }
            if (string.IsNullOrWhiteSpace(defaultEdgeLabel))
            {
                throw new ArgumentNullException(nameof(defaultEdgeLabel));
            }

            var graph = BatchGraph.Wrap(inputGraph, bufferSize);

            using (var r = new StreamReader(inputStream, Encoding.GetEncoding("ISO-8859-1")))
            {
                var st = new StreamTokenizer(r);

                try
                {
                    st.CommentChar(GmlTokens.CommentChar);
                    st.OrdinaryChar('[');
                    st.OrdinaryChar(']');

                    // Register punctuation that may appear inside GML string values as word characters
                    const string stringCharacters = "/\\(){}<>!£$%^&*-+=,.?:;@_`|~";
                    for (var i = 0; i < stringCharacters.Length; i++)
                    {
                        st.WordChars(stringCharacters[i], stringCharacters[i]);
                    }

                    new GmlParser(graph, defaultEdgeLabel, vertexIdKey, edgeIdKey, edgeLabelKey).Parse(st);

                    graph.Commit();
                }
                catch (IOException e)
                {
                    throw new IOException(string.Concat("GML malformed line number ", st.LineNumber, ": "), e);
                }
            }
        }
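
A possible call site for this reader, assuming the method lives on a GmlReader-style class, that TinkerGraph (or any other IGraph implementation) is available, and that the optional id/label key parameters accept null; all three of those points are assumptions rather than confirmed API:

        // Hypothetical usage; GmlReader and TinkerGraph are assumed names, and passing null
        // for vertexIdKey/edgeIdKey/edgeLabelKey assumes those parameters are optional.
        // Requires using System.IO;
        IGraph graph = new TinkerGraph();
        using (var stream = File.OpenRead("graph.gml"))
        {
            GmlReader.InputGraph(graph, stream, 1000, "edge", null, null, null);
        }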
Example No. 4
 /// <summary>
 /// Constructs an ExtendedStreamTokenizer from the given Reader. This ExtendedStreamTokenizer has no comment
 /// characters.
 /// </summary>
 /// <param name="reader">the source of the data</param>
 /// <param name="eolIsSignificant">true if eol is significant</param>
 public ExtendedStreamTokenizer(StreamReader reader, Boolean eolIsSignificant)
 {
     _reader = reader;
     if (reader != null)
     {
         _tokenizer = new StreamTokenizer(reader);
         _tokenizer.ResetSyntax();
         _tokenizer.WhitespaceChars(0, 32);
         _tokenizer.WordChars(33, 255);
         _tokenizer.EolIsSignificant(eolIsSignificant);
     }
     else
     {
         _atEof = true;
     }
     _putbackList = new List<String>();
 }
Example No. 5
        private static StreamTokenizer CreateTokenizer(TextReader reader)
        {
            StreamTokenizer tokenizer = new StreamTokenizer(reader);

            tokenizer.ResetSyntax();              // start from an empty syntax table
            tokenizer.WordChars('a', 'z');        // letters and upper Latin-1 characters form words
            tokenizer.WordChars('A', 'Z');
            tokenizer.WordChars(128 + 32, 255);
            tokenizer.WordChars('0', '9');        // digits, sign and decimal point also stay
            tokenizer.WordChars('-', '-');        //   inside word tokens, so numbers are read as words
            tokenizer.WordChars('+', '+');
            tokenizer.WordChars('.', '.');
            tokenizer.WhitespaceChars(0, ' ');    // everything up to and including space is whitespace
            tokenizer.CommentChar('#');           // '#' begins a comment that runs to end of line

            return tokenizer;
        }
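
Because ResetSyntax() wipes the default table and ParseNumbers() is never re-enabled, digits, '+', '-' and '.' are only word characters here, so numeric input comes back as plain word tokens that the caller parses itself. A minimal sketch of driving the returned tokenizer, again assuming java.io.StreamTokenizer-style members (NextToken(), StringValue and the TT_* constants; names are assumptions):

        // Hypothetical usage; NextToken, StringValue and the TT_* constants are assumed
        // from java.io.StreamTokenizer and may be named differently in this port.
        // Requires using System.IO;
        using (var reader = new StringReader("# header comment\nwidth 640 scale -1.5"))
        {
            StreamTokenizer tok = CreateTokenizer(reader);
            int type;
            while ((type = tok.NextToken()) != StreamTokenizer.TT_EOF)
            {
                if (type == StreamTokenizer.TT_WORD)
                {
                    // "width", "640", "scale" and "-1.5" all arrive as word tokens;
                    // numeric values must be parsed with double.Parse or similar.
                    Console.WriteLine(tok.StringValue);
                }
            }
        }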