/// <summary>
/// Builds a lexer over the given input string, configuring the tokenizer's
/// syntax tables: double-quoted strings, numeric literals, the punctuation
/// tokens ',', '(' and ')', and identifiers that may contain '$' and '_'.
/// </summary>
/// <param name="s">The text to tokenize.</param>
internal Lexer(string s)
{
    tok = new StreamTokenizer(new CharArrayReader(s.ToCharArray()));
    tok.QuoteChar('"');
    tok.ParseNumbers();

    // Punctuation is reported as individual ordinary characters.
    foreach (var punct in new[] { ',', '(', ')' })
    {
        tok.OrdinaryChar(punct);
    }

    // '$' and '_' are legal inside word (identifier) tokens.
    foreach (var idChar in new[] { '$', '_' })
    {
        tok.WordChars(idChar, idChar);
    }
}
/// <summary>
/// Initializes the field <c>st</c> as a tokenizer over the stream field
/// <c>i</c>, decoded as ISO-8859-1: bytes 0-32 are whitespace, bytes 33-255
/// form words, '!' starts a comment, single quotes delimit strings, and
/// end-of-line characters are not significant.
/// </summary>
private void initTokenizer()
{
    var latin1 = Encoding.GetEncoding("ISO-8859-1");
    st = new StreamTokenizer(new StreamReader(i, latin1));
    st.ResetSyntax();
    st.WhitespaceChars(0, 32);  // control characters and space separate tokens
    st.WordChars(33, 255);      // every other byte is a word constituent
    st.CommentChar('!');
    st.QuoteChar('\'');
    st.EolIsSignificant(false);
}
/// <summary>
/// Reads a GML document from <paramref name="inputStream"/> (decoded as
/// ISO-8859-1) and loads its vertices and edges into
/// <paramref name="inputGraph"/>, batching mutations through a
/// <see cref="BatchGraph"/> wrapper of size <paramref name="bufferSize"/>.
/// </summary>
/// <param name="inputGraph">Target graph to populate; must not be null.</param>
/// <param name="inputStream">Stream containing the GML text; must not be null.</param>
/// <param name="bufferSize">Commit batch size; must be greater than zero.</param>
/// <param name="defaultEdgeLabel">Label applied to edges that declare none; must be non-blank.</param>
/// <param name="vertexIdKey">Property key used in place of the GML vertex id, or null.</param>
/// <param name="edgeIdKey">Property key used in place of the GML edge id, or null.</param>
/// <param name="edgeLabelKey">Property key used in place of the GML edge label, or null.</param>
/// <exception cref="ArgumentNullException">A required argument is null or blank.</exception>
/// <exception cref="ArgumentException"><paramref name="bufferSize"/> is not positive.</exception>
/// <exception cref="IOException">The GML is malformed; the message reports the line number and the cause is preserved as the inner exception.</exception>
public static void InputGraph(IGraph inputGraph, Stream inputStream, int bufferSize, string defaultEdgeLabel, string vertexIdKey, string edgeIdKey, string edgeLabelKey)
{
    if (inputGraph == null)
    {
        throw new ArgumentNullException(nameof(inputGraph));
    }
    if (inputStream == null)
    {
        throw new ArgumentNullException(nameof(inputStream));
    }
    if (bufferSize <= 0)
    {
        throw new ArgumentException("bufferSize must be greater than zero");
    }
    if (string.IsNullOrWhiteSpace(defaultEdgeLabel))
    {
        throw new ArgumentNullException(nameof(defaultEdgeLabel));
    }

    var graph = BatchGraph.Wrap(inputGraph, bufferSize);
    using (var r = new StreamReader(inputStream, Encoding.GetEncoding("ISO-8859-1")))
    {
        var st = new StreamTokenizer(r);
        try
        {
            st.CommentChar(GmlTokens.CommentChar);
            st.OrdinaryChar('[');
            st.OrdinaryChar(']');

            // Characters that may legally appear inside unquoted GML words.
            // Fixed: iterate the string directly instead of calling LINQ
            // ElementAt(i) inside an index loop.
            const string stringCharacters = "/\\(){}<>!£$%^&*-+=,.?:;@_`|~";
            foreach (var c in stringCharacters)
            {
                st.WordChars(c, c);
            }

            new GmlParser(graph, defaultEdgeLabel, vertexIdKey, edgeIdKey, edgeLabelKey).Parse(st);
            graph.Commit();
        }
        catch (IOException e)
        {
            // Fixed: the original message ended with a dangling ": "; include
            // the underlying message and keep the cause as InnerException.
            throw new IOException($"GML malformed line number {st.LineNumber}: {e.Message}", e);
        }
    }
}
/// <summary>
/// Constructs an ExtendedStreamTokenizer from the given Reader. This
/// ExtendedStreamTokenizer has no comment characters.
/// </summary>
/// <param name="reader">The source of the data.</param>
/// <param name="eolIsSignificant">True if eol is significant.</param>
public ExtendedStreamTokenizer(StreamReader reader, Boolean eolIsSignificant)
{
    _reader = reader;
    if (reader == null)
    {
        // No underlying data: report end-of-input immediately.
        _atEof = true;
    }
    else
    {
        _tokenizer = new StreamTokenizer(reader);
        _tokenizer.ResetSyntax();
        _tokenizer.WhitespaceChars(0, 32);  // control characters and space separate tokens
        _tokenizer.WordChars(33, 255);      // all other bytes build word tokens
        _tokenizer.EolIsSignificant(eolIsSignificant);
    }
    _putbackList = new List<String>();
}
/// <summary>
/// Builds a tokenizer over <paramref name="reader"/> whose word characters
/// are ASCII letters, the high-bit Latin-1 range (160-255), digits, and the
/// sign/decimal characters '-', '+' and '.'; every character up to and
/// including space is whitespace, and '#' begins a comment.
/// </summary>
/// <param name="reader">Source of the text to tokenize.</param>
/// <returns>The configured tokenizer.</returns>
private static StreamTokenizer CreateTokenizer(TextReader reader)
{
    var tokenizer = new StreamTokenizer(reader);
    tokenizer.ResetSyntax();

    // Word constituents.
    tokenizer.WordChars('a', 'z');
    tokenizer.WordChars('A', 'Z');
    tokenizer.WordChars(128 + 32, 255);
    tokenizer.WordChars('0', '9');
    foreach (var sign in "-+.")
    {
        tokenizer.WordChars(sign, sign);
    }

    // Separators and comments.
    tokenizer.WhitespaceChars(0, ' ');
    tokenizer.CommentChar('#');

    return tokenizer;
}