Code Example #1
File: FeatureParser.cs Project: iamkoch/pickles
        public Feature Parse(TextReader featureFileReader)
        {
            var language = this.DetermineLanguage();
            var gherkinParser = new Gherkin.Parser();

            Gherkin.Ast.Feature feature = gherkinParser.Parse(
                new Gherkin.TokenScanner(featureFileReader),
                new Gherkin.TokenMatcher(language));

            Feature result = new Mapper(feature.Language).MapToFeature(feature);

            return result;
        }
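A minimal usage sketch for the method above, assuming a hypothetical, already-configured FeatureParser instance named parser; the Gherkin text is illustrative only:

        // Hedged usage sketch: "parser" is a hypothetical FeatureParser instance.
        string gherkin =
            "Feature: Addition\n" +
            "  Scenario: Add two numbers\n" +
            "    Given I have entered 1 into the calculator\n";

        using (var reader = new System.IO.StringReader(gherkin))
        {
            Feature feature = parser.Parse(reader);
            System.Console.WriteLine(feature.Name); // assumes Feature exposes a Name property
        }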
Code Example #2
File: ANTLRReaderStream.cs Project: sklose/NCalc2
 public virtual void Load( TextReader r, int size, int readChunkSize )
 {
     if ( r == null )
     {
         return;
     }
     if ( size <= 0 )
     {
         size = InitialBufferSize;
     }
     if ( readChunkSize <= 0 )
     {
         readChunkSize = ReadBufferSize;
     }
     // System.out.println("load "+size+" in chunks of "+readChunkSize);
     try
     {
         data = r.ReadToEnd().ToCharArray();
         base.n = data.Length;
     }
     finally
     {
         r.Dispose();
     }
 }
Code Example #3
File: Lexer.cs Project: gitter-badger/reko
 public Lexer(System.IO.TextReader rdr)
 {
     this.rdr = rdr;
     st = State.StartOfLine;
     lookahead = new Token(TokenType.EOFile);
     sb = new StringBuilder();
 }
Code Example #4
File: CommandBase.cs Project: varixto/stellar
 public CommandBase()
 {
     //by default, read from/write to standard streams
     this.In = System.Console.In;
     this.Out = System.Console.Out;
     this.Error = System.Console.Error;
 }
Code Example #5
File: Program.cs Project: Entalyan/Simple-IRC-Tests
        public static void Main(string[] args)
        {
            nick = "SecureIRC";
            owner = "SecureIRC";
            server = "irc.entalyan.com";
            port = 6999;
            chan = "#SecureIRC";
            pass = ""; //Enter just the password

            //Connect to irc server and get input and output text streams from TcpClient.
            sock.Connect(server, port);
            if (!sock.Connected)
            {
                Console.WriteLine("Failed to connect!");
                return;
            }
            input = new System.IO.StreamReader(sock.GetStream());
            output = new System.IO.StreamWriter(sock.GetStream());

            //Starting USER and NICK login commands 
            output.Write(
                "PASS " + nick + ":" + pass + "\r\n" +
                "USER " + nick + " 0 * :" + owner + "\r\n" +
                "NICK " + nick + "\r\n" +
                "PRIVMSG #SecureIRC Successful login at: " + DateTime.Now.ToString() + "\r\n"
            );
            output.Flush();

            Listen();

            Console.WriteLine("Press any key to exit.");
            Console.ReadKey();

        }
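Listen() is not shown in this snippet. A minimal sketch of what such a loop typically does, assuming it reuses the input/output fields initialized above; replying to PING with PONG is required by the IRC protocol to keep the connection alive:

        private static void Listen()
        {
            string line;
            while ((line = input.ReadLine()) != null)
            {
                Console.WriteLine(line);

                // The server periodically sends "PING :token"; echo it back as PONG
                // or the connection will be closed.
                if (line.StartsWith("PING "))
                {
                    output.Write("PONG " + line.Substring(5) + "\r\n");
                    output.Flush();
                }
            }
        }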
Code Example #6
File: ANTLRReaderStream.cs Project: ksmyth/antlr
 public virtual void Load( TextReader r, int size, int readChunkSize )
 {
     if ( r == null )
     {
         return;
     }
     if ( size <= 0 )
     {
         size = INITIAL_BUFFER_SIZE;
     }
     if ( readChunkSize <= 0 )
     {
         readChunkSize = READ_BUFFER_SIZE;
     }
     // System.out.println("load "+size+" in chunks of "+readChunkSize);
     try
     {
         data = r.ReadToEnd().ToCharArray();
         base.n = data.Length;
     }
     finally
     {
         r.Close();
     }
 }
Code Example #7
 protected void Initializer(string FeatureName)
 {
     this.In = System.Console.In;
     this.Out = System.Console.Out;
     this.Error = System.Console.Error; // was Console.Out, which would merge error output into standard output
     Name = FeatureName;
 }
Code Example #8
internal CqlLexer(System.IO.TextReader reader) : this()
  {
  if (null == reader)
    {
          throw new System.Data.EntitySqlException(EntityRes.GetString(EntityRes.ParserInputError)); 
    }
  yy_reader = reader;
  }
Code Example #9
internal CqlLexer(System.IO.FileStream instream) : this()
  {
  if (null == instream)
    {
           throw new System.Data.EntitySqlException(EntityRes.GetString(EntityRes.ParserInputError)); 
    }
  yy_reader = new System.IO.StreamReader(instream);
  }
Code Example #10
File: FeatureParser.cs Project: MikeThomas64/pickles
        public Feature Parse(TextReader featureFileReader)
        {
            var gherkinParser = new Gherkin.Parser();
            Gherkin.Ast.Feature feature = gherkinParser.Parse(featureFileReader);
            Feature result = new Mapper(feature.Language).MapToFeature(feature);

            return result;
        }
Code Example #11
 /// <summary>
 /// Creates a StringTemplateGroup instance that manages a set of
 /// templates defined in a "group file".
 /// </summary>
 /// <param name="reader">Input stream for group file data</param>
 /// <param name="lexer">Lexer to use for breaking up templates into chunks</param>
 /// <param name="errorListener">Error message sink</param>
 /// <param name="superGroup">Parent (or super/base) group</param>
 /// <returns>A StringTemplateGroup instance or null if no group is found</returns>
 public StringTemplateGroup CreateGroup(
     TextReader reader,
     Type lexer,
     IStringTemplateErrorListener errorListener,
     StringTemplateGroup superGroup)
 {
     return new StringTemplateGroup(reader, lexer, errorListener, superGroup);
 }
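A hedged usage sketch: "factory" stands for a hypothetical instance of the class that defines CreateGroup, and passing null for the lexer type and error listener is assumed to fall back to the library defaults:

     // Hedged usage sketch; the group text is illustrative only.
     string groupText =
         "group demo;\n" +
         "greeting(name) ::= \"Hello, <name>!\"\n";

     StringTemplateGroup group = factory.CreateGroup(
         new System.IO.StringReader(groupText),
         null,   // lexer type: assumed to default when null
         null,   // no error listener
         null);  // no super group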
Code Example #12
		public void Dispose ()
		{
			if (_reader != null)
			{
				((IDisposable)_reader).Dispose();
				_reader = null;
			}
		}
Code Example #13
File: Tokenizer.cs Project: JPT123/ravendb
		/// <summary>By default, closes the input Reader. </summary>
		public override void  Close()
		{
            if (input != null) {
                input.Close();
                // LUCENE-2387: don't hold onto Reader after close, so
                // GC can reclaim
                input = null;
            }

		}
Code Example #14
 public PosEventReader(System.IO.TextReader data, IPosContextGenerator contextGenerator)
 {
     mContextGenerator = contextGenerator;
     mTextReader = data;
     string nextLine = mTextReader.ReadLine();
     if (nextLine != null)
     {
         AddEvents(nextLine);
     }
 }
Code Example #15
File: FeatureParser.cs Project: Narmical/pickles
        public Feature Parse(TextReader featureFileReader)
        {
            string fileContent = featureFileReader.ReadToEnd();

            var parser = new PicklesParser(this.languageService.GetLanguage());
            Lexer lexer = this.languageService.GetNativeLexer(parser);
            lexer.scan(fileContent);

            return parser.GetFeature();
        }
Code Example #16
File: CharBuffer.cs Project: takeshik/parseq
        public CharBuffer(System.IO.TextReader reader, Int32 bufferSize)
        {
            _baseReader = reader;
            _bufferPtrBegin = 0;
            _bufferPtrEnd = 0;
            _bufferSize = Math.Max(bufferSize, MinBufferSize);
            _buffer = new Char[_bufferSize]; // allocate with the clamped size so FillBuffer cannot overrun

            this.FillBuffer();
        }
Code Example #17
 public SimpleCharStream(System.IO.TextReader dstream, int startline,
     int startcolumn, int buffersize)
 {
     inputStream = dstream;
     line = startline;
     column = startcolumn - 1;
     available = bufsize = buffersize;
     buffer = new char[buffersize];
     bufline = new int[buffersize];
     bufcolumn = new int[buffersize];
 }
Code Example #18
 /// <summary>
 /// Creates the <seealso cref="TokenStream"/> of n-grams from the given <seealso cref="Reader"/> and <seealso cref="AttributeSource.AttributeFactory"/>. </summary>
 public override Tokenizer Create(AttributeSource.AttributeFactory factory, Reader input)
 {
     if (luceneMatchVersion.OnOrAfter(Version.LUCENE_44))
     {
         return new NGramTokenizer(luceneMatchVersion, factory, input, minGramSize, maxGramSize);
     }
     else
     {
         return new Lucene43NGramTokenizer(factory, input, minGramSize, maxGramSize);
     }
 }
Code Example #19
 public void Main(string[] args, 
     System.IO.TextReader In,
     System.IO.TextWriter Out,
     System.IO.TextWriter Error)
 {
     //this version of Main allows alternate streams
     this.In = In;
     this.Out = Out;
     this.Error = Error;
     this.Main(args);
 }
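This overload is what makes such a command testable. A hedged sketch of driving it with in-memory streams, where cmd is a hypothetical instance of the defining class:

     // Hedged test sketch: capture output without touching the real console.
     var fakeIn  = new System.IO.StringReader("simulated user input\n");
     var fakeOut = new System.IO.StringWriter();
     var fakeErr = new System.IO.StringWriter();

     cmd.Main(new string[0], fakeIn, fakeOut, fakeErr);

     string captured = fakeOut.ToString();  // assert on this in a test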
Code Example #20
File: jsonLexer.cs Project: dw4dev/Phalanger
		public void Initialize(System.IO.TextReader reader, LexicalStates lexicalState, bool atBol)
		{
			this.expanding_token = false;
			this.token_start = 0;
			this.chars_read = 0;
			this.lookahead_index = 0;
			this.token_chunk_start = 0;
			this.token_end = 0;
			this.reader = reader;
			this.yy_at_bol = atBol;
			this.current_lexical_state = lexicalState;
		}
Code Example #21
File: Lexer.cs Project: CraigTaylor/App.Port
        public Lexer(System.IO.TextReader reader)
        {
            _allowComments = true;
            _allowSingleQuotedStrings = true;

            _inputBuffer = 0;
            _stringBuffer = new System.Text.StringBuilder(128);
            _state = 1;
            EndOfInput = false;
            _reader = reader;

            _fsmContext = new FsmContext {L = this};
        }
Code Example #22
File: FeatureParser.cs Project: ngm/pickles
        public Feature Parse(TextReader featureFileReader)
        {
            var language = this.DetermineLanguage();
            var gherkinParser = new Gherkin.Parser();

            Gherkin.Ast.GherkinDocument gherkinDocument = gherkinParser.Parse(
                new Gherkin.TokenScanner(featureFileReader),
                new Gherkin.TokenMatcher(language));

            Feature result = new Mapper(this.configuration, gherkinDocument.Feature.Language).MapToFeature(gherkinDocument);

            return result;
        }
Code Example #23
File: ReaderInput.cs Project: w4x/boolangstudio
        public ReaderInput(string name, System.IO.TextReader reader)
        {
            if (null == name)
            {
                throw new ArgumentNullException("name");
            }

            if (null == reader)
            {
                throw new ArgumentNullException("reader");
            }

            _name = name;
            _reader = reader;
        }
Code Example #24
	  public override void Parse(Reader @in)
	  {
		LineNumberReader br = new LineNumberReader(@in);
		try
		{
		  string line = null;
		  string lastSynSetID = "";
		  CharsRef[] synset = new CharsRef[8];
		  int synsetSize = 0;

		  while ((line = br.readLine()) != null)
		  {
			string synSetID = line.Substring(2, 9);

			if (!synSetID.Equals(lastSynSetID))
			{
			  addInternal(synset, synsetSize);
			  synsetSize = 0;
			}

			if (synset.Length <= synsetSize+1)
			{
			  CharsRef[] larger = new CharsRef[synset.Length * 2];
			  Array.Copy(synset, 0, larger, 0, synsetSize);
			  synset = larger;
			}

			synset[synsetSize] = parseSynonym(line, synset[synsetSize]);
			synsetSize++;
			lastSynSetID = synSetID;
		  }

		  // final synset in the file
		  addInternal(synset, synsetSize);
		}
		catch (System.ArgumentException e)
		{
		  ParseException ex = new ParseException("Invalid synonym rule at line " + br.LineNumber, 0);
		  ex.initCause(e);
		  throw ex;
		}
		finally
		{
		  br.close();
		}
	  }
Code Example #25
        protected override void Dispose(bool disposing)
        {
            if (isDisposed) return;

            if (disposing)
            {
                if (input != null)
                {
                    input.Close();
                }
            }

            // LUCENE-2387: don't hold onto Reader after close, so
            // GC can reclaim
            input = null;
            isDisposed = true;
        }
Code Example #26
File: Text.cs Project: gowantervo/coab
        public void Assign(string s, AssignType type)
        {
            _type = type;

            if (s != string.Empty)
            {
                if (type == AssignType.Read)
                {
                    reader = new System.IO.StreamReader(s);
                }
                else
                {
                    writer = new System.IO.StreamWriter(s);
                }
            }

            field_2 = 0xD7B0;
        }
Code Example #27
	  public override void Parse(Reader @in)
	  {
		LineNumberReader br = new LineNumberReader(@in);
		try
		{
		  addInternal(br);
		}
		catch (System.ArgumentException e)
		{
		  ParseException ex = new ParseException("Invalid synonym rule at line " + br.LineNumber, 0);
		  ex.initCause(e);
		  throw ex;
		}
		finally
		{
		  br.close();
		}
	  }
Code Example #28
File: IRCBot.cs Project: mslot/IRCSharp
        public IRCBot(string server, int port, string dllPath, string username, string name, string channels, string hostname = "*****@*****.**")
            : base("main_bot_thread")
        {
            _channels = channels;
            _username = username;
            _dllPath = dllPath;
            _name = name;
            _hostname = hostname;
            _server = server;
            _port = port;
            _commandCollecter = new IRCSharp.Kernel.Collecters.CommandCollecter(_dllPath);
            _client = new System.Net.Sockets.TcpClient();
            _client.Connect(_server, _port);
            _clientStream = _client.GetStream();
            _ircWriter = new Model.Query.Writer.IRCWriter<System.IO.Stream>(_clientStream);
            _clientReader = new System.IO.StreamReader(_clientStream);

            _messageServer = new Messaging.MessageServer.MessageServer<Model.Query.IRCCommandQuery>(
                Messaging.Configuration.MessageServerConfiguration.BotServerQueuePath,
                Messaging.Configuration.MessageServerConfiguration.BotServerOutgoingPath
                );

            _messageServer.OutgoingReveived += OutgoingReveived;
        }
Code Example #29
File: CharStream.cs Project: nasa03/Jasonity
 /** Reinitialise. */
 public virtual void ReInit(System.IO.TextReader dstream)
 {
     ReInit(dstream, 1, 1, 4096);
 }
Code Example #30
 public StreamLemmasFilter(System.IO.TextReader input, HebMorph.StreamLemmatizer _lemmatizer,
                           HebMorph.LemmaFilters.LemmaFilterBase _lemmaFilter)
 //: base(input) <- converts to CharStream, and causes issues due to a call to ReadToEnd in ctor
 {
     Init(input, _lemmatizer, _lemmaFilter, false);
 }
Code Example #31
 /// <summary>
 /// Initializes a new instance of the ANTLRReaderStream class for the
 /// specified TextReader and initial data buffer size, using a read
 /// buffer of the specified size
 /// </summary>
 public ANTLRReaderStream(TextReader reader, int size, int readChunkSize)
 {
     Load(reader, size, readChunkSize);
 }
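A hedged usage sketch for this constructor; the file name is hypothetical. Note that Load reads the reader to the end immediately and disposes (or closes) it when done:

     using (var reader = new System.IO.StreamReader("grammar-input.txt"))
     {
         // The entire file is buffered up front; the reader is already consumed here.
         var stream = new ANTLRReaderStream(reader, 1024, 1024);
     }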
Code Example #32
File: DocInverterPerField.cs Project: vernon016/mono
        public override void  ProcessFields(Fieldable[] fields, int count)
        {
            fieldState.Reset(docState.doc.GetBoost());

            int maxFieldLength = docState.maxFieldLength;

            bool doInvert = consumer.Start(fields, count);

            for (int i = 0; i < count; i++)
            {
                Fieldable field = fields[i];

                // TODO FI: this should be "genericized" to querying
                // consumer if it wants to see this particular field
                // tokenized.
                if (field.IsIndexed() && doInvert)
                {
                    bool anyToken;

                    if (fieldState.length > 0)
                    {
                        fieldState.position += docState.analyzer.GetPositionIncrementGap(fieldInfo.name);
                    }

                    if (!field.IsTokenized())
                    {
                        // un-tokenized field
                        System.String stringValue = field.StringValue();
                        int           valueLength = stringValue.Length;
                        perThread.singleTokenTokenStream.Reinit(stringValue, 0, valueLength);
                        fieldState.attributeSource = perThread.singleTokenTokenStream;
                        consumer.Start(field);

                        bool success = false;
                        try
                        {
                            consumer.Add();
                            success = true;
                        }
                        finally
                        {
                            if (!success)
                            {
                                docState.docWriter.SetAborting();
                            }
                        }
                        fieldState.offset += valueLength;
                        fieldState.length++;
                        fieldState.position++;
                        anyToken = valueLength > 0;
                    }
                    else
                    {
                        // tokenized field
                        TokenStream stream;
                        TokenStream streamValue = field.TokenStreamValue();

                        if (streamValue != null)
                        {
                            stream = streamValue;
                        }
                        else
                        {
                            // the field does not have a TokenStream,
                            // so we have to obtain one from the analyzer
                            System.IO.TextReader reader;                             // find or make Reader
                            System.IO.TextReader readerValue = field.ReaderValue();

                            if (readerValue != null)
                            {
                                reader = readerValue;
                            }
                            else
                            {
                                System.String stringValue = field.StringValue();
                                if (stringValue == null)
                                {
                                    throw new System.ArgumentException("field must have either TokenStream, String or Reader value");
                                }
                                perThread.stringReader.Init(stringValue);
                                reader = perThread.stringReader;
                            }

                            // Tokenize field and add to postingTable
                            stream = docState.analyzer.ReusableTokenStream(fieldInfo.name, reader);
                        }

                        // reset the TokenStream to the first token
                        stream.Reset();

                        int startLength = fieldState.length;

                        // deprecated
                        bool allowMinus1Position = docState.allowMinus1Position;

                        try
                        {
                            int offsetEnd = fieldState.offset - 1;

                            bool hasMoreTokens = stream.IncrementToken();

                            fieldState.attributeSource = stream;

                            OffsetAttribute            offsetAttribute  = (OffsetAttribute)fieldState.attributeSource.AddAttribute(typeof(OffsetAttribute));
                            PositionIncrementAttribute posIncrAttribute = (PositionIncrementAttribute)fieldState.attributeSource.AddAttribute(typeof(PositionIncrementAttribute));

                            consumer.Start(field);

                            for (; ;)
                            {
                                // If we hit an exception in stream.next below
                                // (which is fairly common, eg if analyzer
                                // chokes on a given document), then it's
                                // non-aborting and (above) this one document
                                // will be marked as deleted, but still
                                // consume a docID

                                if (!hasMoreTokens)
                                {
                                    break;
                                }

                                int posIncr = posIncrAttribute.GetPositionIncrement();
                                fieldState.position += posIncr;
                                if (allowMinus1Position || fieldState.position > 0)
                                {
                                    fieldState.position--;
                                }

                                if (posIncr == 0)
                                {
                                    fieldState.numOverlap++;
                                }

                                bool success = false;
                                try
                                {
                                    // If we hit an exception in here, we abort
                                    // all buffered documents since the last
                                    // flush, on the likelihood that the
                                    // internal state of the consumer is now
                                    // corrupt and should not be flushed to a
                                    // new segment:
                                    consumer.Add();
                                    success = true;
                                }
                                finally
                                {
                                    if (!success)
                                    {
                                        docState.docWriter.SetAborting();
                                    }
                                }
                                fieldState.position++;
                                offsetEnd = fieldState.offset + offsetAttribute.EndOffset();
                                if (++fieldState.length >= maxFieldLength)
                                {
                                    if (docState.infoStream != null)
                                    {
                                        docState.infoStream.WriteLine("maxFieldLength " + maxFieldLength + " reached for field " + fieldInfo.name + ", ignoring following tokens");
                                    }
                                    break;
                                }

                                hasMoreTokens = stream.IncrementToken();
                            }
                            // trigger streams to perform end-of-stream operations
                            stream.End();

                            fieldState.offset += offsetAttribute.EndOffset();
                            anyToken           = fieldState.length > startLength;
                        }
                        finally
                        {
                            stream.Close();
                        }
                    }

                    if (anyToken)
                    {
                        fieldState.offset += docState.analyzer.GetOffsetGap(field);
                    }
                    fieldState.boost *= field.GetBoost();
                }

                // LUCENE-2387: don't hang onto the field, so GC can
                // reclaim
                fields[i] = null;
            }

            consumer.Finish();
            endConsumer.Finish();
        }
Code Example #33
 /// <summary>
 /// Initializes a new instance of the ANTLRReaderStream class for the
 /// specified TextReader and initial data buffer size
 /// </summary>
 public ANTLRReaderStream(TextReader reader, int size)
     : this(reader, size, READ_BUFFER_SIZE)
 {
 }
Code Example #34
 /// <summary>Construct a new LetterTokenizer using a given {@link AttributeSource}. </summary>
 public LetterTokenizer(AttributeSource source, System.IO.TextReader in_Renamed) : base(source, in_Renamed)
 {
 }
Code Example #35
 /// <summary>Construct a new LetterTokenizer. </summary>
 public LetterTokenizer(System.IO.TextReader in_Renamed) : base(in_Renamed)
 {
 }
Code Example #36
 public override void  Reset(System.IO.TextReader reader)
 {
     base.Reset(reader);
     Reset();
 }
Code Example #37
 public StandardTokenizer(AttributeFactory factory, System.IO.TextReader input, bool replaceInvalidAcronym) : base(factory)
 {
     InitBlock();
     this.scanner = new StandardTokenizerImpl(input);
     Init(input, replaceInvalidAcronym);
 }
Code Example #38
File: Tokenizer.cs Project: sainabob/teamlab.v7.5
 /// <summary>Construct a token stream processing the given input using the given AttributeSource. </summary>
 protected internal Tokenizer(AttributeSource source, System.IO.TextReader input) : base(source)
 {
     this.input = CharReader.Get(input);
 }
Code Example #39
 /// <summary>Creates a TokenStream which tokenizes all the text in the provided
 /// Reader.  Default implementation forwards to tokenStream(Reader) for
 /// compatibility with older version.  Override to allow Analyzer to choose
 /// strategy based on document and/or Field.  Must be able to handle null
 /// Field name for backward compatibility.
 /// </summary>
 public virtual TokenStream TokenStream(System.String fieldName, System.IO.TextReader reader)
 {
     // implemented for backward compatibility
     return(TokenStream(reader));
 }
Code Example #40
 /// <summary>Creates a TokenStream which tokenizes all the text in the provided
 /// Reader.  Provided for backward compatibility only.
 /// </summary>
 /// <deprecated> use TokenStream(String, Reader) instead.
 /// </deprecated>
 /// <seealso cref="Reader)">
 /// </seealso>
 public virtual TokenStream TokenStream(System.IO.TextReader reader)
 {
     return(TokenStream(null, reader));
 }
Code Example #41
File: Analyzer.cs Project: ravendb/lucenenet
 /// <summary>Creates a TokenStream which tokenizes all the text in the provided
 /// Reader.  Must be able to handle null field name for
 /// backward compatibility.
 /// </summary>
 public abstract TokenStream TokenStream(String fieldName, System.IO.TextReader reader);
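A minimal sketch of satisfying this abstract member in a subclass, reusing the LetterTokenizer shown elsewhere on this page (the class name is hypothetical):

 public class LetterOnlyAnalyzer : Analyzer
 {
     public override TokenStream TokenStream(String fieldName, System.IO.TextReader reader)
     {
         // Emit maximal runs of letters as tokens; the field name is ignored.
         return new LetterTokenizer(reader);
     }
 }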
Code Example #42
 /// <summary>Construct a new LetterTokenizer using a given {@link Lucene.Net.Util.AttributeSource.AttributeFactory}. </summary>
 public LetterTokenizer(AttributeFactory factory, System.IO.TextReader in_Renamed) : base(factory, in_Renamed)
 {
 }
Code Example #43
 /// <summary> Creates a new StandardTokenizer with a given
 /// {@link org.apache.lucene.util.AttributeSource.AttributeFactory}
 /// </summary>
 public StandardTokenizer(Version matchVersion, AttributeFactory factory, System.IO.TextReader input) : base(factory)
 {
     InitBlock();
     this.scanner = new StandardTokenizerImpl(input);
     Init(input, matchVersion);
 }
Code Example #44
 public XPathDocument(System.IO.TextReader textReader)
 {
 }
Code Example #45
 public StandardTokenizer(System.IO.TextReader input) : this(Version.LUCENE_24, input)
 {
 }
Code Example #46
 public ConsoleInput(System.IO.TextReader stream, char separator = ' ')
 {
     this._separator = separator;
     this._stream    = stream;
     inputStream     = new Queue <string>();
 }
Code Example #47
 /// <summary>
 /// Deserializes the JSON payload from a text reader to an instance of type <typeparamref name="T"/>.
 /// </summary>
 /// <param name="reader">The text reader containing the JSON payload to deserialize.</param>
 /// <returns>An instance of type <typeparamref name="T"/> containing the deserialized JSON payload.</returns>
 protected override T DeserializeCore(System.IO.TextReader reader) => _parseReader(reader, _context);
Code Example #48
File: MappingCharFilter.cs Project: zfxsss/lucenenet
 /// <summary>Easy-use constructor that takes a <see cref="System.IO.TextReader" />.</summary>
 public MappingCharFilter(NormalizeCharMap normMap, System.IO.TextReader @in)
     : base(CharReader.Get(@in))
 {
     this.normMap = normMap;
 }
Code Example #49
 /// <summary>Builds an analyzer with the stop words from the given reader.</summary>
 /// <seealso cref="WordlistLoader.GetWordSet(Reader)">
 /// </seealso>
 public StandardAnalyzer(System.IO.TextReader stopwords)
 {
     stopSet = WordlistLoader.GetWordSet(stopwords);
 }
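A hedged usage sketch: any TextReader works here, so the stop words can come from an in-memory string as easily as from a file (the word list is illustrative; the assumed format is one stop word per line):

     var stopwords = new System.IO.StringReader("a\nan\nthe\n");
     var analyzer  = new StandardAnalyzer(stopwords);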
Code Example #50
 public StreamLemmasFilter(System.IO.TextReader input, HebMorph.StreamLemmatizer _lemmatizer,
                           HebMorph.LemmaFilters.LemmaFilterBase _lemmaFilter, bool AlwaysSaveMarkedOriginal)
 //: base(input) <- converts to CharStream, and causes issues due to a call to ReadToEnd in ctor
 {
     Init(input, _lemmatizer, _lemmaFilter, AlwaysSaveMarkedOriginal);
 }
Code Example #51
 public virtual BatchParserAction Include(TextBlock filename, out System.IO.TextReader stream, out string newFilename)
 {
     throw new NotImplementedException("The method or operation is not implemented.");
 }
Code Example #52
 /// <summary>
 /// Initializes a new instance of the ANTLRReaderStream class for the
 /// specified TextReader
 /// </summary>
 public ANTLRReaderStream(TextReader reader)
     : this(reader, INITIAL_BUFFER_SIZE, READ_BUFFER_SIZE)
 {
 }
Code Example #53
 public static System.Json.JsonValue Load(System.IO.TextReader textReader)
 {
     throw null;
 }
Code Example #54
 public static void SetIn(System.IO.TextReader newIn)
 {
     System.Console.SetIn(newIn);
 }
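A hedged sketch of why such a wrapper is useful: redirecting standard input lets console-reading code be driven from a test:

     // Feed canned input to anything that reads System.Console.In.
     System.Console.SetIn(new System.IO.StringReader("first\nsecond\n"));
     string line = System.Console.ReadLine();  // "first"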
Code Example #55
File: Tokenizer.cs Project: sainabob/teamlab.v7.5
 /// <summary>Expert: Reset the tokenizer to a new reader.  Typically, an
 /// analyzer (in its reusableTokenStream method) will use
 /// this to re-use a previously created tokenizer.
 /// </summary>
 public virtual void  Reset(System.IO.TextReader input)
 {
     this.input = input;
 }
Code Example #56
File: Analyzer.cs Project: ravendb/lucenenet
 /// <summary>Creates a TokenStream that is allowed to be re-used
 /// from the previous time that the same thread called
 /// this method.  Callers that do not need to use more
 /// than one TokenStream at the same time from this
 /// analyzer should use this method for better
 /// performance.
 /// </summary>
 public virtual TokenStream ReusableTokenStream(String fieldName, System.IO.TextReader reader)
 {
     return(TokenStream(fieldName, reader));
 }
Code Example #57
File: ANTLRReaderStream.cs Project: sklose/NCalc2
 public ANTLRReaderStream( TextReader r, int size, int readChunkSize )
 {
     Load( r, size, readChunkSize );
 }
Code Example #58
File: Tokenizer.cs Project: sainabob/teamlab.v7.5
 /// <summary>Construct a token stream processing the given input. </summary>
 protected internal Tokenizer(System.IO.TextReader input)
 {
     this.input = CharReader.Get(input);
 }
Code Example #59
File: Field.cs Project: emtees/old-code
		internal Field(System.String name, System.IO.TextReader reader)
		{
			if (name == null)
				throw new System.ArgumentException("name cannot be null");
			if (reader == null)
				throw new System.ArgumentException("value cannot be null");
			
			this.name = String.Intern(name); // Field names are interned
			this.readerValue = reader;
		}
Code Example #60
File: Tokenizer.cs Project: sainabob/teamlab.v7.5
 /// <summary>Construct a token stream processing the given input using the given AttributeFactory. </summary>
 protected internal Tokenizer(AttributeFactory factory, System.IO.TextReader input) : base(factory)
 {
     this.input = CharReader.Get(input);
 }