/// <summary>
/// Releases the unmanaged resources used by this
/// <see cref="T:System.IO.TextReader"/> and optionally
/// releases the managed resources.
/// </summary>
/// <param name="disposing">
/// Indicates whether disposal is occurring from managed code
/// (<c>true</c>) rather than from a finalizer (<c>false</c>).
/// </param>
protected override void Dispose(bool disposing)
{
    if (disposing)
    {
        // NOTE(review): lock (this) is an anti-pattern — external code can take
        // the same lock and deadlock the dispose path. The proper fix (a private
        // readonly gate object) needs a new field, so it is only flagged here.
        lock (this)
        {
            try
            {
                if (m_in != null)
                {
                    // Close the innermost java.io.Reader; m_reader/m_lineReader
                    // are wrappers around it.
                    m_in.close();
                }
            }
            catch (java.io.IOException ex)
            {
                // Surface the Java I/O failure as the CLR equivalent.
                // NOTE(review): throwing from Dispose is discouraged — confirm
                // callers expect this.
                throw new IOException(ex.toString(), ex);
            }
            finally
            {
                // Drop all references even if close() failed, then let the
                // base class release its own state.
                m_in = null;
                m_reader = null;
                m_lineReader = null;
                base.Dispose(disposing);
            }
        }
    }
    else
    {
        base.Dispose(disposing);
    }
}
/// <summary>
/// Tokenizes the text supplied by <paramref name="reader"/>, POS-tags each
/// sentence with a tagger built from the shared <c>Model</c>, and writes every
/// tagged sentence to the console.
/// </summary>
/// <param name="reader">Source of the raw text to tag.</param>
private static void TagReader(Reader reader)
{
    var tagger = new MaxentTagger(Model);
    var sentences = MaxentTagger.tokenizeText(reader).toArray();
    foreach (List sentence in sentences)
    {
        var tagged = tagger.tagSentence(sentence);
        System.Console.WriteLine(Sentence.listToString(tagged, false));
    }
}
/// <summary>
/// Wraps a <see cref="java.io.Reader"/> with pushback and line-reading
/// decorators so it can be consumed through this adapter.
/// </summary>
/// <param name="reader">The underlying Java reader; must not be null.</param>
/// <exception cref="ArgumentNullException"><paramref name="reader"/> is null.</exception>
/// <exception cref="IOException">Wrapping the reader failed.</exception>
public JavaReaderWrapper(java.io.Reader reader)
{
    if (reader == null)
    {
        // Fixed: the original threw NullReferenceException, which should only
        // ever be raised by the runtime; a null argument is ArgumentNullException.
        throw new ArgumentNullException("reader");
    }
    try
    {
        // Pushback depth of 1 supports single-character peek/unread.
        m_reader = new PushbackReader(reader, 1);
        m_lineReader = new BufferedReader(m_reader);
    }
    catch (Exception e)
    {
        // Normalize any wrapping failure to the CLR I/O exception type.
        throw new IOException(e.ToString(), e);
    }
    m_in = reader;
}
/// <summary>
/// Adds RDF data parsed from <paramref name="r"/>, refusing the operation
/// when the underlying manager is read-only.
/// </summary>
public override void add(Reader r, string str, org.openrdf.rio.RDFFormat rdff, params org.openrdf.model.Resource[] rarr)
{
    // Guard first: writes are not permitted on a read-only manager.
    if (this._manager.IsReadOnly)
    {
        throw NotWritableError("add");
    }

    base.add(r, str, rdff, rarr);
}
/// <summary>
/// Converts the value at <paramref name="columnIndex"/> (0-based; converted to
/// JDBC's 1-based indexing internally) of a JDBC callable-statement result into
/// the matching CLR value.
/// </summary>
/// <param name="results">The JDBC statement to read from.</param>
/// <param name="columnIndex">0-based column index.</param>
/// <param name="javaSqlType">JDBC type of the column, used to pick the accessor.</param>
/// <param name="maxLength">Maximum length applied to CHAR/VARCHAR values.</param>
/// <param name="resultsMetaData">Optional metadata used for uniqueidentifier/scale checks.</param>
/// <returns>The converted value, or <see cref="DBNull.Value"/> when the column was SQL NULL.</returns>
internal static object JavaResultSetToClrWrapper(CallableStatement results, int columnIndex, JavaSqlTypes javaSqlType, int maxLength, ResultSetMetaData resultsMetaData)
{
    // Fixed: the original tested "results == null" only AFTER calling methods
    // on it; guard up front instead.
    if (results == null)
    {
        return DBNull.Value;
    }

    object returnValue = null;
    sbyte[] sbyteArray;
    string s;
    columnIndex++; // jdbc style: JDBC columns are 1-based

    switch (javaSqlType)
    {
        case JavaSqlTypes.ARRAY:
            returnValue = results.getArray(columnIndex);
            break;

        case JavaSqlTypes.BIGINT:
            returnValue = results.getLong(columnIndex);
            break;

        case JavaSqlTypes.BINARY:
        case JavaSqlTypes.VARBINARY:
        case JavaSqlTypes.LONGVARBINARY:
            // FIXME : consider using maxLength
            sbyteArray = results.getBytes(columnIndex);
            if (sbyteArray != null)
            {
                returnValue = vmw.common.TypeUtils.ToByteArray(sbyteArray);
            }
            break;

        case JavaSqlTypes.BIT:
            returnValue = results.getBoolean(columnIndex);
            break;

        case JavaSqlTypes.BLOB:
            // FIXME : consider using maxLength
            java.sql.Blob blob = results.getBlob(columnIndex);
            if (blob != null)
            {
                InputStream input = blob.getBinaryStream();
                if (input == null)
                {
                    returnValue = new byte[0];
                }
                else
                {
                    long length = blob.length();
                    byte[] byteValue = new byte[length];
                    // sbyteValue aliases byteValue's storage via the interop
                    // helper, so filling it fills byteValue as well.
                    sbyte[] sbyteValue = vmw.common.TypeUtils.ToSByteArray(byteValue);
                    // Fixed: a single read() may return fewer bytes than the
                    // blob length; loop until the buffer is full or EOF.
                    int offset = 0;
                    int bytesRead;
                    while (offset < sbyteValue.Length &&
                           (bytesRead = input.read(sbyteValue, offset, sbyteValue.Length - offset)) != -1)
                    {
                        offset += bytesRead;
                    }
                    returnValue = byteValue;
                }
            }
            break;

        case JavaSqlTypes.CHAR:
            if (resultsMetaData != null && "uniqueidentifier".Equals(resultsMetaData.getColumnTypeName(columnIndex)))
            {
                returnValue = new Guid(results.getString(columnIndex));
            }
            else
            {
                // Oracle Jdbc driver returns extra trailing 0 chars for NCHAR
                // columns, so we treat this at parameter.Size level.
                s = results.getString(columnIndex);
                if ((s != null) && (maxLength < s.Length))
                {
                    s = s.Substring(0, maxLength);
                }
                returnValue = s;
            }
            break;

        case JavaSqlTypes.CLOB:
            // FIXME : consider using maxLength
            java.sql.Clob clob = results.getClob(columnIndex);
            if (clob != null)
            {
                java.io.Reader reader = clob.getCharacterStream();
                if (reader == null)
                {
                    returnValue = String.Empty;
                }
                else
                {
                    long length = clob.length();
                    char[] charValue = new char[length];
                    // Fixed: a single read() may return fewer chars than
                    // requested; loop until the buffer is full or EOF.
                    int charOffset = 0;
                    int charsRead;
                    while (charOffset < charValue.Length &&
                           (charsRead = reader.read(charValue, charOffset, charValue.Length - charOffset)) != -1)
                    {
                        charOffset += charsRead;
                    }
                    returnValue = new string(charValue);
                }
            }
            break;

        case JavaSqlTypes.TIME:
            Time t = results.getTime(columnIndex);
            if (t != null)
            {
                returnValue = new TimeSpan(JavaTimeToClrTicks(t));
            }
            break;

        case JavaSqlTypes.DATE:
            Date d = results.getDate(columnIndex);
            if (d != null)
            {
                returnValue = new DateTime(JavaDateToClrTicks(d));
            }
            break;

        case JavaSqlTypes.TIMESTAMP:
            Timestamp ts = results.getTimestamp(columnIndex);
            if (ts != null)
            {
                returnValue = new DateTime(JavaTimestampToClrTicks(ts));
            }
            break;

        case JavaSqlTypes.DECIMAL:
        case JavaSqlTypes.NUMERIC:
            // java.sql.Types.NUMERIC (2), columnTypeName NUMBER, columnClassName
            // java.math.BigDecimal - therefore we rely on scale.
            if (resultsMetaData != null && resultsMetaData.getScale(columnIndex) == -127)
            {
                // Oracle db type FLOAT
                returnValue = results.getDouble(columnIndex);
            }
            else
            {
                java.math.BigDecimal bigDecimal = results.getBigDecimal(columnIndex);
                if (bigDecimal != null)
                {
                    returnValue = vmw.common.PrimitiveTypeUtils.BigDecimalToDecimal(bigDecimal);
                }
            }
            break;

        case JavaSqlTypes.DISTINCT:
            returnValue = results.getObject(columnIndex);
            break;

        case JavaSqlTypes.DOUBLE:
            returnValue = results.getDouble(columnIndex);
            break;

        case JavaSqlTypes.FLOAT:
            // JDBC FLOAT is double precision, hence getDouble not getFloat.
            returnValue = results.getDouble(columnIndex);
            break;

        case JavaSqlTypes.INTEGER:
            returnValue = results.getInt(columnIndex);
            break;

        case JavaSqlTypes.JAVA_OBJECT:
            returnValue = results.getObject(columnIndex);
            break;

        case JavaSqlTypes.LONGVARCHAR:
            returnValue = results.getString(columnIndex);
            break;

        case JavaSqlTypes.NULL:
            returnValue = DBNull.Value;
            break;

        case JavaSqlTypes.OTHER:
            returnValue = results.getObject(columnIndex);
            break;

        case JavaSqlTypes.REAL:
            returnValue = results.getFloat(columnIndex);
            break;

        case JavaSqlTypes.REF:
            returnValue = results.getRef(columnIndex);
            break;

        case JavaSqlTypes.SMALLINT:
            returnValue = results.getShort(columnIndex);
            break;

        case JavaSqlTypes.STRUCT:
            returnValue = results.getObject(columnIndex);
            break;

        case JavaSqlTypes.TINYINT:
            returnValue = Convert.ToByte(results.getByte(columnIndex));
            break;

        case JavaSqlTypes.VARCHAR:
            s = results.getString(columnIndex);
            if ((s != null) && (maxLength < s.Length))
            {
                s = s.Substring(0, maxLength);
            }
            returnValue = s;
            break;

        default:
            returnValue = results.getObject(columnIndex);
            break;
    }

    // wasNull() reports whether the last getter saw SQL NULL.
    if (results.wasNull())
    {
        return DBNull.Value;
    }
    return returnValue;
}
/// <summary>
/// Reading a resource as a string is not part of this integration.
/// </summary>
/// <param name="reader">Ignored.</param>
/// <returns>Never returns.</returns>
/// <exception cref="NotSupportedException">Always thrown.</exception>
public override string readAsString(Reader reader)
{
    throw new NotSupportedException("not supported by Concordion.NET");
}
/// <summary>
/// Drains <paramref name="reader"/> to end-of-stream and returns everything
/// read as a single string.
/// </summary>
private String readToEnd(Reader reader)
{
    var buffer = new char[1024];
    var result = new StringBuilder();
    // java.io.Reader.read returns -1 at end of stream.
    for (int count = reader.read(buffer); count != -1; count = reader.read(buffer))
    {
        result.append(buffer, 0, count);
    }
    return result.toString();
}
/// <summary>
/// Copies all characters from <paramref name="in"/> to <paramref name="out"/>
/// using the default 4096-character buffer.
/// </summary>
public virtual void copyWriter(Reader @in, Writer @out)
{
    const int defaultBufferSize = 4096;
    this.copyWriter(@in, @out, defaultBufferSize);
}
/// <summary>
/// Copies all characters from <paramref name="in"/> to <paramref name="out"/>
/// in chunks of <paramref name="buffersize"/> characters.
/// </summary>
public virtual void copyWriter(Reader @in, Writer @out, int buffersize)
{
    var buffer = new char[buffersize];
    while (true)
    {
        int count = @in.read(buffer);
        // read() returns -1 at end of stream.
        if (count <= -1)
        {
            break;
        }
        @out.write(buffer, 0, count);
    }
}
/// <summary>
/// Parses RDF data from <paramref name="r"/> into an <see cref="IGraph"/>
/// (N3, NTriples, RDF/XML, Turtle) or an <see cref="ITripleStore"/>
/// (TriG, TriX), depending on the requested Sesame format.
/// </summary>
/// <param name="r">Java reader supplying the serialized RDF.</param>
/// <param name="baseUri">Optional base URI applied to graph results; ignored for stores.</param>
/// <param name="rdff">Sesame format selector.</param>
/// <returns>The populated <see cref="IGraph"/> or <see cref="ITripleStore"/>.</returns>
/// <exception cref="RdfParserSelectionException">The format is not supported.</exception>
public static Object LoadFromReader(Reader r, string baseUri, org.openrdf.rio.RDFFormat rdff)
{
    // The six branches differed only in the parser instance and in whether the
    // target is a graph or a store; the duplication is factored into helpers.
    if (rdff == dotSesameFormats.RDFFormat.N3)
    {
        return LoadGraphFromReader(new Notation3Parser(), r, baseUri);
    }
    else if (rdff == dotSesameFormats.RDFFormat.NTRIPLES)
    {
        return LoadGraphFromReader(new NTriplesParser(), r, baseUri);
    }
    else if (rdff == dotSesameFormats.RDFFormat.RDFXML)
    {
        return LoadGraphFromReader(new RdfXmlParser(), r, baseUri);
    }
    else if (rdff == dotSesameFormats.RDFFormat.TRIG)
    {
        return LoadStoreFromReader(new TriGParser(), r);
    }
    else if (rdff == dotSesameFormats.RDFFormat.TRIX)
    {
        return LoadStoreFromReader(new TriXParser(), r);
    }
    else if (rdff == dotSesameFormats.RDFFormat.TURTLE)
    {
        return LoadGraphFromReader(new TurtleParser(), r, baseUri);
    }
    else
    {
        throw new RdfParserSelectionException("The given Input Format is not supported by dotNetRDF");
    }
}

/// <summary>Loads a single graph from the reader, applying the base URI when given.</summary>
private static IGraph LoadGraphFromReader(IRdfReader parser, Reader r, string baseUri)
{
    IGraph g = new Graph();
    if (baseUri != null)
    {
        g.BaseUri = new Uri(baseUri);
    }
    parser.Load(g, r.ToDotNetReadableStream());
    return g;
}

/// <summary>Loads a triple store from the reader; base URI is not applied (matches original behavior).</summary>
private static ITripleStore LoadStoreFromReader(IStoreReader parser, Reader r)
{
    ITripleStore store = new TripleStore();
    parser.Load(store, new StreamParams(r.ToDotNetReadableStream().BaseStream));
    return store;
}
/// <summary>
/// Inserts content from the given stream which is expected
/// to be in a format appropriate for this kind of content
/// handler.
/// </summary>
/// <param name="in">Reader supplying the content to insert.</param>
/// <param name="doc">Document that receives the parsed content.</param>
/// <param name="pos">Position within <paramref name="doc"/> where insertion starts — TODO confirm semantics against concrete implementations.</param>
abstract public void read(Reader @in, Document @doc, int @pos);
/// <summary>
/// Parses the XML supplied by <paramref name="reader"/> into a DOM
/// <see cref="Document"/> using a fresh document builder.
/// </summary>
public static Document load(Reader reader)
{
    var source = new InputSource(reader);
    return newDocumentBuilder().parse(source);
}
/// <summary>
/// Builds a <see cref="CategoryDataset"/> from line-oriented input: the first
/// line supplies the column keys, every following line a row key plus its data.
/// </summary>
/// <param name="in">Reader supplying the dataset text.</param>
public virtual CategoryDataset readCategoryDataset(Reader @in)
{
    var dataset = new DefaultCategoryDataset();
    var lineReader = new BufferedReader(@in);
    List columnKeys = null;
    int lineIndex = 0;
    for (string line = lineReader.readLine(); line != null; line = lineReader.readLine())
    {
        if (lineIndex == 0)
        {
            columnKeys = this.extractColumnKeys(line);
        }
        else
        {
            this.extractRowKeyAndData(line, dataset, columnKeys);
        }
        ++lineIndex;
    }
    return dataset;
}
/// <summary>
/// Initializes from a stream. This implementation is intentionally a no-op —
/// presumably there is no state to restore here; confirm against callers.
/// </summary>
/// <param name="in">Reader the state would be loaded from (unused).</param>
/// <param name="desc">Descriptor for the incoming data (unused).</param>
public void read(Reader @in, object @desc) { }
/// <summary>
/// Parses the RDF data from <paramref name="r"/> and adds the result to the
/// store under the contexts mapped from <paramref name="rarr"/>.
/// </summary>
public virtual void add(Reader r, string str, org.openrdf.rio.RDFFormat rdff, params org.openrdf.model.Resource[] rarr)
{
    Object parsed = SesameHelper.LoadFromReader(r, str, rdff);
    this.AddInternal(parsed, rarr.ToContexts(this._mapping));
}