/// <summary>Copies all bytes from <paramref name="is"/> to <paramref name="os"/>, then closes both streams.</summary>
/// <param name="is">source stream; closed before returning, even on failure.</param>
/// <param name="os">destination stream; closed before returning, even on failure.</param>
/// <exception cref="System.IO.IOException"></exception>
public static void CopyStream(InputStream @is, OutputStream os)
{
    try
    {
        int n;
        byte[] buffer = new byte[16384];
        // Read() returns -1 at end of stream; anything >= 0 is a valid byte count.
        while ((n = @is.Read(buffer)) >= 0)
        {
            os.Write(buffer, 0, n);
        }
    }
    finally
    {
        // The original closed the streams only on success, leaking both
        // handles when Read/Write threw. Close them unconditionally,
        // output first (matching the original success-path order).
        try
        {
            os.Close();
        }
        finally
        {
            @is.Close();
        }
    }
}
/// <summary>Copies all bytes from <paramref name="is"/> into <paramref name="file"/>, then closes both streams.</summary>
/// <param name="is">source stream; closed before returning, even on failure.</param>
/// <param name="file">destination file, truncated/created via FileOutputStream.</param>
/// <exception cref="System.IO.IOException"></exception>
public static void CopyStreamToFile(InputStream @is, FilePath file)
{
    OutputStream os = new FileOutputStream(file);
    try
    {
        int n;
        byte[] buffer = new byte[16384];
        // Read() returns -1 at end of stream; anything >= 0 is a valid byte count.
        while ((n = @is.Read(buffer)) >= 0)
        {
            os.Write(buffer, 0, n);
        }
    }
    finally
    {
        // The original closed the streams only on success, leaking the file
        // handle (and the input stream) when Read/Write threw.
        try
        {
            os.Close();
        }
        finally
        {
            @is.Close();
        }
    }
}
/// <summary>Reads the remainder of <paramref name="is"/> into a byte array.</summary>
/// <param name="is">the stream to drain; NOT closed by this method.</param>
/// <returns>all bytes read from the stream, in order.</returns>
/// <exception cref="System.IO.IOException"></exception>
public static byte[] Read(InputStream @is)
{
    int initialCapacity = 1024;
    ByteArrayBuffer byteArrayBuffer = new ByteArrayBuffer(initialCapacity);
    byte[] bytes = new byte[512];
    int numRead;
    // BUG FIX: the original advanced a running `offset` into the scratch
    // buffer but always appended from index 0, corrupting the result, and
    // once offset reached bytes.Length it requested 0-byte reads forever.
    // Always read into the start of the scratch buffer instead.
    while ((numRead = @is.Read(bytes, 0, bytes.Length)) >= 0)
    {
        byteArrayBuffer.Append(bytes, 0, numRead);
    }
    return byteArrayBuffer.ToByteArray();
}
/// <summary>Loads the stream into a buffer.</summary>
/// <param name="in">an InputStream</param>
/// <exception cref="System.IO.IOException">If the stream cannot be read.</exception>
public ByteBuffer(InputStream @in)
{
    // load stream into buffer, growing by `chunk` whenever it fills up
    int chunk = 16384;
    this.length = 0;
    this.buffer = new sbyte[chunk];
    int read;
    // BUG FIX: the original stopped as soon as a single Read() returned
    // fewer than `chunk` bytes, silently truncating input from streams
    // that deliver short reads (e.g. network streams). Keep reading until
    // the stream reports end of input (read <= 0).
    while ((read = @in.Read(this.buffer, this.length, this.buffer.Length - this.length)) > 0)
    {
        this.length += read;
        if (this.length == this.buffer.Length)
        {
            // Buffer full: grow before the next read so the requested
            // length is always positive.
            EnsureCapacity(this.length + chunk);
        }
    }
}
/// <summary>
/// Reads one index entry from <paramref name="in"/>: the fixed-width info
/// record (stored into <paramref name="sharedInfo"/>), the path name, and
/// any trailing NUL padding. Every byte consumed is also fed into
/// <paramref name="md"/>.
/// </summary>
/// <param name="sharedInfo">shared buffer holding the fixed-width records of all entries.</param>
/// <param name="infoAt">offset of this entry's record; advanced past the record on return.</param>
/// <param name="in">stream positioned at this entry's record.</param>
/// <param name="md">digest accumulating the raw bytes read from the index.</param>
/// <exception cref="System.IO.IOException"></exception>
internal DirCacheEntry(byte[] sharedInfo, MutableInteger infoAt, InputStream @in, MessageDigest md)
{
    // private static final int P_CTIME_NSEC = 4;
    // private static final int P_MTIME_NSEC = 12;
    // private static final int P_DEV = 16;
    // private static final int P_INO = 20;
    // private static final int P_UID = 28;
    // private static final int P_GID = 32;
    info = sharedInfo;
    infoOffset = infoAt.value;
    IOUtil.ReadFully(@in, info, infoOffset, INFO_LEN);
    int len;
    if (IsExtended)
    {
        // Extended entries carry an extra flags word; reject flag bits we
        // do not understand rather than silently misreading the index.
        len = INFO_LEN_EXTENDED;
        IOUtil.ReadFully(@in, info, infoOffset + INFO_LEN, INFO_LEN_EXTENDED - INFO_LEN);
        if ((GetExtendedFlags() & ~EXTENDED_FLAGS) != 0)
        {
            throw new IOException(MessageFormat.Format(JGitText.Get().DIRCUnrecognizedExtendedFlags
                , GetExtendedFlags().ToString()));
        }
    }
    else
    {
        len = INFO_LEN;
    }
    infoAt.value += len;
    md.Update(info, infoOffset, len);
    // The low bits of the flags word hold the path length, saturated at
    // NAME_MASK when the real name is at least that long.
    int pathLen = NB.DecodeUInt16(info, infoOffset + P_FLAGS) & NAME_MASK;
    int skipped = 0;
    if (pathLen < NAME_MASK)
    {
        // Exact length known: read the path directly.
        path = new byte[pathLen];
        IOUtil.ReadFully(@in, path, 0, pathLen);
        md.Update(path, 0, pathLen);
    }
    else
    {
        // Length field saturated: read the first NAME_MASK bytes, then scan
        // byte by byte until the terminating NUL.
        ByteArrayOutputStream tmp = new ByteArrayOutputStream();
        {
            byte[] buf = new byte[NAME_MASK];
            IOUtil.ReadFully(@in, buf, 0, NAME_MASK);
            tmp.Write(buf);
        }
        for (; ; )
        {
            int c = @in.Read();
            if (c < 0)
            {
                throw new EOFException(JGitText.Get().shortReadOfBlock);
            }
            if (c == 0)
            {
                break;
            }
            tmp.Write(c);
        }
        path = tmp.ToByteArray();
        pathLen = path.Length;
        skipped = 1;
        // we already skipped 1 '\0' above to break the loop.
        md.Update(path, 0, pathLen);
        md.Update(unchecked((byte)0));
    }
    // Index records are padded out to the next 8 byte alignment
    // for historical reasons related to how C Git read the files.
    //
    int actLen = len + pathLen;
    int expLen = (actLen + 8) & ~7;
    int padLen = expLen - actLen - skipped;
    if (padLen > 0)
    {
        IOUtil.SkipFully(@in, padLen);
        md.Update(nullpad, 0, padLen);
    }
}
/// <summary>
/// Drains <paramref name="inputStream"/> into <c>outStream</c>, updating the
/// SHA-1 and MD5 digests and the running <c>length</c> as it goes. The input
/// stream is always closed before returning.
/// </summary>
/// <param name="inputStream">stream to consume; closed in all cases.</param>
/// <exception cref="RuntimeException">wraps any IOException raised while reading/writing.</exception>
internal virtual void Read(InputStream inputStream)
{
    byte[] buffer = new byte[1024];
    int len;
    length = 0;
    try
    {
        while ((len = inputStream.Read(buffer)) != -1)
        {
            outStream.Write(buffer, 0, len);
            // BUG FIX: only hash the bytes actually read. The original
            // called Update(buffer) with the full 1024-byte array, mixing
            // stale bytes from previous iterations into both digests.
            sha1Digest.Update(buffer, 0, len);
            md5Digest.Update(buffer, 0, len);
            length += len;
        }
    }
    catch (IOException e)
    {
        throw new RuntimeException("Unable to read from stream.", e);
    }
    finally
    {
        try
        {
            inputStream.Close();
        }
        catch (IOException e)
        {
            // Best-effort close; a failure here should not mask the result.
            Log.W(Database.Tag, "Exception closing input stream", e);
        }
    }
}
/// <summary>
/// Uploads the content of <paramref name="src"/> to the remote path
/// <paramref name="dst"/>: opens the remote file (OVERWRITE vs. append
/// semantics), pumps data with SSH_FXP_WRITE packets built directly in the
/// channel buffer, processes status acknowledgements, and finally closes
/// the remote handle.
/// </summary>
/// <param name="src">local stream supplying the file content.</param>
/// <param name="dst">remote destination path.</param>
/// <param name="monitor">optional progress callback; a false Count() aborts the transfer.</param>
/// <param name="mode">one of OVERWRITE, RESUME or APPEND.</param>
/// <exception cref="NSch.SftpException"></exception>
public virtual void _put(InputStream src, string dst, SftpProgressMonitor monitor
    , int mode)
{
    try
    {
        byte[] dstb = Util.Str2byte(dst, fEncoding);
        long skip = 0;
        if (mode == RESUME || mode == APPEND)
        {
            // Best effort: stat the remote file to learn how much already
            // exists; absence (or any stat failure) simply means skip = 0.
            try
            {
                SftpATTRS attr = _stat(dstb);
                skip = attr.GetSize();
            }
            catch (Exception)
            {
            }
        }
        //System.err.println(eee);
        if (mode == RESUME && skip > 0)
        {
            // Resume: advance the local stream past the bytes the remote
            // side already has.
            long skipped = src.Skip(skip);
            if (skipped < skip)
            {
                throw new SftpException(SSH_FX_FAILURE, "failed to resume for " + dst);
            }
        }
        if (mode == OVERWRITE)
        {
            SendOPENW(dstb);
        }
        else
        {
            SendOPENA(dstb);
        }
        ChannelHeader header = new ChannelHeader(this);
        header = Header(buf, header);
        int length = header.length;
        int type = header.type;
        Fill(buf, length);
        if (type != SSH_FXP_STATUS && type != SSH_FXP_HANDLE)
        {
            throw new SftpException(SSH_FX_FAILURE, "invalid type=" + type);
        }
        if (type == SSH_FXP_STATUS)
        {
            int i = buf.GetInt();
            ThrowStatusError(buf, i);
        }
        byte[] handle = buf.GetString();
        // handle
        byte[] data = null;
        bool dontcopy = true;
        if (!dontcopy)
        {
            data = new byte[buf.buffer.Length - (5 + 13 + 21 + handle.Length + 32 + 20)];
        }
        // padding and mac
        long offset = 0;
        if (mode == RESUME || mode == APPEND)
        {
            // Continue writing after the existing remote content.
            offset += skip;
        }
        int startid = seq;
        int _ackid = seq;
        int ackcount = 0;
        while (true)
        {
            int nread = 0;
            int s = 0;
            int datalen = 0;
            int count = 0;
            if (!dontcopy)
            {
                datalen = data.Length - s;
            }
            else
            {
                // Read the payload directly into the outgoing packet buffer,
                // leaving room for the WRITE packet header in front of it.
                data = buf.buffer;
                s = 5 + 13 + 21 + handle.Length;
                datalen = buf.buffer.Length - s - 32 - 20;
            }
            do
            {
                // padding and mac
                nread = src.Read(data, s, datalen);
                if (nread > 0)
                {
                    s += nread;
                    datalen -= nread;
                    count += nread;
                }
            }
            while (datalen > 0 && nread > 0);
            if (count <= 0)
            {
                // End of local input.
                break;
            }
            int _i = count;
            while (_i > 0)
            {
                // NOTE(review): `offset` and the data start index are not
                // advanced inside this inner loop; SendWRITE presumably
                // consumes the remaining bytes itself — confirm against
                // SendWRITE before changing.
                _i -= SendWRITE(handle, offset, data, 0, _i);
                if ((seq - 1) == startid || io_in.Available() >= 1024)
                {
                    // Drain pending status replies so the server's ack
                    // stream does not back up.
                    while (io_in.Available() > 0)
                    {
                        if (CheckStatus(ackid, header))
                        {
                            _ackid = ackid[0];
                            if (startid > _ackid || _ackid > seq - 1)
                            {
                                if (_ackid == seq)
                                {
                                    System.Console.Error.WriteLine("ack error: startid=" + startid + " seq=" + seq
                                        + " _ackid=" + _ackid);
                                }
                                else
                                {
                                    //throw new SftpException(SSH_FX_FAILURE, "ack error:");
                                    throw new SftpException(SSH_FX_FAILURE, "ack error: startid=" + startid + " seq="
                                        + seq + " _ackid=" + _ackid);
                                }
                            }
                            ackcount++;
                        }
                        else
                        {
                            break;
                        }
                    }
                }
            }
            offset += count;
            if (monitor != null && !monitor.Count(count))
            {
                // Caller requested cancellation.
                break;
            }
        }
        // Wait for the remaining acknowledgements before closing.
        int _ackcount = seq - startid;
        while (_ackcount > ackcount)
        {
            if (!CheckStatus(null, header))
            {
                break;
            }
            ackcount++;
        }
        if (monitor != null)
        {
            monitor.End();
        }
        _sendCLOSE(handle, header);
    }
    catch (Exception e)
    {
        // Preserve SftpExceptions as-is; wrap everything else.
        if (e is SftpException)
        {
            throw (SftpException)e;
        }
        if (e is Exception)
        {
            throw new SftpException(SSH_FX_FAILURE, e.ToString(), (Exception)e);
        }
        throw new SftpException(SSH_FX_FAILURE, e.ToString());
    }
}
/// <summary>
/// Reads up to <paramref name="cnt"/> bytes into <paramref name="hdr"/>
/// starting at <paramref name="off"/>, stopping early at end of stream.
/// </summary>
/// <param name="in">stream to read from.</param>
/// <param name="hdr">destination buffer.</param>
/// <param name="off">first index in <paramref name="hdr"/> to fill.</param>
/// <param name="cnt">maximum number of bytes to read.</param>
/// <returns>number of bytes actually read; may be less than requested.</returns>
/// <exception cref="System.IO.IOException"></exception>
private static int ReadSome(InputStream @in, byte[] hdr, int off, int cnt)
{
    int total = 0;
    while (cnt > 0)
    {
        int got = @in.Read(hdr, off, cnt);
        if (got < 0)
        {
            // End of stream: return what we have so far.
            break;
        }
        total += got;
        off += got;
        cnt -= got;
    }
    return total;
}
/// <summary>Compute the name of an object, without inserting it.</summary>
/// <remarks>
/// Hashes the canonical object header ("type SP length NUL") followed by
/// exactly <code>length</code> bytes of content from <code>in</code>.
/// </remarks>
/// <param name="objectType">type code of the object to store.</param>
/// <param name="length">number of bytes to scan from <code>in</code>.</param>
/// <param name="in">
/// stream providing the object content. The caller is responsible
/// for closing the stream.
/// </param>
/// <returns>the name of the object.</returns>
/// <exception cref="System.IO.IOException">the source stream could not be read.</exception>
public virtual ObjectId IdFor(int objectType, long length, InputStream @in)
{
    MessageDigest md = Digest();
    // Canonical git object header: "<type> <length>\0"
    md.Update(Constants.EncodedTypeString(objectType));
    md.Update(unchecked((byte)' '));
    md.Update(Constants.EncodeASCII(length));
    md.Update(unchecked((byte)0));
    byte[] buf = Buffer();
    long remaining = length;
    while (remaining > 0)
    {
        int got = @in.Read(buf, 0, (int)Math.Min(remaining, buf.Length));
        if (got < 0)
        {
            // Stream ended before `length` bytes were delivered.
            throw new EOFException("Unexpected end of input");
        }
        md.Update(buf, 0, got);
        remaining -= got;
    }
    return ObjectId.FromRaw(md.Digest());
}
/// <summary>
/// Folds up to <paramref name="remaining"/> bytes of <paramref name="in"/>
/// into the similarity index, hashing one line or one 64-byte block at a
/// time (whichever ends first) and recording each hash via Add().
/// </summary>
/// <param name="in">stream supplying the content to hash.</param>
/// <param name="remaining">number of bytes to consume from the stream.</param>
/// <exception cref="System.IO.IOException"></exception>
/// <exception cref="NGit.Diff.SimilarityIndex.TableFullException"></exception>
internal virtual void Hash(InputStream @in, long remaining)
{
    byte[] buf = new byte[4096];
    int ptr = 0;  // next unconsumed byte within buf
    int cnt = 0;  // number of valid bytes currently in buf
    while (0 < remaining)
    {
        // djb2-style seed: hash = hash * 33 + c per byte below.
        int hash = 5381;
        // Hash one line, or one block, whichever occurs first.
        int n = 0;
        do
        {
            if (ptr == cnt)
            {
                // Buffer exhausted: refill from the stream.
                ptr = 0;
                cnt = @in.Read(buf, 0, buf.Length);
                if (cnt <= 0)
                {
                    throw new EOFException();
                }
            }
            n++;
            int c = buf[ptr++] & unchecked((int)(0xff));
            if (c == '\n')
            {
                // Newline terminates the element (it is counted in n but
                // not mixed into the hash).
                break;
            }
            hash = (hash << 5) + hash + c;
        }
        while (n < 64 && n < remaining);
        Add(hash, n);
        remaining -= n;
    }
}
// ignore any close errors, this was a read only stream
/// <summary>Read an entire input stream into memory as a ByteBuffer.</summary>
/// <remarks>
/// Read an entire input stream into memory as a ByteBuffer.
/// Note: The stream is read to its end and is not usable after calling this
/// method. The caller is responsible for closing the stream.
/// </remarks>
/// <param name="in">input stream to be read.</param>
/// <param name="sizeHint">
/// a hint on the approximate number of bytes contained in the
/// stream, used to allocate temporary buffers more efficiently
/// </param>
/// <returns>
/// complete contents of the input stream, wrapped with
/// <code>position() == 0</code> and <code>limit()</code> equal to the
/// actual number of bytes read. The backing array is writable; if
/// <code>sizeHint</code> was too large the array may be over-allocated
/// (<code>limit() &lt; array().length</code>).
/// </returns>
/// <exception cref="System.IO.IOException">there was an error reading from the stream.
/// </exception>
public static ByteBuffer ReadWholeStream(InputStream @in, int sizeHint)
{
    // First try to satisfy the whole read inside the hinted buffer.
    byte[] buf = new byte[sizeHint];
    int filled = 0;
    while (filled < buf.Length)
    {
        int n = @in.Read(buf, filled, buf.Length - filled);
        if (n < 0)
        {
            // Stream ended within the hint: done, no copy needed.
            return ByteBuffer.Wrap(buf, 0, filled);
        }
        filled += n;
    }
    // Hint buffer is full; probe one more byte to see if the stream ended
    // exactly at the hint boundary.
    int next = @in.Read();
    if (next < 0)
    {
        return ByteBuffer.Wrap(buf, 0, filled);
    }
    // Hint was too small: spill everything into a growable buffer.
    TemporaryBuffer.Heap spill = new TemporaryBuffer.Heap(int.MaxValue);
    spill.Write(buf);
    spill.Write(next);
    spill.Copy(@in);
    return ByteBuffer.Wrap(spill.ToByteArray());
}
/// <summary>Read the entire byte array into memory, unless input is shorter</summary>
/// <param name="fd">input stream to read the data from.</param>
/// <param name="dst">buffer to fill from <paramref name="off"/> to the end.</param>
/// <param name="off">position within the buffer to start writing to.</param>
/// <returns>number of bytes actually read (buffer space or stream length, whichever is smaller)</returns>
/// <exception cref="System.IO.IOException">there was an error reading from the stream.
/// </exception>
public static int ReadFully(InputStream fd, byte[] dst, int off)
{
    int len = 0;
    // BUG FIX: the original looped while `len < dst.Length`, but when
    // off > 0 `len` can never reach dst.Length even after the buffer is
    // full, so it spun forever issuing 0-byte reads. Guard on the actual
    // remaining space instead.
    while (off < dst.Length)
    {
        int r = fd.Read(dst, off, dst.Length - off);
        if (r <= 0)
        {
            // End of stream (or defensive stop on a 0-byte read).
            break;
        }
        off += r;
        len += r;
    }
    return len;
}
/// <summary>Read the entire byte array into memory, or throw an exception.</summary>
/// <remarks>Read the entire byte array into memory, or throw an exception.</remarks>
/// <param name="fd">input stream to read the data from.</param>
/// <param name="dst">buffer that must be fully populated, [off, off+len).</param>
/// <param name="off">position within the buffer to start writing to.</param>
/// <param name="len">number of bytes that must be read.</param>
/// <exception cref="Sharpen.EOFException">the stream ended before dst was fully populated.
/// </exception>
/// <exception cref="System.IO.IOException">there was an error reading from the stream.
/// </exception>
public static void ReadFully(InputStream fd, byte[] dst, int off, int len)
{
    int pos = off;
    int remaining = len;
    while (remaining > 0)
    {
        int got = fd.Read(dst, pos, remaining);
        if (got <= 0)
        {
            // Stream ended before the requested range was filled.
            throw new EOFException(JGitText.Get().shortReadOfBlock);
        }
        pos += got;
        remaining -= got;
    }
}
/// <summary>
/// Reads exactly <paramref name="len"/> bytes from <paramref name="in"/>
/// into the start of <paramref name="buf"/>, failing if the stream ends first.
/// </summary>
/// <exception cref="NSch.JSchException">the stream closed before len bytes arrived.</exception>
/// <exception cref="System.IO.IOException"></exception>
private void Fill(InputStream @in, byte[] buf, int len)
{
    int filled = 0;
    while (filled < len)
    {
        int got = @in.Read(buf, filled, len - filled);
        if (got <= 0)
        {
            throw new JSchException("ProxySOCKS5: stream is closed");
        }
        filled += got;
    }
}
/// <summary>
/// Streams exactly <paramref name="len"/> bytes of object content from
/// <paramref name="is"/> into a new temporary file, deflate-compressed and
/// prefixed with the object header, while updating <paramref name="md"/>
/// through a DigestOutputStream. On failure the temp file is deleted.
/// </summary>
/// <param name="md">digest that accumulates the uncompressed header + content.</param>
/// <param name="type">object type code written into the header.</param>
/// <param name="len">exact number of content bytes expected from the stream.</param>
/// <param name="is">source of the object content.</param>
/// <returns>the temporary file containing the compressed object.</returns>
/// <exception cref="System.IO.IOException"></exception>
/// <exception cref="System.IO.FileNotFoundException"></exception>
/// <exception cref="Sharpen.Error"></exception>
private FilePath ToTemp(MessageDigest md, int type, long len, InputStream @is)
{
    bool delete = true;
    FilePath tmp = NewTempFile();
    try
    {
        FileOutputStream fOut = new FileOutputStream(tmp);
        try
        {
            OutputStream @out = fOut;
            if (config.GetFSyncObjectFiles())
            {
                // Route writes through the channel so Force() below can
                // flush them to disk.
                @out = Channels.NewOutputStream(fOut.GetChannel());
            }
            DeflaterOutputStream cOut = Compress(@out);
            DigestOutputStream dOut = new DigestOutputStream(cOut, md);
            WriteHeader(dOut, type, len);
            byte[] buf = Buffer();
            while (len > 0)
            {
                int n = @is.Read(buf, 0, (int)Math.Min(len, buf.Length));
                if (n <= 0)
                {
                    // Caller promised `len` bytes; fewer arrived.
                    throw ShortInput(len);
                }
                dOut.Write(buf, 0, n);
                len -= n;
            }
            // Flush/Finish instead of Close: the underlying file stream is
            // closed once, below, after the optional fsync.
            dOut.Flush();
            cOut.Finish();
        }
        finally
        {
            if (config.GetFSyncObjectFiles())
            {
                fOut.GetChannel().Force(true);
            }
            fOut.Close();
        }
        delete = false;
        return tmp;
    }
    finally
    {
        // Only keep the temp file if everything above succeeded.
        if (delete)
        {
            FileUtils.Delete(tmp);
        }
    }
}
/// <summary>Copy all bytes remaining on the input stream into this buffer.</summary>
/// <remarks>Copy all bytes remaining on the input stream into this buffer.</remarks>
/// <param name="in">the stream to read from, until EOF is reached.</param>
/// <exception cref="System.IO.IOException">
/// an error occurred reading from the input stream, or while
/// writing to a local temporary file.
/// </exception>
public virtual void Copy(InputStream @in)
{
    if (blocks != null)
    {
        // Still buffering in core: append into the last block, adding new
        // blocks until either EOF or the in-core limit is hit.
        for (; ; )
        {
            TemporaryBuffer.Block s = Last();
            if (s.IsFull())
            {
                // NOTE(review): ReachedInCoreLimit() appears to switch this
                // buffer into overflow mode as a side effect — confirm
                // against the rest of the class before restructuring.
                if (ReachedInCoreLimit())
                {
                    break;
                }
                s = new TemporaryBuffer.Block();
                blocks.AddItem(s);
            }
            int n = @in.Read(s.buffer, s.count, s.buffer.Length - s.count);
            if (n < 1)
            {
                // EOF: everything fit in core.
                return;
            }
            s.count += n;
        }
    }
    // In-core limit exceeded (or buffer already spilled): stream the rest
    // of the input into the overflow destination in block-sized chunks.
    byte[] tmp = new byte[TemporaryBuffer.Block.SZ];
    int n_1;
    while ((n_1 = @in.Read(tmp)) > 0)
    {
        overflow.Write(tmp, 0, n_1);
    }
}
/// <summary>Reads the first two bytes from <code>inputStream</code>, then rewinds.</summary>
/// <returns>
/// the two bytes combined big-endian into one int, or -1 if the stream
/// ended before two bytes were available. The stream position is restored
/// via Mark/Reset either way.
/// </returns>
/// <exception cref="System.IO.IOException"/>
private static int PeekMagicNumber(InputStream inputStream)
{
    inputStream.Mark(2);
    int first = inputStream.Read();
    int second = inputStream.Read();
    inputStream.Reset();
    // Either read hitting EOF means there is no two-byte magic number.
    return (first == -1 || second == -1) ? -1 : (first << 8 | second);
}
/// <summary>
/// Verifies that the inflater has produced all of the object's data and
/// that nothing trails the compressed stream: any extra inflated bytes,
/// leftover compressed input, or additional raw bytes mark the object as
/// corrupt.
/// </summary>
/// <param name="in">stream holding the remainder of the compressed data.</param>
/// <param name="inf">inflater already primed with earlier input.</param>
/// <param name="id">object id, used only for error reporting.</param>
/// <param name="buf">scratch buffer shared by inflate and read calls.</param>
/// <exception cref="System.IO.IOException"></exception>
/// <exception cref="NGit.Errors.CorruptObjectException"></exception>
private static void CheckValidEndOfStream(InputStream @in, Inflater inf, AnyObjectId id, byte[] buf)
{
    for (; ; )
    {
        int r;
        try
        {
            r = inf.Inflate(buf);
        }
        catch (SharpZipBaseException)
        {
            // Underlying zlib data is malformed.
            throw new CorruptObjectException(id, JGitText.Get().corruptObjectBadStream);
        }
        if (r != 0)
        {
            // More data inflated than the declared object length.
            throw new CorruptObjectException(id, JGitText.Get().corruptObjectIncorrectLength);
        }
        if (inf.IsFinished)
        {
            // Compressed stream ended: there must be no unconsumed
            // compressed input and no further bytes on the raw stream.
            if (inf.RemainingInput != 0 || @in.Read() != -1)
            {
                throw new CorruptObjectException(id, JGitText.Get().corruptObjectBadStream);
            }
            break;
        }
        if (!inf.IsNeedingInput)
        {
            // Not finished, produced nothing, yet wants no more input:
            // the stream is corrupt.
            throw new CorruptObjectException(id, JGitText.Get().corruptObjectBadStream);
        }
        r = @in.Read(buf);
        if (r <= 0)
        {
            // Raw stream ended before the compressed stream did.
            throw new CorruptObjectException(id, JGitText.Get().corruptObjectBadStream);
        }
        inf.SetInput(buf, 0, r);
    }
}
/// <summary>
/// Uploads the content of <paramref name="src"/> to the remote path
/// <paramref name="dst"/>. This variant builds SSH_FXP_WRITE packets
/// directly in the outgoing channel buffer and throttles outstanding
/// requests using the request queue size and the remote window (rwsize).
/// </summary>
/// <param name="src">local stream supplying the file content.</param>
/// <param name="dst">remote destination path.</param>
/// <param name="monitor">optional progress callback; a false Count() aborts the transfer.</param>
/// <param name="mode">one of OVERWRITE, RESUME or APPEND.</param>
/// <exception cref="NSch.SftpException"></exception>
public virtual void _put(InputStream src, string dst, SftpProgressMonitor monitor
    , int mode)
{
    try
    {
        ((Channel.MyPipedInputStream)io_in).UpdateReadSide();
        byte[] dstb = Util.Str2byte(dst, fEncoding);
        long skip = 0;
        if (mode == RESUME || mode == APPEND)
        {
            // Best effort: stat the remote file to learn how much already
            // exists; absence (or any stat failure) simply means skip = 0.
            try
            {
                SftpATTRS attr = _stat(dstb);
                skip = attr.GetSize();
            }
            catch (Exception)
            {
            }
        }
        //System.err.println(eee);
        if (mode == RESUME && skip > 0)
        {
            // Resume: advance the local stream past the bytes the remote
            // side already has.
            long skipped = src.Skip(skip);
            if (skipped < skip)
            {
                throw new SftpException(SSH_FX_FAILURE, "failed to resume for " + dst);
            }
        }
        if (mode == OVERWRITE)
        {
            SendOPENW(dstb);
        }
        else
        {
            SendOPENA(dstb);
        }
        ChannelHeader header = new ChannelHeader(this);
        header = Header(buf, header);
        int length = header.length;
        int type = header.type;
        Fill(buf, length);
        if (type != SSH_FXP_STATUS && type != SSH_FXP_HANDLE)
        {
            throw new SftpException(SSH_FX_FAILURE, "invalid type=" + type);
        }
        if (type == SSH_FXP_STATUS)
        {
            int i = buf.GetInt();
            ThrowStatusError(buf, i);
        }
        byte[] handle = buf.GetString();
        // handle
        byte[] data = null;
        bool dontcopy = true;
        if (!dontcopy)
        {
            // This case will not work anymore.
            data = new byte[obuf.buffer.Length - (5 + 13 + 21 + handle.Length + Session.buffer_margin
                )];
        }
        long offset = 0;
        if (mode == RESUME || mode == APPEND)
        {
            // Continue writing after the existing remote content.
            offset += skip;
        }
        int startid = seq;
        int ackcount = 0;
        int _s = 0;
        int _datalen = 0;
        if (!dontcopy)
        {
            // This case will not work anymore.
            _datalen = data.Length;
        }
        else
        {
            // Read the payload directly into the outgoing packet buffer,
            // leaving room for the WRITE packet header in front of it.
            data = obuf.buffer;
            _s = 5 + 13 + 21 + handle.Length;
            _datalen = obuf.buffer.Length - _s - Session.buffer_margin;
        }
        // Maximum number of unacknowledged WRITE requests kept in flight.
        int bulk_requests = rq.Size();
        while (true)
        {
            int nread = 0;
            int count = 0;
            int s = _s;
            int datalen = _datalen;
            do
            {
                nread = src.Read(data, s, datalen);
                if (nread > 0)
                {
                    s += nread;
                    datalen -= nread;
                    count += nread;
                }
            }
            while (datalen > 0 && nread > 0);
            if (count <= 0)
            {
                // End of local input.
                break;
            }
            int foo = count;
            while (foo > 0)
            {
                if ((seq - 1) == startid || ((seq - startid) - ackcount) >= bulk_requests)
                {
                    // Too many requests outstanding: collect acknowledgements
                    // until we are back under the limit (or the remote window
                    // can absorb the pending bytes).
                    while (((seq - startid) - ackcount) >= bulk_requests)
                    {
                        if (this.rwsize >= foo)
                        {
                            break;
                        }
                        if (CheckStatus(ackid, header))
                        {
                            int _ackid = ackid[0];
                            if (startid > _ackid || _ackid > seq - 1)
                            {
                                if (_ackid == seq)
                                {
                                    System.Console.Error.WriteLine("ack error: startid=" + startid + " seq=" + seq
                                        + " _ackid=" + _ackid);
                                }
                                else
                                {
                                    throw new SftpException(SSH_FX_FAILURE, "ack error: startid=" + startid + " seq="
                                        + seq + " _ackid=" + _ackid);
                                }
                            }
                            ackcount++;
                        }
                        else
                        {
                            break;
                        }
                    }
                }
                // NOTE(review): `offset` and the data start index are not
                // advanced inside this inner loop; SendWRITE presumably
                // consumes the remaining bytes itself — confirm against
                // SendWRITE before changing.
                foo -= SendWRITE(handle, offset, data, 0, foo);
            }
            offset += count;
            if (monitor != null && !monitor.Count(count))
            {
                // Caller requested cancellation.
                break;
            }
        }
        // Wait for the remaining acknowledgements before closing.
        int _ackcount = seq - startid;
        while (_ackcount > ackcount)
        {
            if (!CheckStatus(null, header))
            {
                break;
            }
            ackcount++;
        }
        if (monitor != null)
        {
            monitor.End();
        }
        _sendCLOSE(handle, header);
    }
    catch (Exception e)
    {
        // Preserve SftpExceptions as-is; wrap everything else.
        if (e is SftpException)
        {
            throw (SftpException)e;
        }
        if (e is Exception)
        {
            throw new SftpException(SSH_FX_FAILURE, e.ToString(), (Exception)e);
        }
        throw new SftpException(SSH_FX_FAILURE, e.ToString());
    }
}
/// <summary>
/// Determine heuristically whether the bytes contained in a stream
/// represents binary (as opposed to text) content.
/// </summary>
/// <remarks>
/// Determine heuristically whether the bytes contained in a stream
/// represents binary (as opposed to text) content.
/// Note: Do not further use this stream after having called this method! The
/// stream may not be fully read and will be left at an unknown position
/// after consuming an unknown number of bytes. The caller is responsible for
/// closing the stream.
/// </remarks>
/// <param name="raw">input stream containing the raw file content.</param>
/// <returns>true if raw is likely to be a binary file, false otherwise</returns>
/// <exception cref="System.IO.IOException">if input stream could not be read</exception>
public static bool IsBinary(InputStream raw)
{
    // Sample at most FIRST_FEW_BYTES bytes, tolerating short reads.
    byte[] head = new byte[FIRST_FEW_BYTES];
    int filled = 0;
    for (; filled < head.Length; )
    {
        int got = raw.Read(head, filled, head.Length - filled);
        if (got == -1)
        {
            // Stream shorter than the sample window.
            break;
        }
        filled += got;
    }
    // Delegate the actual heuristic to the byte-array overload.
    return IsBinary(head, filled);
}
/// <summary>
/// Stores the entire content of <paramref name="inputStream"/> as a blob:
/// the data is written to a temp file, a content-derived key is computed
/// from it, and the temp file is renamed into place (or discarded when a
/// blob with the same key already exists).
/// </summary>
/// <param name="inputStream">stream supplying the blob content; always closed.</param>
/// <param name="outKey">receives the content-derived key of the stored blob.</param>
/// <returns>true on success, false if the content could not be written.</returns>
public virtual bool StoreBlobStream(InputStream inputStream, BlobKey outKey)
{
    FilePath tmp = null;
    try
    {
        tmp = FilePath.CreateTempFile(TmpFilePrefix, TmpFileExtension, new FilePath(path)
            );
        FileOutputStream fos = new FileOutputStream(tmp);
        try
        {
            byte[] buffer = new byte[65536];
            int lenRead;
            while ((lenRead = inputStream.Read(buffer)) > 0)
            {
                fos.Write(buffer, 0, lenRead);
            }
        }
        finally
        {
            // BUG FIX: the original closed the streams only on success,
            // leaking both handles when Read/Write threw.
            try
            {
                fos.Close();
            }
            finally
            {
                inputStream.Close();
            }
        }
    }
    catch (IOException e)
    {
        // Fixed typo in the original message ("blog" -> "blob").
        Log.E(Log.TagBlobStore, "Error writing blob to tmp file", e);
        // Don't leave a partial temp file behind on failure.
        if (tmp != null)
        {
            tmp.Delete();
        }
        return false;
    }
    BlobKey newKey = KeyForBlobFromFile(tmp);
    outKey.SetBytes(newKey.GetBytes());
    // Renamed from `path` to avoid shadowing the field of the same name
    // used by CreateTempFile above.
    string blobPath = PathForKey(outKey);
    FilePath file = new FilePath(blobPath);
    if (file.CanRead())
    {
        // Object with this hash already exists: discard the temp copy.
        tmp.Delete();
        return true;
    }
    else
    {
        // New content: move the temp file into its content-addressed home.
        tmp.RenameTo(file);
    }
    return true;
}