/*
 * Reads all the bytes from the given input stream, then closes it.
 *
 * Calls read multiple times on the given input stream until it receives an
 * end of file marker. Returns the combined results as a byte array. Note
 * that this method may block if the underlying stream read blocks.
 *
 * @param is
 *            the input stream to be read.
 * @return the content of the stream as a byte array (empty if the stream
 *         contains no bytes).
 * @throws IOException
 *             if a read error occurs.
 */
public static byte[] readFullyAndClose(java.io.InputStream isJ) // throws IOException
{
    try
    {
        // Initial read: most payloads fit into a single 1 KiB buffer.
        byte[] buffer = new byte[1024];
        int count = isJ.read(buffer);
        if (count == -1)
        {
            // Empty stream. Without this guard the original code fell
            // through to "new byte[-1]" and crashed.
            return new byte[0];
        }
        int nextByte = isJ.read();

        // Did we get it all in one read?
        if (nextByte == -1)
        {
            byte[] dest = new byte[count];
            java.lang.SystemJ.arraycopy(buffer, 0, dest, 0, count);
            return dest;
        }

        // Requires additional reads: accumulate in a growable buffer.
        java.io.ByteArrayOutputStream baos = new java.io.ByteArrayOutputStream(count * 2);
        baos.write(buffer, 0, count);
        baos.write(nextByte);
        while (true)
        {
            count = isJ.read(buffer);
            if (count == -1)
            {
                return baos.toByteArray();
            }
            baos.write(buffer, 0, count);
        }
    }
    finally
    {
        // Always close the stream, even if a read failed.
        isJ.close();
    }
}
/**
 * Consumes an InputStream to its end and returns the digest of all bytes read.
 *
 * @param digest
 *            The MessageDigest to use (e.g. MD5)
 * @param data
 *            Data to digest
 * @return the completed digest value
 * @throws IOException
 *             On error reading from the stream
 */
private static byte[] digest(java.security.MessageDigest digest, java.io.InputStream data)
//throws IOException
{
    byte[] chunk = new byte[STREAM_BUFFER_LENGTH];
    // Feed the digest one chunk at a time until end-of-stream (-1).
    for (int n = data.read(chunk, 0, STREAM_BUFFER_LENGTH);
         n > -1;
         n = data.read(chunk, 0, STREAM_BUFFER_LENGTH))
    {
        digest.update(chunk, 0, n);
    }
    return digest.digest();
}
/**
 * Create an archive input stream from an input stream, autodetecting
 * the archive type from the first few bytes of the stream. The InputStream
 * must support marks, like BufferedInputStream.
 *
 * @param in the input stream
 * @return the archive input stream
 * @throws ArchiveException if the archiver name is not known
 * @throws IllegalArgumentException if the stream is null or does not support mark
 */
public ArchiveInputStream createArchiveInputStream(java.io.InputStream inJ)
//throws ArchiveException
{
    if (inJ == null)
    {
        throw new java.lang.IllegalArgumentException("Stream must not be null.");
    }
    if (!inJ.markSupported())
    {
        // mark/reset is required so the probed bytes can be replayed.
        throw new java.lang.IllegalArgumentException("Mark is not supported.");
    }

    // 12 bytes are enough to distinguish zip/jar/ar/cpio signatures.
    byte[] signature = new byte[12];
    inJ.mark(signature.Length);
    try
    {
        int signatureLength = inJ.read(signature);
        inJ.reset();

        if (ZipArchiveInputStream.matches(signature, signatureLength))
        {
            return new ZipArchiveInputStream(inJ);
        }
        if (JarArchiveInputStream.matches(signature, signatureLength))
        {
            return new JarArchiveInputStream(inJ);
        }
        if (ArArchiveInputStream.matches(signature, signatureLength))
        {
            return new ArArchiveInputStream(inJ);
        }
        if (CpioArchiveInputStream.matches(signature, signatureLength))
        {
            return new CpioArchiveInputStream(inJ);
        }

        // Tar needs a bigger buffer to check the signature; read the first block
        byte[] tarheader = new byte[512];
        inJ.mark(tarheader.Length);
        signatureLength = inJ.read(tarheader);
        inJ.reset();
        if (TarArchiveInputStream.matches(tarheader, signatureLength))
        {
            return new TarArchiveInputStream(inJ);
        }
    }
    catch (java.io.IOException e)
    {
        throw new ArchiveException("Could not use reset and mark operations.", e);
    }
    throw new ArchiveException("No Archiver found for the stream signature");
}
/*
 * Reads the next n bits from the bit-level input buffer, refilling it
 * from the underlying stream one byte at a time as needed.
 *
 * @param n number of bits to read
 * @return the n bits, right-aligned in the returned int
 * @throws IOException if the stream ends before n bits are available
 */
private int bsR(int n)
//throws IOException
{
    int live = this.bsLive;
    int buff = this.bsBuff;

    if (live < n)
    {
        // Not enough buffered bits: pull whole bytes until we have >= n.
        java.io.InputStream src = this.inJ;
        do
        {
            int ch = src.read();
            if (ch < 0)
            {
                throw new java.io.IOException("unexpected end of stream");
            }
            buff = (buff << 8) | ch;
            live += 8;
        } while (live < n);
        this.bsBuff = buff;
    }

    this.bsLive = live - n;
    // Extract the top n of the 'live' buffered bits.
    return (buff >> (live - n)) & ((1 << n) - 1);
}
/**
 * Create an compressor input stream from an input stream, autodetecting
 * the compressor type from the first few bytes of the stream. The InputStream
 * must support marks, like BufferedInputStream.
 *
 * @param in the input stream
 * @return the compressor input stream
 * @throws CompressorException if the compressor name is not known
 * @throws IllegalArgumentException if the stream is null or does not support mark
 * @since Commons Compress 1.1
 */
public CompressorInputStream createCompressorInputStream(java.io.InputStream inJ)
//throws CompressorException
{
    if (inJ == null)
    {
        throw new java.lang.IllegalArgumentException("Stream must not be null.");
    }
    if (!inJ.markSupported())
    {
        // mark/reset is needed to un-read the sniffed signature bytes.
        throw new java.lang.IllegalArgumentException("Mark is not supported.");
    }

    byte[] signature = new byte[12];
    inJ.mark(signature.Length);
    try
    {
        int signatureLength = inJ.read(signature);
        inJ.reset();

        if (BZip2CompressorInputStream.matches(signature, signatureLength))
        {
            return new BZip2CompressorInputStream(inJ);
        }
        if (GzipCompressorInputStream.matches(signature, signatureLength))
        {
            return new GzipCompressorInputStream(inJ);
        }
    }
    catch (java.io.IOException e)
    {
        throw new CompressorException("Failed to detect Compressor from InputStream.", e);
    }
    throw new CompressorException("No Compressor found for the stream signature.");
}
/*
 * Read a two-byte short in little-endian order.
 *
 * Note: InputStream.read(byte[], int, int) may legally return fewer bytes
 * than requested, so we loop until both bytes are present. The original
 * single-call version threw EOFException on a short (but non-EOF) read.
 *
 * @param inJ the stream to read from
 * @return the unsigned 16-bit value
 * @throws EOFException if the stream ends before two bytes were read
 */
internal int readShortLE(java.io.InputStream inJ)
//throws IOException
{
    int off = 0;
    while (off < 2)
    {
        int n = inJ.read(b, off, 2 - off);
        if (n <= 0)
        {
            throw new java.io.EOFException();//Messages.getString("archive.3C"));
        }
        off += n;
    }
    return (b[0] & 0XFF) | ((b[1] & 0XFF) << 8);
}
/**
 * Copies the content of a InputStream into an OutputStream
 *
 * @param input
 *            the InputStream to copy
 * @param output
 *            the target Stream
 * @param buffersize
 *            the buffer size to use
 * @return the total number of bytes copied
 * @throws IOException
 *             if an error occurs
 */
public static long copy(java.io.InputStream input, java.io.OutputStream output, int buffersize)
//throws IOException
{
    byte[] chunk = new byte[buffersize];
    long total = 0;
    // Pump chunks from input to output until end-of-stream (-1).
    for (int read = input.read(chunk); read != -1; read = input.read(chunk))
    {
        output.write(chunk, 0, read);
        total += read;
    }
    return total;
}
/*
 * Drains the given input stream into a byte array using a 32 KiB
 * transfer buffer. Reading stops at the first non-positive read result.
 */
static internal byte[] toByteArray(java.io.InputStream inJ)
{
    java.io.ByteArrayOutputStream sink = new java.io.ByteArrayOutputStream();
    byte[] chunk = new byte[32 * 1024];
    for (int n = inJ.read(chunk); n > 0; n = inJ.read(chunk))
    {
        sink.write(chunk, 0, n);
    }
    return sink.toByteArray();
}
/*
 * Read a four-byte int in little-endian order.
 *
 * Note: InputStream.read(byte[], int, int) may legally return fewer bytes
 * than requested, so we loop until all four bytes are present. The original
 * single-call version threw EOFException on a short (but non-EOF) read.
 *
 * @param inJ the stream to read from
 * @return the unsigned 32-bit value as a long
 * @throws EOFException if the stream ends before four bytes were read
 */
internal long readIntLE(java.io.InputStream inJ)
//throws IOException
{
    int off = 0;
    while (off < 4)
    {
        int n = inJ.read(b, off, 4 - off);
        if (n <= 0)
        {
            throw new java.io.EOFException();//Messages.getString("archive.3D"));
        }
        off += n;
    }
    // Mask to 32 bits: here for sure NO sign extension is wanted.
    return (((b[0] & 0XFF))
            | ((b[1] & 0XFF) << 8)
            | ((b[2] & 0XFF) << 16)
            | ((b[3] & 0XFF) << 24)) & 0XFFFFFFFFL;
}
/*
 * Helper to read the entire contents of the manifest from the
 * given input stream. Usually we can do this in a single read
 * but we need to account for 'infinite' streams, by ensuring we
 * have a line feed within a reasonable number of characters.
 *
 * @param isJ the stream holding the manifest bytes
 * @return all bytes of the manifest (empty array for an empty stream)
 * @throws IOException on read error, or if the first 4 KiB contain no
 *                     line feed (manifest too long / not a manifest)
 */
private byte[] readFully(java.io.InputStream isJ) // throws IOException
{
    // Initial read
    byte[] buffer = new byte[4096];
    int count = isJ.read(buffer);
    if (count == -1)
    {
        // Empty stream. Without this guard the original code fell
        // through to "new byte[-1]" and crashed.
        return new byte[0];
    }
    int nextByte = isJ.read();

    // Did we get it all in one read?
    if (nextByte == -1)
    {
        byte[] dest = new byte[count];
        java.lang.SystemJ.arraycopy(buffer, 0, dest, 0, count);
        return dest;
    }

    // Does it look like a manifest?
    if (!containsLine(buffer, count))
    {
        // archive.2E=Manifest is too long
        throw new java.io.IOException("Manifest is too long"); //$NON-NLS-1$
    }

    // Requires additional reads
    java.io.ByteArrayOutputStream baos = new java.io.ByteArrayOutputStream(count * 2);
    baos.write(buffer, 0, count);
    baos.write(nextByte);
    while (true)
    {
        count = isJ.read(buffer);
        if (count == -1)
        {
            return baos.toByteArray();
        }
        baos.write(buffer, 0, count);
    }
}
/*
 * Fills the given buffer completely from the stream, looping over
 * partial reads. Throws EOFException if the stream ends (or a read
 * returns no progress) before the buffer is full.
 */
private void myReadFully(java.io.InputStream inJ, byte[] b)
//throws IOException
{
    int remaining = b.Length;
    int position = 0;
    while (remaining > 0)
    {
        int n = inJ.read(b, position, remaining);
        if (n <= 0)
        {
            throw new java.io.EOFException();
        }
        position += n;
        remaining -= n;
    }
}
/*
 * Decodes the first MTF symbol of a selector group using the Huffman
 * tables for that group. Mirrors the inlined bit-reading done in
 * getAndMoveToFrontDecode(), but starts from the table minimum length
 * and widens the code one bit at a time until it falls inside the
 * limit table.
 *
 * @param groupNo index into the selector table choosing the Huffman group
 * @return the decoded symbol (index into the permutation table)
 * @throws IOException if the underlying stream ends mid-code
 */
private int getAndMoveToFrontDecode0(int groupNo)
//throws IOException
{
    java.io.InputStream inShadow = this.inJ;
    DataI dataShadow = this.data;
    // Huffman table selected for this group.
    int zt = dataShadow.selector[groupNo] & 0xff;
    int[] limit_zt = java.util.Arrays <int> .getIndexArray(dataShadow.limit, zt);;// dataShadow.limit[zt];
    // Start at the shortest code length used by this table.
    int zn = dataShadow.minLens[zt];
    int zvec = bsR(zn);
    // Local copies of the bit buffer for speed; written back at the end.
    int bsLiveShadow = this.bsLive;
    int bsBuffShadow = this.bsBuff;

    // Extend the code one bit at a time until it is a valid codeword.
    while (zvec > limit_zt[zn])
    {
        zn++;
        // Refill the bit buffer byte-by-byte when it runs dry.
        while (bsLiveShadow < 1)
        {
            int thech = inShadow.read();
            if (thech >= 0)
            {
                bsBuffShadow = (bsBuffShadow << 8) | thech;
                bsLiveShadow += 8;
                continue;
            }
            else
            {
                throw new java.io.IOException("unexpected end of stream");
            }
        }
        bsLiveShadow--;
        zvec = (zvec << 1) | ((bsBuffShadow >> bsLiveShadow) & 1);
    }

    // Write the consumed-bit state back to the instance fields.
    this.bsLive = bsLiveShadow;
    this.bsBuff = bsBuffShadow;
    return (dataShadow.perm[zt, zvec - dataShadow.baseJ[zt, zn]]);
}
/*
 * Main MTF/RLE decode loop of the BZip2 block decoder. Reads the block's
 * origin pointer, the decoding tables, then decodes symbols until the
 * end-of-block symbol, expanding RUNA/RUNB run-length pairs and applying
 * the move-to-front transform into ll8.
 *
 * Fix: removed a leftover debug statement
 * "if (groupNo == 18000 && groupPos == 0) { Console.Beep(); }"
 * which beeped from inside the hot decode loop; it had no effect on the
 * decoded output.
 *
 * @throws IOException on unexpected end of stream or block overrun
 */
private void getAndMoveToFrontDecode()
//throws IOException
{
    this.origPtr = bsR(24);
    recvDecodingTables();

    // Local copies of hot fields to keep the inner loops tight.
    java.io.InputStream inShadow = this.inJ;
    DataI dataShadow = this.data;
    byte[] ll8 = dataShadow.ll8;
    int[] unzftab = dataShadow.unzftab;
    byte[] selector = dataShadow.selector;
    byte[] seqToUnseq = dataShadow.seqToUnseq;
    char[] yy = dataShadow.getAndMoveToFrontDecode_yy;
    int[] minLens = dataShadow.minLens;
    int[,] limit = dataShadow.limit;
    int[,] baseJ = dataShadow.baseJ;
    int[,] perm = dataShadow.perm;
    int limitLast = this.blockSize100k * 100000;

    /*
     * Setting up the unzftab entries here is not strictly necessary, but it
     * does save having to do it later in a separate pass, and so saves a
     * block's worth of cache misses.
     */
    for (int i = 256; --i >= 0;)
    {
        yy[i] = (char)i;
        unzftab[i] = 0;
    }

    int groupNo = 0;
    int groupPos = BZip2Constants.G_SIZE - 1;
    int eob = this.nInUse + 1;
    int nextSym = getAndMoveToFrontDecode0(0);
    int bsBuffShadow = this.bsBuff;
    int bsLiveShadow = this.bsLive;
    int lastShadow = -1;
    // Row views of the per-table 2D arrays for the current group.
    int zt = selector[groupNo] & 0xff;
    int[] base_zt = java.util.Arrays <int> .getIndexArray(baseJ, zt); // baseJ[zt];
    int[] limit_zt = java.util.Arrays <int> .getIndexArray(limit, zt); // limit[zt];
    int[] perm_zt = java.util.Arrays <int> .getIndexArray(perm, zt); // perm[zt];
    int minLens_zt = minLens[zt];

    while (nextSym != eob)
    {
        if ((nextSym == BZip2Constants.RUNA) || (nextSym == BZip2Constants.RUNB))
        {
            // Decode a run length encoded as a sequence of RUNA/RUNB symbols
            // (bijective base-2 representation of the run length).
            int s = -1;
            for (int n = 1; true; n <<= 1)
            {
                if (nextSym == BZip2Constants.RUNA)
                {
                    s += n;
                }
                else if (nextSym == BZip2Constants.RUNB)
                {
                    s += n << 1;
                }
                else
                {
                    break;
                }

                // Switch Huffman group every G_SIZE symbols.
                if (groupPos == 0)
                {
                    groupPos = BZip2Constants.G_SIZE - 1;
                    zt = selector[++groupNo] & 0xff;
                    base_zt = java.util.Arrays <int> .getIndexArray(baseJ, zt); // baseJ[zt];
                    limit_zt = java.util.Arrays <int> .getIndexArray(limit, zt); // limit[zt];
                    perm_zt = java.util.Arrays <int> .getIndexArray(perm, zt); // perm[zt];
                    minLens_zt = minLens[zt];
                }
                else
                {
                    groupPos--;
                }

                int zn = minLens_zt;

                // Inlined:
                // int zvec = bsR(zn);
                while (bsLiveShadow < zn)
                {
                    int thech = inShadow.read();
                    if (thech >= 0)
                    {
                        bsBuffShadow = (bsBuffShadow << 8) | thech;
                        bsLiveShadow += 8;
                        continue;
                    }
                    else
                    {
                        throw new java.io.IOException("unexpected end of stream");
                    }
                }
                int zvec = (bsBuffShadow >> (bsLiveShadow - zn)) & ((1 << zn) - 1);
                bsLiveShadow -= zn;

                // Widen the code until it is a valid codeword for this table.
                while (zvec > limit_zt[zn])
                {
                    zn++;
                    while (bsLiveShadow < 1)
                    {
                        int thech = inShadow.read();
                        if (thech >= 0)
                        {
                            bsBuffShadow = (bsBuffShadow << 8) | thech;
                            bsLiveShadow += 8;
                            continue;
                        }
                        else
                        {
                            throw new java.io.IOException("unexpected end of stream");
                        }
                    }
                    bsLiveShadow--;
                    zvec = (zvec << 1) | ((bsBuffShadow >> bsLiveShadow) & 1);
                }
                nextSym = perm_zt[zvec - base_zt[zn]];
            }

            // Emit the run: s+1 copies of the current MTF front byte.
            byte ch = seqToUnseq[yy[0]];
            unzftab[ch & 0xff] += s + 1;
            while (s-- >= 0)
            {
                ll8[++lastShadow] = ch;
            }

            if (lastShadow >= limitLast)
            {
                throw new java.io.IOException("block overrun");
            }
        }
        else
        {
            if (++lastShadow >= limitLast)
            {
                throw new java.io.IOException("block overrun");
            }

            // Move-to-front: fetch symbol at position nextSym-1 and shift
            // the prefix of the MTF list up by one.
            char tmp = yy[nextSym - 1];
            unzftab[seqToUnseq[tmp] & 0xff]++;
            ll8[lastShadow] = seqToUnseq[tmp];

            /*
             * This loop is hammered during decompression, hence avoid
             * native method call overhead of System.arraycopy for very
             * small ranges to copy.
             */
            if (nextSym <= 16)
            {
                for (int j = nextSym - 1; j > 0;)
                {
                    yy[j] = yy[--j];
                }
            }
            else
            {
                java.lang.SystemJ.arraycopy(yy, 0, yy, 1, nextSym - 1);
            }
            yy[0] = tmp;

            // Switch Huffman group every G_SIZE symbols.
            if (groupPos == 0)
            {
                groupPos = BZip2Constants.G_SIZE - 1;
                zt = selector[++groupNo] & 0xff;
                base_zt = java.util.Arrays <int> .getIndexArray(baseJ, zt); // baseJ[zt];
                limit_zt = java.util.Arrays <int> .getIndexArray(limit, zt); //limit[zt];
                perm_zt = java.util.Arrays <int> .getIndexArray(perm, zt); //perm[zt];
                minLens_zt = minLens[zt];
            }
            else
            {
                groupPos--;
            }

            int zn2 = minLens_zt;

            // Inlined:
            // int zvec = bsR(zn);
            while (bsLiveShadow < zn2)
            {
                int thech = inShadow.read();
                if (thech >= 0)
                {
                    bsBuffShadow = (bsBuffShadow << 8) | thech;
                    bsLiveShadow += 8;
                    continue;
                }
                else
                {
                    throw new java.io.IOException("unexpected end of stream");
                }
            }
            int zvec2 = (bsBuffShadow >> (bsLiveShadow - zn2)) & ((1 << zn2) - 1);
            bsLiveShadow -= zn2;

            while (zvec2 > limit_zt[zn2])
            {
                zn2++;
                while (bsLiveShadow < 1)
                {
                    int thech = inShadow.read();
                    if (thech >= 0)
                    {
                        bsBuffShadow = (bsBuffShadow << 8) | thech;
                        bsLiveShadow += 8;
                        continue;
                    }
                    else
                    {
                        throw new java.io.IOException("unexpected end of stream");
                    }
                }
                bsLiveShadow--;
                zvec2 = (zvec2 << 1) | ((bsBuffShadow >> bsLiveShadow) & 1);
            }
            nextSym = perm_zt[zvec2 - base_zt[zn2]];
        }
    }

    // Write back the decode state consumed by later stages.
    this.last = lastShadow;
    this.bsLive = bsLiveShadow;
    this.bsBuff = bsBuffShadow;
}
/**
 * Reads one full block from the underlying stream into blockBuffer,
 * accumulating partial reads until the block is complete.
 *
 * Fix: removed the "if (numBytes != blockSize) throw" check inside the
 * accumulation loop. InputStream.read may legally return fewer bytes
 * than requested; the surrounding while loop already handles partial
 * reads by design, so the check incorrectly rejected any non-full read.
 *
 * @return false if End-Of-File, else true
 * @throws IOException if this buffer was opened for output, or on read error
 */
private bool readBlock()
//throws IOException
{
    if (inStream == null)
    {
        throw new java.io.IOException("reading from an output buffer");
    }

    currRecIdx = 0;

    int offset = 0;
    int bytesNeeded = blockSize;
    while (bytesNeeded > 0)
    {
        long numBytes = inStream.read(blockBuffer, offset, bytesNeeded);

        //
        // NOTE
        // We have fit EOF, and the block is not full!
        //
        // This is a broken archive. It does not follow the standard
        // blocking algorithm. However, because we are generous, and
        // it requires little effort, we will simply ignore the error
        // and continue as if the entire block were read. This does
        // not appear to break anything upstream. We used to return
        // false in this case.
        //
        // Thanks to '*****@*****.**' for this fix.
        //
        if (numBytes == -1)
        {
            if (offset == 0)
            {
                // Ensure that we do not read gigabytes of zeros
                // for a corrupt tar file.
                // See http://issues.apache.org/bugzilla/show_bug.cgi?id=39924
                return false;
            }

            // However, just leaving the unread portion of the buffer dirty does
            // cause problems in some cases. This problem is described in
            // http://issues.apache.org/bugzilla/show_bug.cgi?id=29877
            //
            // The solution is to fill the unused portion of the buffer with zeros.
            java.util.Arrays <byte> .fill(blockBuffer, offset, offset + bytesNeeded, (byte)0);

            break;
        }

        // Partial read: advance and loop until the block is complete.
        offset = (int)(offset + numBytes);
        bytesNeeded = (int)(bytesNeeded - numBytes);
    }

    currBlkIdx++;
    return true;
}
/*
 * Reads up to 'length' bytes of the current ZIP entry into 'buffer' at
 * 'start'. Handles two paths: STORED entries (copied straight from the
 * raw stream, optionally via a data-descriptor side channel) and
 * DEFLATED entries (pumped through the Inflater). Updates the running
 * CRC with every byte handed to the caller.
 *
 * @return the number of bytes read, or -1 at end of entry/stream
 * @throws IOException if the stream is closed, the ZIP data is
 *         truncated or malformed, or a required feature is unsupported
 */
public override int read(byte[] buffer, int start, int length)
//throws IOException
{
    if (closed)
    {
        throw new java.io.IOException("The stream is closed");
    }
    if (inf.finished() || current == null)
    {
        return (-1);
    }

    // avoid int overflow, check null buffer
    if (start <= buffer.Length && length >= 0 && start >= 0 && buffer.Length - start >= length)
    {
        ZipUtil.checkRequestedFeatures(current);
        if (!supportsDataDescriptorFor(current))
        {
            throw new UnsupportedZipFeatureException(Feature.DATA_DESCRIPTOR, current);
        }

        if (current.getMethod() == ZipArchiveOutputStream.STORED)
        {
            // STORED entry: bytes are copied verbatim from the stream.
            if (hasDataDescriptor)
            {
                // Entry size is only known from the trailing data
                // descriptor; buffer the whole entry on first access.
                if (lastStoredEntry == null)
                {
                    readStoredEntry();
                }
                return (lastStoredEntry.read(buffer, start, length));
            }

            int csize = (int)current.getSize();
            if (readBytesOfEntry >= csize)
            {
                return (-1);
            }

            // Refill the internal transfer buffer when exhausted.
            if (offsetInBuffer >= lengthOfLastRead)
            {
                offsetInBuffer = 0;
                if ((lengthOfLastRead = inJ.read(buf)) == -1)
                {
                    return (-1);
                }
                count(lengthOfLastRead);
                bytesReadFromStream += lengthOfLastRead;
            }

            // Clamp the copy to what is buffered and what the entry still owes.
            int toRead = length > lengthOfLastRead ? lengthOfLastRead - offsetInBuffer : length;
            if ((csize - readBytesOfEntry) < toRead)
            {
                toRead = csize - readBytesOfEntry;
            }
            java.lang.SystemJ.arraycopy(buf, offsetInBuffer, buffer, start, toRead);
            offsetInBuffer += toRead;
            readBytesOfEntry += toRead;
            crc.update(buffer, start, toRead);
            return (toRead);
        }

        // DEFLATED entry: feed the inflater more input when it asks.
        if (inf.needsInput())
        {
            fill();
            if (lengthOfLastRead > 0)
            {
                bytesReadFromStream += lengthOfLastRead;
            }
        }

        int read = 0;
        try
        {
            read = inf.inflate(buffer, start, length);
        }
        catch (java.util.zip.DataFormatException e)
        {
            throw new java.util.zip.ZipException(e.getMessage());
        }

        if (read == 0)
        {
            if (inf.finished())
            {
                return (-1);
            }
            else if (lengthOfLastRead == -1)
            {
                // Inflater stalled and the underlying stream hit EOF.
                throw new java.io.IOException("Truncated ZIP file");
            }
        }
        crc.update(buffer, start, read);
        return (read);
    }
    throw new java.lang.ArrayIndexOutOfBoundsException();
}