// Joins a gzip-compressed tar archive: decompresses 'fichero' and extracts
// every regular-file entry into 'dirDest'.
// Note: TarInputStream silently skips some entries, so the reported
// progress will not be perfectly accurate.
// Fix: streams were leaked if an exception occurred mid-extraction; the
// output file and the tar/gzip/input chain are now closed via try/finally.
public void Unir(FileInfo fichero, DirectoryInfo dirDest)
{
    DalleStream dstream = new DalleStream (fichero);
    byte[] buffer = new byte[Consts.BUFFER_LENGTH];
    OnProgress (0, dstream.Length);
    TarInputStream tarStream = null;
    try {
        Stream gzipStream = new GZipStream (dstream, CompressionMode.Decompress);
        tarStream = new TarInputStream (gzipStream);
        TarEntry tarEntry = null;
        OnProgress (0, 1);
        while ((tarEntry = tarStream.GetNextEntry ()) != null) {
            if (tarEntry.IsDirectory) {
                continue;
            }
            // Limit reads to the current entry's declared size.
            Stream entrada = new SizeLimiterStream (tarStream, tarEntry.Size);
            Stream salida = UtilidadesFicheros.CreateWriter (dirDest.FullName + Path.DirectorySeparatorChar + tarEntry.Name);
            try {
                int leidos = 0;
                while ((leidos = entrada.Read (buffer, 0, buffer.Length)) > 0) {
                    salida.Write (buffer, 0, leidos);
                    // +1 so we never report 100% before the archive is fully read.
                    OnProgress (dstream.Position, dstream.Length + 1);
                }
            } finally {
                // Always release the output file, even if a write fails.
                salida.Close ();
            }
        }
    } finally {
        if (tarStream != null) {
            // Closing the tar stream also closes the gzip and input streams.
            tarStream.Close ();
        } else {
            // Tar/gzip chain never got built; close the raw input directly.
            dstream.Close ();
        }
    }
    OnProgress (1, 1);
}
// Joins a file split with Axman 3: validates the 23-byte header of every
// numbered fragment and concatenates the payloads into the original file.
// The queue metadata (name, fragment count, original size) is loaded from
// the last existing fragment. If the base name ends in ".zip" the joined
// result is additionally unpacked with the Zip handler and the intermediate
// file deleted.
protected override void _Unir(string fichero, string dirDest)
{
    long transferidos = 0;
    string formato = "";
    bool comprimido = false;
    // Base name without the numbered tail (7 chars trimmed from the end).
    string bas = fichero.Substring (0, fichero.LastIndexOf ('.', fichero.Length - 7));
    if (bas.ToLower ().EndsWith (".zip")) {
        comprimido = true;
    }
    // Filename pattern for each fragment: <base>.<n>.<extension>.
    formato = bas + ".{0}." + fichero.Substring (fichero.Length - 5);
    int i = 1;
    string f = String.Format (formato, i);
    // Scan until the first missing fragment...
    while (File.Exists (f)) {
        i++;
        f = String.Format (formato, i);
    }
    // ...then point back at the last fragment that does exist.
    f = String.Format (formato, i - 1);
    ColaAxman c = ColaAxman3.LoadFromFile (f);
    string destino = dirDest + Path.DirectorySeparatorChar + c.Nombre;
    Stream os = UtilidadesFicheros.CreateWriter (destino);
    byte[] buffer = new byte[Consts.BUFFER_LENGTH];
    int leidos = 0;
    for (i = 1; i <= c.Fragmentos; i++) {
        f = String.Format (formato, i);
        Stream ins = File.OpenRead (f);
        // Every fragment starts with a 23-byte Axman header.
        if (23 != ins.Read (buffer, 0, 23)) {
            throw new IOException ("Unexpected end of file:" + f);
        }
        CabeceraAxman cabAxman = CabeceraAxman.LoadFromArray (buffer);
        // The header records which fragment this is; it must match the index.
        if (cabAxman.Fragmento != i) {
            throw new Dalle.Formatos.FileFormatException ();
        }
        crc.Reset ();
        Stream inStream = ins;
        if (i == c.Fragmentos) {
            // Last fragment: read only the bytes still missing from the
            // original size (anything after them is not payload).
            inStream = new SizeLimiterStream (ins, c.TamanoOriginal - transferidos);
        }
        while ((leidos = inStream.Read (buffer, 0, buffer.Length)) > 0) {
            os.Write (buffer, 0, leidos);
            transferidos += leidos;
            OnProgress (transferidos, c.TamanoOriginal);
        }
        ins.Close ();
    }
    os.Close ();
    if (comprimido) {
        // The joined file is itself a zip: unpack it, forwarding progress.
        Dalle.Formatos.Zip.Zip zip = new Dalle.Formatos.Zip.Zip ();
        zip.Progress += new ProgressEventHandler (this.OnProgress);
        zip.Unir (destino);
        try {
            File.Delete (destino);
        } catch (Exception) {
            // Best effort: leaving the intermediate zip behind is not fatal.
        }
    }
}
// Parses the RPM lead, signature and header sections, then wraps the rest
// of the input in the decompression stream named by the payload compressor
// and finally in the cpio archive reader. Throws IOException for any
// compressor or payload format this implementation does not support.
private void Init()
{
    ReadRPMLead ();
    ReadRPMSignature ();
    ReadRPMHeader ();
    if (this.payloadSize > 0) {
        this.totalLength = this.Position + this.payloadSize;
    }
    // Restrict the payload stream to whatever remains of the input.
    long restante = this.inputStream.Length - this.inputStream.Position;
    Stream payload = new SizeLimiterStream (this.inputStream, restante);
    if (payloadCompressor == "gzip") {
        payload = new GZipStream (payload, CompressionMode.Decompress);
    } else if (payloadCompressor == "bzip2") {
        payload = new BZip2InputStream (payload);
    } else if (payloadCompressor == "lzma") {
        payload = new LZMAInputStream (payload);
    } else {
        throw new IOException ("Unsupported payload compression:" + payloadCompressor);
    }
    if (payloadFormat == "cpio") {
        cpioStream = new CpioArchiveInputStream (payload);
    } else {
        throw new IOException ("Unsupported payload format:" + payloadFormat);
    }
}
// Joins a .gsp fragment set: concatenates baseName.001.gsp, .002.gsp, ...
// into the original file while computing its MD5 through a HashStream,
// then — if the 69-byte header in the first fragment says a digest is
// present — compares it against the 32-hex-char MD5 stored after the data
// in the last fragment.
protected override void _Unir(string fichero, string dirDest)
{
    OnProgress (0, 1);
    string nombre = new FileInfo (fichero).Name.Replace (".001.gsp", "");
    string baseName = fichero.Replace (".001.gsp", "");
    string fSalida = dirDest + Path.DirectorySeparatorChar + nombre;
    Stream salida = UtilidadesFicheros.CreateWriter (fSalida);
    // Everything written to the output is hashed on the fly.
    HashStream hs = new HashStream (salida, MD5.Create ());
    int i = 0;
    byte[] buffer = new byte[Consts.BUFFER_LENGTH];
    int bytesRead = 0;
    long parcial = 0;           // bytes written so far
    long total = 0;             // original file size, read from the header
    int fragmentosTotales = 0;  // fragment count, read from the header
    bool hasMd5 = false;
    string fileMd5 = "";
    //string headerVersion;
    //string headerFileName;
    for (i = 1; File.Exists (String.Format (baseName + ".{0:000}.gsp", i)); i++) {
        String nombreFragmento = String.Format (baseName + ".{0:000}.gsp", i);
        Stream fileInStream = File.OpenRead (nombreFragmento);
        Stream inStream = fileInStream;
        if (i == 1) {
            // Read the 69-byte header from the first fragment:
            //   0x38 = has-MD5 flag, 0x39 = fragment count (int32 BE),
            //   0x3D = original size (int64 BE).
            bytesRead = inStream.Read (buffer, 0, 69);
            if (bytesRead != 69) {
                throw new IOException ("Premature end of file");
            }
            hasMd5 = (buffer[0x38] != 0);
            fragmentosTotales = UtArrays.LeerInt32BE (buffer, 0x39);
            //headerVersion = UtArrays.LeerTexto (buffer, 1, 4);
            //headerFileName = UtArrays.LeerTexto (buffer, 6, 50);
            total = UtArrays.LeerInt64BE(buffer, 0x3D);
        }
        if (i == fragmentosTotales && hasMd5) {
            // The last fragment carries the digest after the data: stop at
            // the data boundary so the digest is not written to the output.
            inStream = new SizeLimiterStream (inStream, total - parcial);
        }
        while ((bytesRead = inStream.Read (buffer, 0, buffer.Length)) > 0) {
            hs.Write (buffer, 0, bytesRead);
            parcial += bytesRead;
            OnProgress(parcial, total);
        }
        if (i == fragmentosTotales && hasMd5) {
            // Stored digest: 32 hex characters immediately after the data.
            bytesRead = fileInStream.Read (buffer, 0, 32);
            if (bytesRead != 32) {
                throw new IOException ("Premature end of file:" + nombreFragmento);
            }
            fileMd5 = UtArrays.LeerTexto (buffer, 0, 32);
        }
        fileInStream.Close ();
    }
    hs.Close ();
    // Case-insensitive hex comparison of stored vs. computed digest.
    if (hasMd5 && !fileMd5.ToLower().Equals (hs.Hash.ToLower())) {
        throw new IOException ("md5 verification failed");
    }
    OnProgress (parcial, total);
}
// Returns the next entry of the ar archive, or null at a clean end of file.
// On the first call it validates the global archive header; between members
// it skips the padding byte inserted after odd-sized data; it then parses
// the 60-byte member header and resolves GNU ("//" name table, "/N" offset
// references) and BSD ("#1/N" inline names) extended-filename schemes.
public ArArchiveEntry GetNextArEntry()
{
    // read to the end of current entry data
    if (currentEntry != null) {
        long iread = 0;
        do {
            iread = this.Read (buffer, 0, buffer.Length);
        } while (iread > 0);
        currentEntry = null;
    }
    if (this.Position == 0) {
        // First entry
        // File header
        byte[] expected = new System.Text.ASCIIEncoding ().GetBytes (ArArchiveEntry.HEADER);
        byte[] realized = new byte[expected.Length];
        int read = inputStream.Read (realized, 0, expected.Length);
        this.Count (read);
        if (read != expected.Length) {
            throw new IOException ("failed to read header. Occured at byte: " + this.Position);
        }
        for (int i = 0; i < expected.Length; i++) {
            if (expected[i] != realized[i]) {
                throw new IOException ("invalid header " + new System.Text.ASCIIEncoding ().GetString (realized));
            }
        }
    }
    // Member headers start on even offsets; odd-sized data is followed by
    // one padding byte.
    if (this.Position % 2 != 0) {
        if (inputStream.ReadByte () < 0) {
            // hit eof
            return null;
        }
        this.Count (1);
    }
    byte[] header = new byte[60];
    int rea = this.inputStream.Read (header, 0, header.Length);
    this.Count (rea);
    if (rea <= 0) {
        // Clean end of file;
        return null;
    }
    if (rea != header.Length) {
        throw new IOException ("invalid header");
    }
    // Every member header must end with the magic bytes 0x60 0x0A ("`\n").
    if (header[58] != 0x60 || header[59] != 0x0A) {
        throw new IOException ("invalid magic tail on header");
    }
    // Name is the first 16 bytes; decimal size is at offset 48, width 10.
    string name = Utilidades.UtArrays.LeerTexto (header, 0, 16).Trim ();
    long size = long.Parse (Utilidades.UtArrays.LeerTexto (header, 48, 10).Trim ());
    if (name.EndsWith ("/")) {
        name = name.Substring (0, name.Length - 1);
    }
    // TODO Read the remaining header fields
    currentEntry = new ArArchiveEntry (name, size);
    // GNU AR
    if (currentEntry.Name.Equals ("/")) {
        // GNU filename table entry: load it, then recurse to the real next entry.
        ReadGNUFilenamesEntry ();
        return GetNextArEntry ();
    } else if (currentEntry.Name.StartsWith ("/") && gnuNames.Count > 0) {
        // "/N" is an offset into the previously loaded GNU name table.
        currentEntry.name = (string)gnuNames[currentEntry.Name.Substring(1)];
    }
    // BSD AR
    if (currentEntry.Name.StartsWith ("#1/")) {
        // "#1/N": the real filename is the first N bytes of the member data.
        string t = currentEntry.Name.Substring (3);
        int s = int.Parse (t);
        if (s > 2048) {
            throw new IOException ("Filename too long (bsd)");
        }
        byte[] buffer2 = new byte[s];
        int l = this.inputStream.Read (buffer2, 0, buffer2.Length);
        if (l != s) {
            throw new IOException ("Filename error (bsd)");
        }
        currentEntry.name = Utilidades.UtArrays.LeerTexto (buffer2, 0);
    }
    // Expose exactly this member's data through a size-limited stream.
    dataStream = new SizeLimiterStream (this.inputStream, currentEntry.Size);
    return currentEntry;
}
// Joins an Xtremsplit set described by the metadata in 'fichero': first
// pass copies every fragment's payload into the original file (collecting
// the stored per-fragment MD5 hashes from the tail of the last fragment),
// second pass re-hashes each fragment and compares the two lists.
protected override void _Unir(string fichero, string dirDest)
{
    XtremsplitInfo info = XtremsplitInfo.GetFromFile (fichero);
    if (info == null) {
        throw new FileFormatException ("");
    }
    byte[] buffer = new byte[Consts.BUFFER_LENGTH];
    int leidos = 0;
    long transferidos = 0;
    OnProgress (0, info.Length);
    string[] hashesAlmacenados = new string[info.Fragments];
    string[] hashesCalculados = new string[info.Fragments];
    Stream fos = UtilidadesFicheros.CreateWriter (dirDest + Path.DirectorySeparatorChar + info.OriginalFileName);
    for (int i = 1; i <= info.Fragments; i++) {
        Stream fis = File.OpenRead (info.GetFragmentName (i));
        // Skip this fragment's header bytes.
        fis.Seek (info.GetOffset (i), SeekOrigin.Begin);
        Stream cis = fis;
        if (info.IsExe && i == 1) {
            // Self-extracting set: fragment 1 only carries DataSizeInExe payload bytes.
            cis = new SizeLimiterStream (cis, info.DataSizeInExe);
        }
        // Never read past the original length: the tail of the last
        // fragment holds the stored hashes, not payload.
        while ((leidos = cis.Read (buffer, 0, (int)Math.Min (buffer.Length, info.Length - transferidos))) > 0) {
            fos.Write (buffer, 0, leidos);
            transferidos += leidos;
            OnProgress (transferidos, info.Length);
        }
        if (transferidos == info.Length && info.IsMd5) {
            // All payload copied: the stream position is now at the stored
            // hash block (one 32-hex-char MD5 per fragment).
            byte[] hash = new byte[32];
            for (int j = 0; j < hashesAlmacenados.Length; j++) {
                int lei = fis.Read (hash, 0, hash.Length);
                if (lei == 32) {
                    hashesAlmacenados[j] = UtArrays.LeerTexto (hash, 0).ToUpper ();
                }
            }
        }
        fis.Close ();
    }
    fos.Close ();
    if (info.IsMd5) {
        HashAlgorithm md5 = MD5.Create ();
        for (int i = 1; i <= info.Fragments; i++) {
            Stream fis = File.OpenRead (info.GetFragmentName (i));
            Stream cis = fis;
            if (info.IsExe && i == 1) {
                // NOTE(review): for a self-extracting set this hashes the
                // Xtremsplit header plus the payload of fragment 1 — verify
                // this matches what the splitter originally hashed.
                fis.Seek (info.GetOffset (i) - XtremsplitInfo.HEADER_SIZE, SeekOrigin.Begin);
                cis = new SizeLimiterStream (cis, info.DataSizeInExe + XtremsplitInfo.HEADER_SIZE);
            }
            if (i == info.Fragments) {
                // Exclude the stored hash block at the end of the last fragment.
                cis = new SizeLimiterStream (cis, new FileInfo (info.GetFragmentName (i)).Length - info.Fragments * 32);
            }
            byte[] res = md5.ComputeHash (cis);
            fis.Close ();
            hashesCalculados[i - 1] = UtilidadesCadenas.FormatHexHash (res).ToUpper ();
        }
        // Compare the stored and computed results.
        for (int i = 0; i < info.Fragments; i++) {
            if (!hashesCalculados[i].Equals (hashesAlmacenados[i])) {
                throw new ChecksumVerificationException ("Checksum verification failed",info.GetFragmentName(i+1));
            }
        }
    }
}
// Parses the GNU ar extended-filename table entry: reads the whole table
// into memory and maps each name's starting byte offset (stringified, as
// used by "/N" references) to the filename, stripping the trailing '/'
// terminator from each line.
private void ReadGNUFilenamesEntry()
{
    dataStream = new SizeLimiterStream (this.inputStream, this.currentEntry.Size);
    byte[] tabla = new byte[this.currentEntry.Size];
    int totalLeidos = this.Read (tabla, 0, tabla.Length);
    if (totalLeidos != tabla.Length) {
        throw new IOException ();
    }
    string contenido = Utilidades.UtArrays.LeerTexto (tabla, 0);
    int offset = 0;
    foreach (string linea in contenido.Split (new char[] { '\n' })) {
        // Names in the table end with '/'; store them without it.
        string nombre = linea.EndsWith ("/")
            ? linea.Substring (0, linea.Length - 1)
            : linea;
        gnuNames.Add ("" + offset, nombre);
        // Next name starts after this line plus its '\n' separator.
        offset += linea.Length + 1;
    }
}
// Extracts a Camouflage container: the first stored file is the "skin"
// (the visible carrier file), followed by the hidden files, each stored as
// a sequence of 4-byte-length-prefixed scrambled chunks terminated by a
// zero-length chunk. Each file's payload is followed by a 26-byte
// permissions/timestamps record, which is read and discarded.
protected override void _Unir(string fichero, string dirDest)
{
    OnProgress (0, 1);
    long total = new FileInfo (fichero).Length;
    CamouflageMetaInfo info = CamouflageMetaInfo.LoadFromFile (fichero);
    if (info == null) {
        throw new Exception ();
    }
    long pos = 0;
    long largoPiel = info.Archivos[0].Tamano;   // size of the skin (first file)
    string destino = dirDest + Path.DirectorySeparatorChar + fichero;
    int leidos = 0;
    byte[] buffer = new byte[Consts.BUFFER_LENGTH];
    FileStream inStream = File.OpenRead (fichero);
    if (true) {
        // This code extracts the skin
        destino = dirDest + Path.DirectorySeparatorChar + info.Archivos[0].Nombre;
        Stream os = UtilidadesFicheros.CreateWriter (destino);
        // The skin is stored unscrambled at the start of the container.
        Stream sls = new SizeLimiterStream (inStream, largoPiel);
        while ((leidos = sls.Read (buffer, 0, buffer.Length)) > 0) {
            os.Write (buffer, 0, leidos);
            pos += leidos;
            OnProgress (pos, total);
        }
        os.Close ();
        // Permissions and access times (26-byte record, skipped)
        if (26 != inStream.Read (buffer, 0, 26)) {
            throw new IOException ("Unexpected end of file");
        }
        pos += 26;
    }
    byte[] lc = new byte[4];            // 4-byte chunk-length prefix
    byte[] chunk = new byte[1024 * 1024];
    Aleatorizador aleat = new Aleatorizador ();
    for (int i = 1; i < info.Archivos.Length; i++) {
        destino = dirDest + Path.DirectorySeparatorChar + info.Archivos[i].Nombre;
        Stream os = UtilidadesFicheros.CreateWriter (destino);
        int largoChunk = 0;
        do {
            if (4 != inStream.Read (lc, 0, 4)) {
                throw new IOException ("Unexpected end of file");
            }
            largoChunk = UtArrays.LeerInt32 (lc, 0);
            pos += 4;
            if (largoChunk > 0) {
                // Each chunk is descrambled independently; reset state first.
                aleat.Reset ();
                leidos = inStream.Read (chunk, 0, largoChunk);
                if (leidos != largoChunk) {
                    throw new IOException ("Unexpected end of file");
                }
                pos += largoChunk;
                aleat.Desencriptar (chunk, 0, leidos);
                os.Write (chunk, 0, leidos);
            }
            OnProgress (pos, total);
        } while (largoChunk > 0);   // a zero-length chunk ends the file
        os.Close ();
        // Permissions and access times (26-byte record, skipped)
        if (26 != inStream.Read (buffer, 0, 26)) {
            throw new IOException ("Unexpected end of file");
        }
        pos += 26;
    }
    inStream.Close ();
    OnProgress (total, total);
    //byte[] perm = UtilidadesFicheros.LeerSeek (fichero, pos, 26);
    //info.Archivos[0].Permisos = UtArrays.LeerInt16 (perm, 0);
    //info.Archivos[0].Creado = UtArrays.LeerDateTime (perm, 2);
    //info.Archivos[0].Accedido = UtArrays.LeerDateTime (perm, 10);
    //info.Archivos[0].Modificado = UtArrays.LeerDateTime (perm, 18);
    //FileInfo fi = new FileInfo (destino);
    /* Should work, but the program fails
    fi.CreationTime = info.Archivos[0].Creado;
    fi.LastAccessTime = info.Archivos[0].Accedido;
    fi.LastWriteTime = info.Archivos[0].Modificado;
    */
    //fi.Attributes = (FileAttributes) info.Archivos[0].Permisos;
}
// Extracts a tar archive (optionally bz2-, gz- or lzma-compressed, chosen
// by file extension) into dirDest. Progress is reported against the known
// uncompressed size when available, otherwise against the compressed input
// position.
// Note: TarInputStream silently skips some entries, so the reported
// progress will not be perfectly accurate.
protected override void _Unir(string fichero, string dirDest)
{
    if (!File.Exists (fichero)) {
        return;
    }
    FileInfo fi = new FileInfo (fichero);
    long datosTotales = fi.Length;
    long uncompressedSize = 0;
    FileStream input = File.OpenRead (fichero);
    Stream input2 = input;
    if (fichero.ToLower ().EndsWith (".bz2") || fichero.ToLower ().EndsWith (".tbz2") || fichero.ToLower ().EndsWith (".tbz")) {
        // There is no immediate way to know the uncompressed size of a bz2.
        input2 = new BZip2InputStream (input);
    } else if (fichero.ToLower ().EndsWith (".gz") || fichero.ToLower ().EndsWith (".tgz")) {
        uncompressedSize = Dalle.Formatos.GZip.GZip.GetUncompressedSize (input);
        input2 = new GZipStream (input, CompressionMode.Decompress);
    } else if (fichero.ToLower ().EndsWith (".tar.lzma") || fichero.ToLower ().EndsWith ("tlz")) {
        input2 = new LZMAInputStream (input);
        uncompressedSize = ((LZMAInputStream)input2).UncompressedSize;
    }
    TarInputStream tarInput = new TarInputStream (input2);
    TarEntry tarEntry = null;
    byte[] buffer = new byte[Consts.BUFFER_LENGTH];
    OnProgress (0, 1);
    long transferidos = 0;
    while ((tarEntry = tarInput.GetNextEntry ()) != null) {
        // Account for the entry's 512-byte header block.
        // Note: TarInputStream silently ignores some entries, so the
        // progress will not be totally accurate.
        transferidos += 512;
        if (tarEntry.IsDirectory) {
            continue;
        }
        Stream entrada = new SizeLimiterStream (tarInput, tarEntry.Size);
        Stream salida = UtilidadesFicheros.CreateWriter (dirDest + Path.DirectorySeparatorChar + tarEntry.Name);
        int leidos = 0;
        while ((leidos = entrada.Read (buffer, 0, buffer.Length)) > 0) {
            salida.Write (buffer, 0, leidos);
            transferidos += leidos;
            if (uncompressedSize > 0) {
                OnProgress (transferidos, uncompressedSize);
            } else {
                OnProgress (input.Position, datosTotales);
            }
        }
        salida.Close ();
        // Entry data is padded to a 512-byte boundary. The trailing "% 512"
        // fixes a bug: an entry whose size is an exact multiple of 512 has
        // no padding, but the old code added a spurious 512 bytes.
        transferidos += (512 - (tarEntry.Size % 512)) % 512;
    }
    tarInput.Close ();
    OnProgress (1, 1);
}
// Reads and validates the fixed-size XAR file header (big-endian fields),
// then inflates and parses the table of contents (TOC), verifying the TOC
// checksum against the digest stored at the start of the heap when the
// header declares a hash algorithm.
private void ReadHeader()
{
    byte[] header = new byte[XAR_HEADER_SIZE];
    int leidos = this.inputStream.Read (header, 0, header.Length);
    this.Count (leidos);
    if (leidos != header.Length) {
        throw new IOException ("Invalid header, readed " + leidos + " bytes expected " + header.Length);
    }
    // Header layout (all big-endian): magic, header size, version,
    // compressed TOC length, uncompressed TOC length, checksum algorithm.
    int magic = UtArrays.LeerInt32BE (header, 0);
    int size = UtArrays.LeerInt16BE (header, 4);
    version = UtArrays.LeerInt16BE (header, 6);
    long toc_length_compressed = UtArrays.LeerInt64BE (header, 8);
    long toc_length_uncompressed = UtArrays.LeerInt64BE (header, 16);
    int cksum_alg = UtArrays.LeerInt32BE (header, 24);
    if (magic != XAR_HEADER_MAGIC) {
        throw new Exception ("Invalid header, not valid magic:" + magic);
    }
    if (size != header.Length) {
        throw new IOException ("Invalid header, size = " + size);
    }
    this.tocHashAlgorithm = (XarHashAlgorithm)cksum_alg;
    // The TOC occupies exactly toc_length_compressed bytes; optionally hash
    // it while reading so the checksum can be verified afterwards.
    Stream s = new SizeLimiterStream (this.inputStream, toc_length_compressed);
    HashStream hs1 = null;
    switch (this.tocHashAlgorithm) {
        case XarHashAlgorithm.Sha1:
            hs1 = new HashStream (s, SHA1.Create ());
            s = hs1;
            break;
        case XarHashAlgorithm.Md5:
            hs1 = new HashStream (s, MD5.Create ());
            s = hs1;
            break;
        default:
            break;
    }
    Stream st = new InflaterInputStream ( s, new Inflater (false) );
    this.ReadToc (st);
    st.Close ();
    // NOTE(review): Count takes an int — a TOC larger than int.MaxValue
    // bytes would be truncated here; confirm whether that is a realistic input.
    this.Count ((int)toc_length_uncompressed);
    // Total stream length = position after the TOC plus every entry's data.
    streamLength = this.Position;
    foreach (XarArchiveEntry e in entryList) {
        streamLength += e.Size;
    }
    if (this.tocHashAlgorithm != XarHashAlgorithm.None && this.tocHashOffset == 0) {
        // TOC digest is stored at the very start of the heap.
        byte[] h = new byte[this.tocHashSize];
        int br = this.inputStream.Read (h, 0, h.Length);
        this.heapPosition += br;
        string ss = Dalle.Utilidades.UtilidadesCadenas.FormatHexHash (h).ToLower ();
        if (!ss.Equals(hs1.Hash)){
            throw new IOException("Invalid toc checksum : " + hs1.Hash + " expected " + ss);
        }
    }
}
// Advances to the next entry parsed from the XAR TOC. Unread bytes of the
// previous entry are drained first so the heap stays aligned. Returns null
// once every entry has been consumed; directories get no data stream.
// For regular entries the heap data is wrapped, innermost to outermost, in:
// a size limiter, optional archived-checksum verification, decompression
// per the entry's encoding, and optional extracted-checksum verification.
public XarArchiveEntry GetNextXarEntry()
{
    if (dataStream != null) {
        // Drain whatever the caller did not read from the previous entry.
        while (this.Read (tmpBuffer) > 0) {
        }
        dataStream.Close ();
    }
    dataStream = null;
    if (entryCursor >= entryList.Count) {
        return null;
    }
    this.currentEntry = (XarArchiveEntry)entryList[entryCursor++];
    if (this.currentEntry.IsDirectory) {
        return this.currentEntry;
    }
    // Innermost layer: raw heap bytes, limited to this entry's stored length.
    Stream capa = new SizeLimiterStream (this.inputStream, this.currentEntry.Length);
    if (this.currentEntry.ArchivedChecksum != null) {
        if (this.currentEntry.HashAlgorithmArchived == XarHashAlgorithm.Md5) {
            capa = new HashStream (capa, MD5.Create (), this.currentEntry.ArchivedChecksum);
        } else if (this.currentEntry.HashAlgorithmArchived == XarHashAlgorithm.Sha1) {
            capa = new HashStream (capa, SHA1.Create (), this.currentEntry.ArchivedChecksum);
        }
    }
    // Decompression layer; an unknown encoding leaves dataStream null.
    if (this.currentEntry.Encoding == XarEncoding.None) {
        dataStream = capa;
    } else if (this.currentEntry.Encoding == XarEncoding.Gzip) {
        dataStream = new InflaterInputStream (capa, new Inflater (false));
    } else if (this.currentEntry.Encoding == XarEncoding.Bzip2) {
        dataStream = new BZip2InputStream (capa);
    }
    if (this.currentEntry.ExtractedChecksum != null) {
        if (this.currentEntry.HashAlgorithmExtracted == XarHashAlgorithm.Md5) {
            dataStream = new HashStream (dataStream, MD5.Create (), this.currentEntry.ExtractedChecksum);
        } else if (this.currentEntry.HashAlgorithmExtracted == XarHashAlgorithm.Sha1) {
            dataStream = new HashStream (dataStream, SHA1.Create (), this.currentEntry.ExtractedChecksum);
        }
    }
    return this.currentEntry;
}