// Injects replacement data for an archive entry. A registered injector for the
// entry's extension is tried first; unless conversion mode is active, the
// default injector is used as a fallback.
private void Inject(ArchiveEntry entry, String sourcePath)
{
    string extension = PathEx.GetMultiDotComparableExtension(entry.Name);
    string fullPath = sourcePath + extension;

    // Prefer an injector registered for this specific extension.
    IArchiveEntryInjector injector;
    if (_injectors.TryGetValue(extension, out injector))
    {
        // The injector may expect a different source extension than the entry's own.
        fullPath = sourcePath + injector.SourceExtension;
        if (injector.TryInject(_source, fullPath, _injectionData, entry))
        {
            _injected = true;
            return;
        }
    }

    // Fallback path: skipped when conversion mode is enabled.
    if (_conversion != true)
    {
        if (DefaultInjector.TryInject(_source, fullPath, _injectionData, entry))
        {
            _injected = true;
        }
    }
}
// Extracts every wave stream contained in an SCD entry. The first wave goes to
// the current output stream; each additional wave gets its own numbered
// stream ("001", "002", ...).
public void Extract(ArchiveEntry entry, StreamSequence output, Stream input, Byte[] buff)
{
    // Zero-length entries have nothing to extract.
    if ((int)entry.UncompressedSize == 0)
        return;

    WaveStream[] waves = new ScdFileReader(input).Read();
    if (waves.Length == 0)
        return;

    Extract(output, waves[0], buff);

    for (int index = 1; index < waves.Length; index++)
    {
        if (!output.TryCreateNextStream(index.ToString("D3")))
            throw new InvalidDataException();

        Extract(output, waves[index], buff);
    }
}
// Repeatedly opens, hashes, and closes every entry of the archive to exercise
// entry lifecycle handling across multiple iterations.
public void Test003(string archiveName, int iterations)
{
    var fileName = Path.Combine(Location, "Media", archiveName);
    var entryCount = Utils.GetEntryCount(fileName);

    Utils.Iterations(iteration =>
    {
        for (var index = 0; index < entryCount; index++)
        {
            IntPtr entry;
            if (!ArchiveEntry.OpenEntry(fileName, index, out entry))
            {
                Assert.Fail("Failed to open entry.");
            }

            try
            {
                // Hashing consumes the entry, so it must be at EOF afterwards.
                Utils.GetEntryHashCode(entry);
                Assert.IsTrue(ArchiveEntry.IsEOF(entry));
            }
            finally
            {
                ArchiveEntry.CloseEntry(entry);
            }
        }
    }, iterations);
}
/// <summary>
/// Loads a picture described by a history entry. History entries carry just
/// enough information to reload a file: the picture's full name and,
/// optionally, the archive file it came from.
/// </summary>
/// <param name="tre">The history entry to load.</param>
/// <returns>true when the picture was loaded successfully; otherwise false.</returns>
public bool LoadPicture(TextRepresentationEntry tre)
{
    // No archive recorded: the entry points at a plain file on disk.
    if (tre.ArchiveFile == String.Empty)
        return LoadPicture(tre.FullName);

    if (!File.Exists(tre.ArchiveFile))
        return false;

    List<ArchiveEntry> imageFiles = ArchiveEntry.GetImageFiles(tre.ArchiveFile);
    if (imageFiles.Count == 0)
        return false;

    // The image may have been deleted from the archive since it was recorded.
    int index = imageFiles.FindIndex(x => x.InternalArchiveFullName.Equals(tre.FullName));
    if (index == -1)
        return false;

    directoryIndex = index;
    entries = imageFiles.ToList<IEntry>();
    return loadPicture(entries[index]);
}
// PUT: updates an existing archive entry. Returns 400 for invalid payloads or
// a mismatched id, 404 when the entry vanished concurrently, 204 on success.
public IHttpActionResult PutArchiveEntry(Guid id, ArchiveEntry archiveEntry)
{
    if (!ModelState.IsValid)
        return BadRequest(ModelState);

    // The route id must match the entity being updated.
    if (id != archiveEntry.Id)
        return BadRequest();

    db.Entry(archiveEntry).State = EntityState.Modified;

    try
    {
        db.SaveChanges();
    }
    catch (DbUpdateConcurrencyException)
    {
        // Entry deleted concurrently -> 404; any other conflict propagates.
        if (!ArchiveEntryExists(id))
            return NotFound();

        throw;
    }

    return StatusCode(HttpStatusCode.NoContent);
}
/// <inheritdoc/>
public virtual Collection<PackageDependency> DetermineProvides(ArchiveEntry entry)
{
    // For now, report no provides at all. Could be enhanced if ELF parsing is available.
    return new Collection<PackageDependency>();
}
// POST: creates a new archive entry owned by the current user.
public IHttpActionResult PostArchiveEntry(CreateArchiveEntryModel archiveEntry)
{
    var userId = this.User.Identity.GetUserId();

    if (!ModelState.IsValid)
        return BadRequest(ModelState);

    // Copy the incoming model into a fresh entity, tagged with the caller's id.
    var newEntry = new ArchiveEntry
    {
        FirstName = archiveEntry.FirstName,
        MiddleName = archiveEntry.MiddleName,
        LastName = archiveEntry.LastName,
        Suffix = archiveEntry.Suffix,
        UserId = userId
    };

    db.ArchiveEntries.Add(newEntry);

    try
    {
        db.SaveChanges();
    }
    catch (Exception ex)
    {
        return InternalServerError(ex);
    }

    return Created(@"~\Archive", newEntry);
}
// Constructs the writer form. Validates the target directory and the entry
// list; shows an error dialog and closes the form when either is invalid.
public ArchiveWriter(string archive_path, ArchiveEntry[] arc_entries)
{
    InitializeComponent();

    bool entriesValid = ArchiveEntry.CheckEntries(arc_entries);
    bool directoryValid = false;

    if (archive_path != null && System.IO.Directory.Exists(archive_path))
    {
        // Normalize to a trailing backslash so entry names can be appended directly.
        if (!archive_path.EndsWith("\\"))
        {
            archive_path += "\\";
        }
        directoryValid = true;
    }

    if (directoryValid && entriesValid)
    {
        this.ArchiveDirectory = archive_path;
        this.ArchiveEntries = arc_entries;
    }
    else
    {
        MessageBox.Show("Archive creator wrong params, cannot continue...", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
        this.Close();
    }
}
/**
 * Begins writing a new CPIO file entry and positions the stream to the
 * start of the entry data. Closes the current entry if still active. The
 * current time will be used if the entry has no set modification time and
 * the default header format will be used if no other format is specified in
 * the entry.
 *
 * @param entry
 *            the CPIO cpioEntry to be written
 * @throws IOException
 *             if an I/O error has occurred or if a CPIO file error has
 *             occurred
 * @throws ClassCastException if entry is not an instance of CpioArchiveEntry
 */
public override void putArchiveEntry(ArchiveEntry entry) //throws IOException
{
    if (finished)
    {
        throw new java.io.IOException("Stream has already been finished");
    }

    CpioArchiveEntry e = (CpioArchiveEntry)entry;
    ensureOpen();
    if (this.entry != null)
    {
        closeArchiveEntry(); // close previous entry
    }
    // Default the modification time to "now"; CPIO stores seconds, not millis.
    if (e.getTime() == -1)
    {
        e.setTime(java.lang.SystemJ.currentTimeMillis() / 1000);
    }

    // Every entry in one archive must use the same header format.
    short format = e.getFormat();
    if (format != this.entryFormat)
    {
        throw new java.io.IOException("Header format: " + format + " does not match existing format: " + this.entryFormat);
    }

    // 'names' doubles as the duplicate-name check: put() returns the prior value.
    if (this.names.put(e.getName(), e) != null)
    {
        throw new java.io.IOException("duplicate entry: " + e.getName());
    }

    writeHeader(e);
    this.entry = e;
    this.written = 0;
}
// Injects translated text into a ZTR archive entry: unpacks the original
// entry, merges the source strings over it, then repacks into 'output'.
private void Inject(ArchiveListing listing, ArchiveEntry entry, Dictionary<String, String> sourceEntries, Stream output)
{
    // Default output format; FFXIII-2 resident system text overrides it below.
    ZtrFileType type = ZtrFileType.LittleEndianUncompressedDictionary;
    ZtrFileEntry[] targetEntries;
    using (Stream original = listing.Accessor.ExtractBinary(entry))
    {
        // Unpack the existing entry so the source strings can be merged onto it.
        ZtrFileUnpacker unpacker = new ZtrFileUnpacker(original, InteractionService.TextEncoding.Provide().Encoding);
        targetEntries = unpacker.Unpack();
        if (InteractionService.GamePart == FFXIIIGamePart.Part2)
        {
            // FFXIII-2 resident system text must be written big-endian and compressed.
            if (entry.Name.StartsWith("txtres/resident/system/txtres_", StringComparison.InvariantCultureIgnoreCase))
            {
                type = ZtrFileType.BigEndianCompressedDictionary;
            }
        }
    }

    // Overlay the translated strings onto the unpacked entries.
    MergeEntries(sourceEntries, targetEntries);

    ZtrFilePacker packer = new ZtrFilePacker(output, InteractionService.TextEncoding.Provide().Encoding, type);
    packer.Pack(targetEntries);
}
/**
 * Copies the ArchiveEntry to the Output stream
 *
 * @param in
 *            the stream to read the data from
 * @param out
 *            the stream to write the data to
 * @param entry
 *            the entry to write
 * @throws IOException
 *             if data cannot be read or written
 */
private void copyStream(java.io.InputStream inJ, ArchiveOutputStream outJ, ArchiveEntry entry) //throws IOException
{
    // Frame the payload with putArchiveEntry/closeArchiveEntry; order is fixed.
    outJ.putArchiveEntry(entry);
    IOUtils.copy(inJ, outJ);
    outJ.closeArchiveEntry();
}
// Verifies opening a password-protected entry: length, hash, and final EOF state.
public void Test005(string archiveName, string entryPath, string password, long length, int hashCode)
{
    var fileName = Path.Combine(Location, "Media", archiveName);
    var index = Utils.GetEntryIndex(archiveName, entryPath);

    // Register the password before touching the encrypted archive.
    Utils.PasswordHandler.Set(fileName, password, 5000);

    IntPtr entry;
    if (!ArchiveEntry.OpenEntry(fileName, index, out entry))
    {
        Assert.Fail("Failed to open entry.");
    }

    try
    {
        // Freshly opened entries start at position zero with the expected length.
        Assert.AreEqual(0, ArchiveEntry.GetEntryPosition(entry));
        Assert.AreEqual(length, ArchiveEntry.GetEntryLength(entry));

        // Hashing reads the entire entry, leaving the position at the end.
        Assert.AreEqual(hashCode, Utils.GetEntryHashCode(entry));
        Assert.AreEqual(length, ArchiveEntry.GetEntryPosition(entry));
        Assert.IsTrue(ArchiveEntry.IsEOF(entry));
    }
    finally
    {
        ArchiveEntry.CloseEntry(entry);
    }
}
// Returns the next entry of the underlying zip or tar stream, wrapped as an
// ArchiveEntry, or null at end of stream / for unsupported stream types.
// Uses type patterns instead of the original is-check followed by a second cast.
public ArchiveEntry GetNextEntry()
{
    if (Stream is ZipInputStream zipStream)
    {
        var zipEntry = zipStream.GetNextEntry();
        return zipEntry != null ? new ArchiveEntry(zipEntry) : null;
    }

    if (Stream is TarInputStream tarStream)
    {
        var tarEntry = tarStream.GetNextEntry();
        return tarEntry != null ? new ArchiveEntry(tarEntry) : null;
    }

    // Unsupported stream type: behave like end of enumeration.
    return null;
}
// Opens every entry of a password-protected archive in parallel; opening must
// succeed exactly for file entries (directories cannot be opened).
public void Test007(string archiveName, string password)
{
    var fileName = Path.Combine(Location, "Media", archiveName);
    var count = Utils.GetEntryCount(fileName);

    // Register the password once; all parallel workers share it.
    Utils.PasswordHandler.Set(fileName, password, 5000);

    Utils.ParallelIterations(index =>
    {
        bool isFile;
        Utils.GetEntryType(archiveName, index, out isFile);

        IntPtr entry;
        var isOpen = ArchiveEntry.OpenEntry(fileName, index, out entry);
        Assert.AreEqual(isFile, isOpen);

        try
        {
            if (isFile)
            {
                // Hashing consumes the entry; it must be at EOF afterwards.
                Utils.GetEntryHashCode(entry);
                Assert.IsTrue(ArchiveEntry.IsEOF(entry));
            }
        }
        finally
        {
            ArchiveEntry.CloseEntry(entry);
        }
    }, count);
}
// Verifies basic entry reading: position/length bookkeeping, both hash APIs,
// and seeking back to the start.
public void Test001(string archiveName, string entryPath, long length, int hashCode)
{
    var fileName = Path.Combine(Location, "Media", archiveName);
    var index = Utils.GetEntryIndex(archiveName, entryPath);

    IntPtr entry;
    if (!ArchiveEntry.OpenEntry(fileName, index, out entry))
    {
        Assert.Fail("Failed to open entry.");
    }

    try
    {
        // Freshly opened entries start at position zero with the expected length.
        Assert.AreEqual(0, ArchiveEntry.GetEntryPosition(entry));
        Assert.AreEqual(length, ArchiveEntry.GetEntryLength(entry));

        // Hashing reads the entire entry.
        Assert.AreEqual(hashCode, Utils.GetEntryHashCode(entry));
        Assert.AreEqual(length, ArchiveEntry.GetEntryPosition(entry));
        Assert.IsTrue(ArchiveEntry.IsEOF(entry));

        // Seek back and confirm GetEntryHashCode2 (ARCHIVE_ReadEntry2) agrees.
        Assert.IsTrue(ArchiveEntry.SeekEntry(entry, 0));
        Assert.AreEqual(hashCode, Utils.GetEntryHashCode2(entry));
        Assert.AreEqual(length, ArchiveEntry.GetEntryPosition(entry));
        Assert.IsTrue(ArchiveEntry.IsEOF(entry));
    }
    finally
    {
        ArchiveEntry.CloseEntry(entry);
    }
}
/// <summary>
/// Creates a data-table node named after the indices entry.
/// </summary>
/// <param name="listing">Archive listing that owns the entry.</param>
/// <param name="extension">Archive extension this table belongs to.</param>
/// <param name="indices">Entry holding the table's index data; its name becomes the node name.</param>
public UiDataTableNode(ArchiveListing listing, UiArchiveExtension extension, ArchiveEntry indices)
    : base(indices.Name, UiNodeType.DataTable)
{
    _listing = listing;
    _extension = extension;
    _indices = indices;
}
// Extracts the selected archive entry to a temp file, opens it with its
// associated application, waits for the viewer to exit, then deletes the file.
// Fixes: path built via String.Format produced a doubled separator
// (Path.GetTempPath already ends with one); the started Process was never disposed.
private void menuItemView_Click(object sender, EventArgs e)
{
    if (!isLoaded())
    {
        return;
    }
    if (lvFiles.SelectedItems.Count == 0)
    {
        return;
    }

    // Look the entry up by the selected item's display name (extension stripped).
    ArchiveEntry ent = m_Archive.getByName(Path.GetFileNameWithoutExtension(lvFiles.SelectedItems[0].Text));
    if (ent == null)
    {
        return;
    }

    ent.extractTo(Path.GetTempPath());
    string file = Path.Combine(Path.GetTempPath(), ent.getName() + ent.getExtension());

    // Process.Start may return null when an existing process is reused.
    using (Process p = Process.Start(file))
    {
        if (p != null)
        {
            p.WaitForExit();
        }
    }

    File.Delete(file);
}
/** {@inheritDoc} */
public override void putArchiveEntry(ArchiveEntry pEntry) //throws IOException
{
    if (finished)
    {
        throw new java.io.IOException("Stream has already been finished");
    }

    ArArchiveEntry arEntry = (ArArchiveEntry)pEntry;
    if (prevEntry == null)
    {
        // Very first entry: the global archive header goes out first.
        archiveOffset += writeArchiveHeader();
    }
    else
    {
        // The previous entry must have been written out completely.
        if (prevEntry.getLength() != entryOffset)
        {
            throw new java.io.IOException("length does not match entry (" + prevEntry.getLength() + " != " + entryOffset);
        }
        if (haveUnclosedEntry)
        {
            closeArchiveEntry();
        }
    }

    prevEntry = arEntry;
    archiveOffset += writeEntryHeader(arEntry);
    entryOffset = 0;
    haveUnclosedEntry = true;
}
// Decrypts an entry with the "bogocrypt1" scheme in 1 KiB blocks. Reads are
// rounded up to a multiple of 4 bytes (the cipher appears to work on 32-bit
// words), but only the real payload bytes are written to the output.
private static void Bogocrypt1(ArchiveEntry entry, Stream input, Stream output)
{
    var key = entry.BogocryptKey;
    long remaining = entry.Length;

    var block = new byte[1024];
    while (remaining > 0)
    {
        // Round the read size up to a 4-byte boundary; '+ 3 & ~3' parses as '(remaining + 3) & ~3'.
        var blockLength = (int)Math.Min(block.Length, remaining + 3 & ~3);
        // Number of genuine payload bytes in this block (no padding).
        var actualBlockLength = (int)Math.Min(block.Length, remaining);
        if (blockLength == 0)
        {
            throw new InvalidOperationException();
        }
        // NOTE(review): assumes a single Read returns the full request before EOF —
        // Stream.Read may legally return fewer bytes; confirm the source guarantees this.
        if (input.Read(block, 0, blockLength) < actualBlockLength)
        {
            throw new EndOfStreamException();
        }
        key = ArchiveFile.Bogocrypt1(block, 0, blockLength, key);
        output.Write(block, 0, actualBlockLength);
        // Subtract the padded length; 'remaining' may go negative on the final block.
        remaining -= blockLength;
    }
}
// Recursively adds the files and subdirectories under 'directory' to 'value'.
protected void AddDirectory(string directory, string relativePath, string prefix, List<ArchiveEntry> value, ITaskItem[] metadata)
{
    // Write out an entry for the current directory
    // (actually, this is _not_ done)
    // NOTE(review): 'directoryEntry' is built but never added to 'value'; its only
    // observable effect is consuming an inode number via 'this.inode++'. Removing
    // it would shift all subsequent inode assignments.
    ArchiveEntry directoryEntry = new ArchiveEntry()
    {
        FileSize = 0x00001000,
        Sha256 = Array.Empty<byte>(),
        Mode = LinuxFileMode.S_IXOTH | LinuxFileMode.S_IROTH | LinuxFileMode.S_IXGRP | LinuxFileMode.S_IRGRP | LinuxFileMode.S_IXUSR | LinuxFileMode.S_IWUSR | LinuxFileMode.S_IRUSR | LinuxFileMode.S_IFDIR,
        Modified = Directory.GetLastWriteTime(directory),
        Group = "root",
        Owner = "root",
        Inode = this.inode++
    };

    // The order in which the files appear in the cpio archive is important; if this is not respected xzdio
    // will report errors like:
    // error: unpacking of archive failed on file ./usr/share/quamotion/mscorlib.dll: cpio: Archive file not in header
    // Directories sort with a trailing '/' so their contents follow them directly.
    var entries = Directory.GetFileSystemEntries(directory).OrderBy(e => Directory.Exists(e) ? e + "/" : e, StringComparer.Ordinal).ToArray();

    foreach (var entry in entries)
    {
        if (File.Exists(entry))
        {
            this.AddFile(entry, relativePath + Path.GetFileName(entry), prefix, value, metadata);
        }
        else
        {
            this.AddDirectory(entry, relativePath + Path.GetFileName(entry) + "/", prefix + "/" + Path.GetFileName(entry), value, metadata);
        }
    }
}
/// <summary>
/// Builds one directory archive entry per Linux folder item. Returns an empty
/// collection when no folder metadata was supplied.
/// </summary>
public Collection<ArchiveEntry> FromLinuxFolders(ITaskItem[] metadata)
{
    var result = new Collection<ArchiveEntry>();

    // 'metadata' is null when the user did not define any folders; nothing to do.
    if (metadata == null)
        return result;

    foreach (var folder in metadata)
    {
        // Normalize Windows separators to the Linux form used inside the archive.
        var targetPath = folder.ItemSpec.Replace("\\", "/");

        // Write out an entry for the current directory
        // (actually, this is _not_ done)
        result.Add(new ArchiveEntry()
        {
            FileSize = 0x00001000,
            Sha256 = Array.Empty<byte>(),
            Mode = LinuxFileMode.S_IXOTH | LinuxFileMode.S_IROTH | LinuxFileMode.S_IXGRP | LinuxFileMode.S_IRGRP | LinuxFileMode.S_IXUSR | LinuxFileMode.S_IWUSR | LinuxFileMode.S_IRUSR | LinuxFileMode.S_IFDIR,
            Modified = DateTime.Now,
            Group = folder.GetGroup(),
            Owner = folder.GetOwner(),
            Inode = this.inode++,
            TargetPath = targetPath,
            LinkTo = string.Empty,
            RemoveOnUninstall = folder.GetRemoveOnUninstall()
        });
    }

    return result;
}
// Registers the entry as half of a <name>/<name>.imgb pair when its extension
// is one of the known win32 resource types. Returns false for anything else.
private bool TryAddImgbPair(ArchiveListing listing, ArchiveEntry entry, string entryPath, string entryName)
{
    string ext = PathEx.GetMultiDotComparableExtension(entryName);

    // Only these win32 resource extensions participate in imgb pairing.
    switch (ext)
    {
        case ".win32.xfv":
        case ".win32.xgr":
        case ".win32.xwb":
        case ".win32.trb":
        case ".win32.imgb":
            break;
        default:
            return false;
    }

    // The shared base name of the pair is the path without its extension.
    string longName = entryPath.Substring(0, entryPath.Length - ext.Length);
    if (IsUnexpectedEntry(listing.Name, longName))
        return false;

    return SetPairedEntry(listing, entry, ext, longName);
}
/// <summary>
/// Creates a file-table node named after the indices entry.
/// </summary>
/// <param name="listing">Archive listing that owns both entries.</param>
/// <param name="extension">Archive extension this table belongs to.</param>
/// <param name="indices">Entry holding the table's index data; its name becomes the node name.</param>
/// <param name="binary">Entry holding the table's binary payload.</param>
public UiFileTableNode(ArchiveListing listing, UiArchiveExtension extension, ArchiveEntry indices, ArchiveEntry binary)
    : base(indices.Name, UiNodeType.FileTable)
{
    _listing = listing;
    _extension = extension;
    _indices = indices;
    _binary = binary;
}
/**
 * Whether this stream is able to write the given entry.
 *
 * <p>May return false if it is set up to use encryption or a
 * compression method that hasn't been implemented yet.</p>
 * @since Apache Commons Compress 1.1
 */
public override bool canWriteEntryData(ArchiveEntry ae)
{
    // Only zip entries whose settings ZipUtil supports can be written.
    ZipArchiveEntry zipEntry = ae as ZipArchiveEntry;
    return zipEntry != null && ZipUtil.canHandleEntryData(zipEntry);
}
//JAVA TO C# CONVERTER WARNING: Method 'throws' clauses are not available in C#:
//ORIGINAL LINE: void addToArchive(org.apache.commons.compress.archivers.ArchiveOutputStream stream) throws java.io.IOException
// Writes this item into the archive: header, payload (via Operation), footer.
internal virtual void AddToArchive(ArchiveOutputStream stream)
{
    ArchiveEntry archiveEntry = CreateEntry(File, Root, stream);
    stream.putArchiveEntry(archiveEntry);
    Operation.accept(stream);
    stream.closeArchiveEntry();
}
// Opens the native archive entry and wraps it in a readable stream.
// Silently leaves the object closed when the entry cannot be opened.
public void Open()
{
    if (!ArchiveEntry.OpenEntry(this.FileName, this.Index, out this.Entry))
        return;

    this.ReadStream = new ArchiveEntryStream(this.Entry);
    this.IsOpen = true;
}
// @throws ClassCastException if entry is not an instance of ZipArchiveEntry
public override void putArchiveEntry(ArchiveEntry ze) //throws IOException
{
    // The JarMarker extra field must appear on the very first entry so the
    // result is recognized as a jar; it is added exactly once.
    if (!jarMarkerAdded)
    {
        ZipArchiveEntry zipEntry = (ZipArchiveEntry)ze;
        zipEntry.addAsFirstExtraField(JarMarker.getInstance());
        jarMarkerAdded = true;
    }
    base.putArchiveEntry(ze);
}
/// <summary>
/// Extracts an archive entry into an in-memory stream.
/// </summary>
/// <param name="source">The archive to extract from.</param>
/// <param name="path">The entry key/path inside the archive.</param>
/// <returns>A stream positioned at the start of the extracted data.</returns>
private Stream ExtractToStream(SevenZipArchive source, string path)
{
    MemoryStream buffer = new MemoryStream();
    source[path].Extract(buffer);

    // Rewind so callers can read from the beginning.
    buffer.Seek(0, SeekOrigin.Begin);
    return buffer;
}
// Maps an entry's ".win32.<ext>" suffix to the corresponding archive extension enum.
private UiArchiveExtension GetArchiveExtension(ArchiveEntry indices)
{
    const string extensionPrefix = ".win32.";

    // NOTE(review): assumes every entry name ends with ".win32.<ext>"; an entry
    // without that prefix would yield a mangled substring — confirm with callers.
    string comparable = PathEx.GetMultiDotComparableExtension(indices.Name);
    string extensionName = comparable.Substring(extensionPrefix.Length);

    return EnumCache<UiArchiveExtension>.Parse(extensionName);
}
/// <inheritdoc/>
public override RpmFileColor DetermineColor(ArchiveEntry entry)
{
    // The libplist AUTHORS file gets a hard-coded color override.
    if (entry.TargetPath == "/usr/share/doc/libplist-2.0.1.151/AUTHORS")
        return RpmFileColor.RPMFC_BLACK;

    return base.DetermineColor(entry);
}
// Begins writing a new CPIO entry: closes any open entry, defaults the
// modification time, validates the header format, then emits the header.
public override void PutArchiveEntry(ArchiveEntry entry)
{
    if (finished)
    {
        throw new IOException ("Stream has already been finished");
    }

    CpioArchiveEntry cpioEntry = (CpioArchiveEntry)entry;
    EnsureOpen ();

    // Close the previous entry if one is still open.
    if (this.entry != null)
    {
        CloseArchiveEntry ();
    }

    // Default the modification time to "now" when the entry has none.
    if (cpioEntry.Time == -1)
    {
        cpioEntry.Time = DateUtils.CurrentUnixTimeMillis;
    }

    // Every entry in one archive must use the same header format.
    short format = cpioEntry.Format;
    if (format != this.entryFormat)
    {
        throw new IOException ("Header format: " + format + " does not match existing format: " + this.entryFormat);
    }

    // TODO: check for duplicate entry names (see the disabled code below).
    /* if (this.names.put(e.getName(), e) != null) { throw new IOException("duplicate entry: " + e.getName()); } */

    WriteHeader (cpioEntry);
    this.entry = cpioEntry;
    this.written = 0;
}
// Returns the next non-directory zip entry as an ArchiveEntry, extracting
// small entries to a temp file for indexing. Returns null at end of stream.
private ArchiveEntry GetNextEntryZip ()
{
    ZipInputStream zip_stream = (ZipInputStream) archive_stream;

    // Skip directory entries; only real files are indexed.
    ZipEntry zip_entry = zip_stream.GetNextEntry ();
    while (zip_entry != null && zip_entry.IsDirectory)
        zip_entry = zip_stream.GetNextEntry ();

    // End of the entries.
    if (zip_entry == null)
        return null;

    ArchiveEntry entry = new ArchiveEntry ();
    entry.Name = zip_entry.Name;
    entry.Modified = zip_entry.DateTime; // FIXME: Not sure zip_entry.DateTime is UTC.
    entry.Comment = zip_entry.Comment;
    entry.Size = zip_entry.Size;

    // Only index smaller subfiles, to avoid filling /tmp
    if (entry.Size > MAX_SINGLE_FILE) {
        Log.Debug ("Skipping over large file {0} in {1}", entry.Name, Indexable.DisplayUri.ToString ());
        return entry;
    }

    entry.TempFile = StoreStreamInTempFile (archive_stream, GetExtension (entry.Name), entry.Modified);
    if (entry.TempFile != null)
        entry.MimeType = XdgMime.GetMimeType (entry.TempFile);

    return entry;
}
// Begins writing a new AR entry: emits the global header for the first entry,
// verifies and closes the previous entry otherwise, then writes this header.
public override void PutArchiveEntry(ArchiveEntry pEntry)
{
    if (finished)
    {
        throw new IOException ("Stream has already been finished");
    }

    ArArchiveEntry arEntry = (ArArchiveEntry)pEntry;
    if (currentEntry == null)
    {
        // First entry
        WriteArchiveHeader ();
    }
    else
    {
        // The previous entry's declared size must match what was actually written.
        if (currentEntry.Size != entryOffset)
        {
            throw new IOException ("length does not match entry (" + currentEntry.Size + " != " + entryOffset);
        }
        if (haveUnclosedEntry)
        {
            CloseArchiveEntry ();
        }
    }

    currentEntry = arEntry;
    WriteEntryHeader (arEntry);
    entryOffset = 0;
    haveUnclosedEntry = true;
}
/// <summary>
/// Begins writing a new archive entry. Implementations in this file close any
/// still-open previous entry and position the stream at the entry's data.
/// </summary>
/// <param name="entry">The entry to begin writing.</param>
public abstract void PutArchiveEntry(ArchiveEntry entry);
// Returns the next non-directory tar entry as an ArchiveEntry, extracting
// small entries to a temp file for indexing. Returns null at end of stream.
private ArchiveEntry GetNextEntryTar ()
{
    TarInputStream tar_stream = (TarInputStream) archive_stream;

    // Skip directory entries; only real files are indexed.
    TarEntry tar_entry = tar_stream.GetNextEntry ();
    while (tar_entry != null && tar_entry.IsDirectory)
        tar_entry = tar_stream.GetNextEntry ();

    // End of the entries;
    if (tar_entry == null)
        return null;

    ArchiveEntry entry = new ArchiveEntry ();
    entry.Name = tar_entry.Name;
    entry.Modified = tar_entry.ModTime;
    entry.Size = tar_entry.Size;

    // Only index smaller subfiles, to avoid filling /tmp
    if (entry.Size > MAX_SINGLE_FILE) {
        Log.Debug ("Skipping over large file {0} in {1}", entry.Name, Indexable.DisplayUri.ToString ());
        return entry;
    }

    entry.TempFile = StoreStreamInTempFile (archive_stream, GetExtension (entry.Name), entry.Modified);
    if (entry.TempFile != null)
        entry.MimeType = XdgMime.GetMimeType (entry.TempFile);

    return entry;
}
// Produces exactly one entry for a non-archive (single) file, then null on
// every subsequent call.
private ArchiveEntry GetNextEntrySingle ()
{
    if (handled_single)
        return null;

    // Prefer the ExactFilename property when present; fall back to FileInfo.Name.
    string exact_filename = null;
    foreach (Property p in Indexable.Properties) {
        if (p.Key == Property.ExactFilenamePropKey) {
            exact_filename = p.Value;
            break;
        }
    }

    ArchiveEntry entry = new ArchiveEntry ();
    entry.Name = Path.GetFileNameWithoutExtension (exact_filename != null ? exact_filename : FileInfo.Name);
    entry.Modified = FileInfo.LastWriteTimeUtc;

    entry.TempFile = StoreStreamInTempFile (archive_stream, GetExtension (entry.Name), entry.Modified);
    if (entry.TempFile != null) {
        entry.Size = new FileInfo (entry.TempFile).Length;
        entry.MimeType = XdgMime.GetMimeType (entry.TempFile);
    }

    handled_single = true;
    return entry;
}
// Extracts a single (non-folder) entry to extractPath, creating its directory
// first. Fixes: FileMode.OpenOrCreate does not truncate, so overwriting a
// previously extracted, longer file left stale trailing bytes; FileMode.Create
// truncates the target before writing.
private void ExtractFile(ArchiveEntry entry)
{
    if (entry.IsFolder)
        return;

    string dirName = Path.GetDirectoryName(entry.Path);
    if (dirName == null)
        return;

    Directory.CreateDirectory(Path.Combine(extractPath, dirName));

    using (var sourceStream = entry.CreateStream())
    using (var fileStream = new FileStream(Path.Combine(extractPath, entry.Path), FileMode.Create, FileAccess.Write, FileShare.None))
    {
        sourceStream.Seek(0, SeekOrigin.Begin);
        sourceStream.CopyTo(fileStream);
    }
}