/// <summary>
/// Loads the file entry as a table and writes it to disk in CSV form.
/// </summary>
/// <param name="file">The archive entry to export.</param>
/// <param name="fileName">Destination CSV path.</param>
public static void DumpToCsv(this IArchiveFileEntry file, string fileName)
{
    using (var exportTable = file.ToTable())
    {
        exportTable.DumpToCsv(fileName);
    }
}
/// <summary>
/// Returns a readable stream of file data.
/// </summary>
/// <param name="fileEntry">The file entry to load.</param>
/// <param name="handleCompression">
/// When true, wraps the raw stream in a decompressor if the entry's flags indicate compression.
/// </param>
/// <returns>A stream over the entry's data.</returns>
public Stream OpenFileStream(IArchiveFileEntry fileEntry, bool handleCompression = true)
{
    // Resolution order: main archive, then core data archive, then a loose file on disk.
    // (The original initialized baseStream to null and immediately reassigned it.)
    Stream baseStream = ArchiveFile.Open(fileEntry)
                        ?? CoreDataArchive?.Open(fileEntry)
                        ?? OpenLocalFile(fileEntry.Path);

    return handleCompression ? HandleCompression(fileEntry, baseStream) : baseStream;
}
/// <summary>
/// Writes the supplied stream to the filesystem location derived from
/// <paramref name="fileEntry"/>, decompressing on the fly and raising progress events.
/// Returns without writing when a file with a matching hash already exists.
/// </summary>
/// <param name="stream">Source data (possibly compressed per the entry's flags).</param>
/// <param name="fileEntry">The entry describing the target path and sizes.</param>
public async Task AppendAsync(Stream stream, IArchiveFileEntry fileEntry)
{
    var filesystemTarget = GetEntryPath(fileEntry);
    if (ExistsWithMatchingHash(fileEntry, filesystemTarget))
    {
        return;
    }

    Directory.CreateDirectory(Path.GetDirectoryName(filesystemTarget));
    using (var fileStream = File.Open(filesystemTarget, FileMode.Create, FileAccess.ReadWrite, FileShare.ReadWrite))
    {
        // May wrap the caller's stream in a decompression stream, depending on the entry's flags.
        var source = Archive.HandleCompression(fileEntry, stream);

        // The callback ignores the reported length and uses the entry's uncompressed
        // size as the total, since `length` may describe the compressed source.
        void ProgressCallback(long length, long progress)
        {
            var eventArgs = new ProgressUpdateEventArgs(fileEntry, progress, fileEntry.UncompressedSize);
            OnProgressUpdated(eventArgs);
        }

        try
        {
            await source.CopyToAsync(fileStream, ProgressCallback);
        }
        finally
        {
            // Only dispose the wrapper we created; the caller owns the original stream.
            // Dispose() implies Close(), so the former separate Close() call was redundant.
            if (source != stream)
            {
                source.Dispose();
            }
        }
    }
}
/// <summary>
/// Compares this entry to <paramref name="other"/> by hash.
/// NOTE(review): the arguments to string.Compare are reversed relative to the usual
/// ascending pattern, which yields a *descending* ordinal order; returning -1 when
/// other is null (this sorts before null) is consistent with that but violates the
/// IComparable convention that any instance compares greater than null. Confirm
/// descending order is intended before "fixing" either.
/// </summary>
public int CompareTo(IArchiveFileEntry other)
{
    if (other == null)
    {
        return(-1);
    }

    return(string.Compare(other.Hash, Hash, StringComparison.Ordinal));
}
/// <summary>
/// Extracts a single entry from the archive to <paramref name="target"/>, writing
/// per-file progress to the console. Skips (with a message) entries that were not
/// found in the index.
/// </summary>
/// <param name="archive">Archive to read from.</param>
/// <param name="file">Entry to extract; may be null when the lookup failed.</param>
/// <param name="target">Destination file path.</param>
/// <param name="validate">
/// Currently unused: hash validation only works for uncompressed entries, because the
/// stored hash covers the compressed bytes in the archive rather than the extracted data.
/// </param>
private static void ExtractFile(Archive archive, IArchiveFileEntry file, string target, bool validate = true)
{
    if (file == null)
    {
        // Fixed message typo: "file as not found" -> "file was not found".
        Console.WriteLine("Skipping file: {0}, file was not found in {1}", Path.GetFileName(target), Path.GetFileName(archive.IndexFile.FileName));
        return;
    }

    void WriteProgress(long length, long progress)
    {
        Console.CursorLeft = 0;
        Console.Write("{0} ", file.FileName);
        Console.Write(progress);
        if (length > 0)
        {
            Console.Write("/");
            Console.Write(length);
        }
    }

    void ClearProgress()
    {
        Console.CursorLeft = 0;
        Console.Write("{0} DONE ", file.FileName);
        Console.WriteLine();
    }

    using (var stream = archive.OpenFileStream(file))
    using (var targetStream = File.Open(target, FileMode.Create, FileAccess.Write, FileShare.None))
    {
        // Method group instead of a pass-through lambda.
        stream.CopyTo(targetStream, WriteProgress);
    }

    ClearProgress();

    var fileInfo = new FileInfo(target);
    Debug.Assert(fileInfo.Length == file.UncompressedSize);
}
/// <summary>
/// Extract supplied <see cref="IArchiveFileEntry"/> from <see cref="Archive"/> into the
/// local "tbl" directory.
/// </summary>
private static void ExtractFile(Archive archive, IArchiveFileEntry fileEntry)
{
    string filePath = Path.Combine("tbl", fileEntry.FileName);

    // File.Create truncates any existing file. The previous File.OpenWrite did not,
    // which left stale trailing bytes behind whenever the new content was shorter
    // than a prior extraction.
    using (Stream archiveStream = archive.OpenFileStream(fileEntry))
    using (FileStream fileStream = File.Create(filePath))
    {
        archiveStream.CopyTo(fileStream);
    }

    log.Info($"Extracted {fileEntry.FileName}...");
}
/// <summary>
/// Downloads a single file by hash from the patch source, retrying up to
/// FileRetryCount times, then appends it via the patch writer (also retried).
/// Raises a zero-progress update first and returns early when the writer
/// already has the file with a matching hash.
/// </summary>
/// <exception cref="AggregateException">
/// Thrown with every collected error when either the download or the append
/// exhausts its retries. Note the exceptions list is shared, so an append
/// failure's AggregateException also includes earlier download errors.
/// </exception>
// NOTE(review): fileData is never disposed on success or failure — TODO confirm
// whether PatchWriter.AppendAsync takes ownership of the stream.
// NOTE(review): on an append retry the same fileData stream is reused; if a failed
// attempt advanced its position, the retry reads from mid-stream — verify AppendAsync
// rewinds or that the stream is seekable and reset.
public async Task PatchSingleItem(IArchiveFileEntry file, CancellationToken cancellationToken)
{
    OnProgress(new ProgressUpdateEventArgs(file, 0, 0));
    if (await PatchWriter.Exists(file).ConfigureAwait(false))
    {
        return;
    }

    int counter = 0;
    Stream fileData = null;
    List <Exception> exceptions = new List <Exception>();
    // Download loop: keep trying until we get a stream or run out of retries.
    do
    {
        try
        {
            fileData = await PatchSource.DownloadHashAsync((int)Index.RootIndex.BuildNumber, file.Hash, cancellationToken);
        }
        catch (Exception ex)
        {
            counter++;
            exceptions.Add(ex);
        }
    }
    while (fileData == null && counter < FileRetryCount);

    if (fileData == null)
    {
        throw new AggregateException(exceptions);
    }

    counter = 0;
    // Append loop: break on first success, otherwise retry until the limit.
    do
    {
        try
        {
            await PatchWriter.AppendAsync(fileData, file).ConfigureAwait(false);
            break;
        }
        catch (Exception ex)
        {
            exceptions.Add(ex);
            counter++;
        }
    }
    while (counter < FileRetryCount);

    if (counter >= FileRetryCount)
    {
        throw new AggregateException(exceptions);
    }
}
/// <summary>
/// Wraps <paramref name="baseStream"/> in the decompressor matching the entry's
/// compression flags, or returns it unchanged when the entry is not compressed.
/// </summary>
/// <param name="fileEntry">Entry whose flags and sizes describe the payload.</param>
/// <param name="baseStream">Raw (possibly compressed) data stream.</param>
/// <returns>A stream producing the entry's uncompressed data.</returns>
internal static Stream HandleCompression(IArchiveFileEntry fileEntry, Stream baseStream)
{
    switch (fileEntry.Flags & (ArchiveFileFlags.CompressedLzma | ArchiveFileFlags.CompressedDeflate))
    {
        case ArchiveFileFlags.CompressedLzma:
        {
            // The LZMA payload is prefixed by a 5-byte properties header.
            // Stream.Read may return fewer bytes than requested (the original
            // ignored its return value), so loop until the header is complete.
            var properties = new byte[5];
            int offset = 0;
            while (offset < properties.Length)
            {
                int read = baseStream.Read(properties, offset, properties.Length - offset);
                if (read == 0)
                {
                    throw new EndOfStreamException("Unexpected end of stream while reading LZMA properties.");
                }
                offset += read;
            }

            return new LzmaStream(properties, baseStream, fileEntry.CompressedSize - properties.Length, fileEntry.UncompressedSize);
        }
        case ArchiveFileFlags.CompressedDeflate:
            // leaveOpen: false — disposing the DeflateStream also disposes baseStream.
            return new DeflateStream(baseStream, CompressionMode.Decompress, false);
        default:
            // Not compressed; hand back the raw stream.
            return baseStream;
    }
}
/// <summary>
/// Generate a base map (.nfmap) file from supplied <see cref="WorldEntry"/>.
/// When grid coordinates are given only that grid is processed; otherwise every
/// ".area" grid file under the world's asset path is processed.
/// </summary>
/// <param name="entry">World whose asset path contains the grid files.</param>
/// <param name="gridX">Optional X coordinate of a single grid to process.</param>
/// <param name="gridY">Optional Y coordinate of a single grid to process.</param>
private static void ProcessWorld(WorldEntry entry, byte? gridX = null, byte? gridY = null)
{
    var mapFile = new WritableMapFile(Path.GetFileName(entry.AssetPath));
    log.Info($"Processing {mapFile.Asset}...");

    if (gridX.HasValue && gridY.HasValue)
    {
        string path = Path.Combine(entry.AssetPath, $"{mapFile.Asset}.{gridX:x2}{gridY:x2}.area");
        IArchiveFileEntry grid = ArchiveManager.MainArchive.GetFileInfoByPath(path);
        if (grid != null)
        {
            ProcessGrid(mapFile, grid, gridX.Value, gridY.Value);
        }
    }
    else
    {
        string path = Path.Combine(entry.AssetPath, "*.*.area");

        // Grid coordinates are encoded as two hex bytes in the file name,
        // e.g. "World.3f40.area". Build the regex once, outside the loop
        // (the original recompiled it per file).
        Regex regex = new Regex(@"[\w]+\.([A-Fa-f0-9]{2})([A-Fa-f0-9]{2})\.area");
        foreach (IArchiveFileEntry grid in ArchiveManager.MainArchive.IndexFile.GetFiles(path))
        {
            Match match = regex.Match(grid.FileName);
            if (!match.Success)
            {
                // The original called byte.Parse on empty groups here, which threw.
                log.Info($"Skipping {grid.FileName}: unexpected area file name format");
                continue;
            }

            byte x = byte.Parse(match.Groups[1].Value, NumberStyles.HexNumber);
            byte y = byte.Parse(match.Groups[2].Value, NumberStyles.HexNumber);
            ProcessGrid(mapFile, grid, x, y);
        }
    }

    // FIXME: this happens for worlds with no terrain information, this is usually an instance where props are used as terrain
    if (!mapFile.Any())
    {
        log.Info($"Map {mapFile.Asset} has no grid information, skipping");
        return;
    }

    // Path.ChangeExtension(mapFile.Asset, "nfmap")
    // ChangeExtension doesn't behave correctly on linux
    string filePath = Path.Combine("map", $"{mapFile.Asset}.nfmap");
    using (FileStream stream = File.Create(filePath))
    using (var writer = new BinaryWriter(stream))
    {
        mapFile.Write(writer);
    }
}
/// <summary>
/// Reads the area file for one grid and copies its terrain (Chnk) cells into the map.
/// Low-quality ("_low") grids are ignored; any failure while parsing or building the
/// grid is logged and swallowed.
/// </summary>
private static void ProcessGrid(WritableMapFile map, IArchiveFileEntry grid, byte gridX, byte gridY)
{
    // skip any low quality grids
    if (grid.FileName.Contains("_low", StringComparison.OrdinalIgnoreCase))
    {
        return;
    }

    log.Info($"Processing {map.Asset} grid {gridX},{gridY}...");

    using (Stream stream = ArchiveManager.MainArchive.OpenFileStream(grid))
    {
        try
        {
            var fileGrid = new WritableMapFileGrid(gridX, gridY);
            var area = new AreaFile(stream);
            foreach (IReadable chunk in area.Chunks)
            {
                if (chunk is Chnk terrainChunk)
                {
                    foreach (ChnkCell cell in terrainChunk.Cells)
                    {
                        if (cell != null)
                        {
                            fileGrid.AddCell(new WritableMapFileCell(cell));
                        }
                    }
                }
            }

            map.SetGrid(gridX, gridY, fileGrid);
        }
        catch (Exception e)
        {
            log.Error(e);
        }
    }
}
/// <summary>
/// Determines whether the entry is already satisfied: either the core data archive
/// contains its hash, or a file at <paramref name="path"/> exists whose SHA-1 digest
/// matches the entry's hash.
/// </summary>
private bool ExistsWithMatchingHash(IArchiveFileEntry entry, string path)
{
    if (CoreData?.GetFileDataEntryByHash(entry.Hash) != null)
    {
        return true;
    }

    if (!File.Exists(path))
    {
        return false;
    }

    bool matches;
    using (var fileStream = File.Open(path, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
    using (var sha1 = SHA1.Create())
    {
        matches = sha1.ComputeHash(fileStream).SequenceEqual(entry.Hash);
    }

    return matches;
}
/// <summary>
/// Loads the file entry's contents as an <see cref="M3Model"/>.
/// </summary>
public static M3Model ToModel(this IArchiveFileEntry file) => new M3Model(file.Open(), file.Name);
/// <summary>
/// Opens a stream over the entry's data by resolving its hash to a data entry first.
/// </summary>
public Stream Open(IArchiveFileEntry fileEntry) => Open(GetFileDataEntryByHash(fileEntry.Hash));
/// <summary>
/// Drops the stopwatch tracked under this entry's hash, if one exists.
/// </summary>
private void RemoveStopwatch(IArchiveFileEntry fileEntry)
{
    var key = fileEntry.Hash.ToHexString();
    _stopWatches.TryRemove(key, out _);
}
/// <summary>
/// Returns the stopwatch tracked under this entry's hash, starting a new one on first use.
/// </summary>
private Stopwatch GetStopwatch(IArchiveFileEntry fileEntry)
{
    var key = fileEntry.Hash.ToHexString();
    return _stopWatches.GetOrAdd(key, _ => Stopwatch.StartNew());
}
/// <summary>
/// Convenience overload: wraps the byte array in a memory stream and appends it.
/// </summary>
public Task AppendAsync(byte[] data, IArchiveFileEntry fileEntry) => AppendAsync(new MemoryStream(data), fileEntry);
/// <summary>
/// Checks whether the entry already exists with a matching hash at its default target path.
/// </summary>
private bool ExistsWithMatchingHash(IArchiveFileEntry entry) => ExistsWithMatchingHash(entry, GetEntryPath(entry));
/// <summary>
/// Maps an archive entry to its absolute path beneath the target directory.
/// </summary>
private string GetEntryPath(IArchiveFileEntry entry) => Path.Combine(Target.FullName, entry.Path);
/// <summary>
/// Reports whether the entry is already present with a matching hash.
/// The check itself is synchronous; the result is returned as a completed task.
/// </summary>
public Task<bool> Exists(IArchiveFileEntry fileEntry) => Task.FromResult(ExistsWithMatchingHash(fileEntry));
/// <summary>
/// Progress snapshot for a single file transfer.
/// </summary>
/// <param name="fileEntry">The entry being transferred.</param>
/// <param name="bytesWritten">Bytes written so far.</param>
/// <param name="length">Total expected length in bytes.</param>
public ProgressUpdateEventArgs(IArchiveFileEntry fileEntry, long bytesWritten, long length)
{
    FileEntry    = fileEntry;
    BytesWritten = bytesWritten;
    Length       = length;
}
/// <summary>
/// Reads the file entry's data into a <see cref="WildstarTable"/>.
/// </summary>
public static WildstarTable ToTable(this IArchiveFileEntry fileEntry) => new WildstarTable(fileEntry.Open());