/// <summary>
/// Creates a shallow copy of <paramref name="original"/> that references a different subfile.
/// </summary>
/// <param name="original">The receipt to clone; must not be null.</param>
/// <param name="subfile">The subfile the cloned receipt will point at.</param>
/// <returns>A new <see cref="FileReceipt"/> identical to <paramref name="original"/> except for its subfile.</returns>
public static FileReceipt CreateDuplicate(FileReceipt original, ISubfile subfile)
{
    // Direct cast instead of "as": MemberwiseClone always returns the runtime type,
    // so a direct cast cannot fail here, and if original were null we fail fast at
    // the clone call rather than with a NullReferenceException on the assignment below.
    FileReceipt receipt = (FileReceipt)original.MemberwiseClone();
    receipt.Subfile = subfile;
    return receipt;
}
/// <summary>
/// Wraps an existing subfile as the base of this hybrid subfile.
/// </summary>
/// <param name="baseSubfile">The subfile this hybrid delegates to. Not validated here;
/// callers are expected to pass a non-null instance.</param>
public SubfileHybrid(ISubfile baseSubfile)
{
    BaseSubfile = baseSubfile;
}
/// <summary>
/// Static initializer: loads plugin assemblies from the x86 subdirectory, wires up
/// assembly resolution, loads settings, indexes every .ppx archive found in the
/// configured location into <c>FileCache</c>, and writes placeholder .pp headers.
/// </summary>
static Manager()
{
#if DEBUG
    System.Diagnostics.Debugger.Launch();
#endif
    // Native/plugin dlls live next to the exe under "x86".
    string dllsPath = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, @"x86");

    var assemblies = new List<Assembly>();

    foreach (string path in new DirectoryInfo(dllsPath).GetFiles("*.dll").Select(x => x.FullName))
    {
        try
        {
            assemblies.Add(Assembly.LoadFile(path));
        }
        catch (Exception)
        {
            // Best-effort: some files in the directory may be native dlls or otherwise
            // unloadable; skip them rather than aborting startup.
        }
    }

    // Resolve requests against the preloaded set. Returning null (FirstOrDefault)
    // is the documented contract for "not found"; First() would throw
    // InvalidOperationException out of the resolver instead.
    AppDomain.CurrentDomain.AssemblyResolve += (sender, args) =>
    {
        return assemblies.FirstOrDefault(x => args.Name == x.FullName);
    };

    Settings = Settings.Load();

    foreach (string dir in Directory.EnumerateFiles(Settings.PPXLocation, "*.ppx", SearchOption.TopDirectoryOnly).OrderBy(x => x))
    {
        var archive = new ExtendedArchive(dir);

        // Index every contained file by (archive-without-.pp, filename), lowercased.
        foreach (var file in archive.ArchiveFiles)
        {
            ISubfile subfile = SubfileFactory.Create(file, file.ArchiveName);
            FileCache[new FileEntry(subfile.ArchiveName.Replace(".pp", "").ToLower(), subfile.Name.ToLower())] = subfile;
        }

        // Read the whole archive once and discard the bytes — presumably to warm the
        // OS file cache before real reads arrive. NOTE(review): confirm this is
        // intentional; it is pure I/O with no visible effect otherwise.
        using (FileStream fs = new FileStream(dir, FileMode.Open, FileAccess.Read, FileShare.Read))
        using (BinaryReader br = new BinaryReader(fs))
        {
            byte[] buffer = new byte[4096];
            while (0 < br.Read(buffer, 0, 4096))
            {
            }
        }

        LoadedArchives.Add(archive);
    }

    // Group cached file names by archive so a placeholder .pp header can be emitted per archive.
    Dictionary<string, List<string>> ppf = new Dictionary<string, List<string>>();

    foreach (var kv in FileCache)
    {
        List<string> names;
        if (!ppf.TryGetValue(kv.Key.Archive, out names))
        {
            names = new List<string>();
            ppf.Add(kv.Key.Archive, names);
        }
        names.Add(kv.Key.File);
    }

    foreach (var arc in ppf)
    {
        File.WriteAllBytes(Path.Combine(Settings.PlaceholdersLocation, arc.Key + ".pp"), Utility.CreateHeader(arc.Value));
    }
}
/// <summary>
/// Partitions <c>Files</c> into compression chunks: hashes all sources, reorders the
/// file list so byte-identical duplicates are adjacent, then walks the list assigning
/// files to chunks — .lst files into one dedicated chunk, audio (already-compressed)
/// files into uncompressed chunks, everything else into size-limited compressed chunks.
/// Completed chunks are pushed into <c>QueuedChunks</c>, which is marked complete at the end.
/// </summary>
/// <param name="ProgressStatus">Receives human-readable phase messages.</param>
/// <param name="ProgressPercentage">Receives percent progress (reset to 0 here; hashing reports into it).</param>
/// <param name="startingID">First chunk ID to assign; IDs increment from here.</param>
protected async Task AllocateBlocks(IProgress<string> ProgressStatus, IProgress<int> ProgressPercentage, uint startingID = 0)
{
    uint ID = startingID;

    ProgressStatus.Report("First pass hash caching...\r\n");

    await GenerateHashes(Files, ProgressPercentage);

    ProgressStatus.Report("Second pass writing...\r\n");

    ProgressStatus.Report("Allocating chunks...\r\n");
    ProgressPercentage.Report(0);

    // Dedicated chunk for .lst files; ID 0 (by default) is claimed up front.
    var lstChunk = new QueuedChunk(new List<ISubfile>(), ID++, DefaultCompression, 23);

    // Bunch duplicate files together. Relies on LINQ OrderBy being a stable sort:
    // sort by name first, then by extension, so same-extension files stay name-ordered.
    LinkedList<ISubfile> linkedSubfileList = new LinkedList<ISubfile>(
        Files
            .OrderBy(x => x.Name) // order by file name first
            .OrderBy(x => Path.GetExtension(x.Name) ?? x.Name)); // then by file type, preserving duplicate file order

    // First node seen for each MD5; later files with the same hash get moved
    // directly after it so duplicates become adjacent in the list.
    Dictionary<Md5Hash, LinkedListNode<ISubfile>> HashList = new Dictionary<Md5Hash, LinkedListNode<ISubfile>>();

    var node = linkedSubfileList.First;

    // NOTE(review): the condition stops at the last node (node.Next == null), so the
    // final file is never checked against HashList — a trailing duplicate would not be
    // relocated. Confirm whether this is intentional.
    while (node?.Next != null)
    {
        ISubfile file = node.Value;

        Md5Hash hash = file.Source.Md5;

        if (HashList.ContainsKey(hash))
        {
            // Duplicate: unlink it and reattach immediately after the first occurrence.
            var nextNode = node.Next;

            var originalNode = HashList[hash];

            linkedSubfileList.Remove(node);
            linkedSubfileList.AddAfter(originalNode, file);

            node = nextNode;
        }
        else
        {
            HashList.Add(hash, node);

            node = node.Next;
        }
    }

    var fileList = new Queue<ISubfile>(linkedSubfileList);

    ulong accumulatedSize = 0;

    var currentChunk = new QueuedChunk(new List<ISubfile>(), ID++, DefaultCompression, 23);

    Md5Hash? previousHash = null;

    while (fileList.Count > 0)
    {
        ISubfile file = fileList.Dequeue();

        // Same hash as the previous file: store alongside it in the current chunk
        // without counting its size again (duplicates share one stored payload).
        if (previousHash.HasValue &&
            previousHash.Value == file.Source.Md5)
        {
            currentChunk.Subfiles.Add(file);

            continue;
        }

        previousHash = file.Source.Md5;

        // NOTE(review): size is accumulated before the .lst/audio branches below, so
        // files that end up in lstChunk or an uncompressed audio chunk still inflate
        // accumulatedSize for currentChunk. Confirm whether that is intended.
        accumulatedSize += file.Source.Size;

        if (file.Name.EndsWith(".lst"))
        {
            lstChunk.Subfiles.Add(file);

            continue;
        }

        if (file.Type == ArchiveFileType.WaveAudio ||
            file.Type == ArchiveFileType.OpusAudio)
        {
            // Non-compressable data: assign it and any adjacent duplicates to a
            // dedicated uncompressed chunk of their own.
            List<ISubfile> opusFiles = new List<ISubfile>();

            opusFiles.Add(file);

            byte[] md5Template = file.Source.Md5;

            // Keep peeking and dequeuing until we find a different file.
            while (true)
            {
                if (fileList.Count == 0)
                {
                    break; // no more files, need to stop
                }

                ISubfile next = fileList.Peek();

                if (!Utility.CompareBytes(
                        md5Template,
                        next.Source.Md5))
                {
                    // We've stopped finding duplicates.
                    break;
                }

                opusFiles.Add(fileList.Dequeue());
            }

            var tempChunk = new QueuedChunk(opusFiles, ID++, ArchiveChunkCompression.Uncompressed, 0);

            QueuedChunks.Add(tempChunk);

            continue;
        }

        // NOTE(review): the cutoff compares file.Size while accumulation above uses
        // file.Source.Size — confirm the two are the same quantity. Also, after a
        // cutoff accumulatedSize resets to 0 even though this file is placed into the
        // fresh chunk, so its size is not counted toward the new chunk's limit.
        if (file.Size + accumulatedSize > ChunkSizeLimit)
        {
            // Cut off the chunk here.
            QueuedChunks.Add(currentChunk);

            accumulatedSize = 0;

            // Create a new chunk.
            currentChunk = new QueuedChunk(new List<ISubfile>(), ID++, DefaultCompression, 23);
        }

        currentChunk.Subfiles.Add(file);
    }

    if (currentChunk.Subfiles.Count > 0)
    {
        QueuedChunks.Add(currentChunk);
    }

    if (lstChunk.Subfiles.Count > 0)
    {
        QueuedChunks.Add(lstChunk);
    }

    // Signal consumers of the blocking collection that no more chunks will arrive.
    QueuedChunks.CompleteAdding();
}