Example #1
File: Md5Hash.cs  Project: aa2g/PPeX
        //Accepts another Md5Hash, a raw byte[] digest, or a string
        public override bool Equals(object obj)
        {
            if (obj is Md5Hash hash)
            {
                return Utility.CompareBytes(Hash, hash.Hash);
            }

            if (obj is byte[] bytes)
            {
                return Utility.CompareBytes(Hash, bytes);
            }

            if (obj is string s)
            {
                //relies on Md5Hash defining a conversion from string
                return Equals((Md5Hash)s);
            }

            return false;
        }
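Utility.CompareBytes is PPeX's own helper and its body is not shown on this page. As a rough sketch (an assumption for illustration, not the project's actual implementation), a byte-wise comparison like this could look like:

        //hypothetical sketch of a byte-wise comparison helper; PPeX's real
        //Utility.CompareBytes may be implemented or optimized differently
        public static bool CompareBytes(byte[] a, byte[] b)
        {
            if (ReferenceEquals(a, b))
            {
                return true;
            }

            if (a == null || b == null || a.Length != b.Length)
            {
                return false;
            }

            for (int i = 0; i < a.Length; i++)
            {
                if (a[i] != b[i])
                {
                    return false;
                }
            }

            return true;
        }

Because Equals(object) is overridden here, the type is also expected to override GetHashCode so that equal hashes behave consistently in hash-based collections such as the Dictionary used in Example #3.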
Example #2
 public bool Equals(byte[] x, byte[] y)
 {
     return Utility.CompareBytes(x, y);
 }
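This overload has the exact shape of IEqualityComparer<byte[]>.Equals. Assuming the surrounding type (not shown in this snippet) implements that interface, it also needs a GetHashCode that agrees with this Equals. The sketch below is an illustration only, and the comparer name ByteArrayComparer is a placeholder rather than PPeX's actual type name:

 //hypothetical companion method; equal arrays must produce equal hash codes
 public int GetHashCode(byte[] obj)
 {
     if (obj == null || obj.Length == 0)
     {
         return 0;
     }

     unchecked
     {
         //cheap illustrative hash: fold the bytes together
         int hash = 17;

         foreach (byte b in obj)
         {
             hash = hash * 31 + b;
         }

         return hash;
     }
 }

 //usage sketch: compare byte[] keys by content instead of by reference
 //("ByteArrayComparer" is a placeholder name for the implementing type)
 var seen = new HashSet<byte[]>(new ByteArrayComparer());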
Example #3
        protected async Task AllocateBlocks(IProgress<string> ProgressStatus, IProgress<int> ProgressPercentage, uint startingID = 0)
        {
            uint ID = startingID;

            ProgressStatus.Report("First pass hash caching...\r\n");

            await GenerateHashes(Files, ProgressPercentage);


            ProgressStatus.Report("Second pass writing...\r\n");

            ProgressStatus.Report("Allocating chunks...\r\n");
            ProgressPercentage.Report(0);

            //Create a LST chunk
            var lstChunk = new QueuedChunk(new List<ISubfile>(), ID++, DefaultCompression, 23);

            //bunch duplicate files together
            //going to assume OrderBy is a stable sort
            LinkedList<ISubfile> linkedSubfileList = new LinkedList<ISubfile>(
                Files
                .OrderBy(x => x.Name)                                //order by file name first
                .OrderBy(x => Path.GetExtension(x.Name) ?? x.Name)); //then we order by file type, preserving duplicate file order

            Dictionary<Md5Hash, LinkedListNode<ISubfile>> HashList = new Dictionary<Md5Hash, LinkedListNode<ISubfile>>();

            var node = linkedSubfileList.First;

            while (node != null)
            {
                ISubfile file = node.Value;
                Md5Hash  hash = file.Source.Md5;

                if (HashList.ContainsKey(hash))
                {
                    var nextNode = node.Next;

                    var originalNode = HashList[hash];

                    linkedSubfileList.Remove(node);
                    linkedSubfileList.AddAfter(originalNode, file);

                    node = nextNode;
                }
                else
                {
                    HashList.Add(hash, node);

                    node = node.Next;
                }
            }

            var fileList = new Queue<ISubfile>(linkedSubfileList);

            ulong accumulatedSize = 0;

            var currentChunk = new QueuedChunk(new List<ISubfile>(), ID++, DefaultCompression, 23);
            Md5Hash? previousHash = null;

            while (fileList.Count > 0)
            {
                ISubfile file = fileList.Dequeue();

                if (previousHash.HasValue && previousHash.Value == file.Source.Md5)
                {
                    currentChunk.Subfiles.Add(file);
                    continue;
                }

                previousHash = file.Source.Md5;

                accumulatedSize += file.Source.Size;

                if (file.Name.EndsWith(".lst"))
                {
                    lstChunk.Subfiles.Add(file);
                    continue;
                }

                if (file.Type == ArchiveFileType.WaveAudio || file.Type == ArchiveFileType.OpusAudio)
                {
                    //non-compressible data, assign it and any duplicates to a new chunk

                    List<ISubfile> opusFiles = new List<ISubfile>();

                    opusFiles.Add(file);

                    byte[] md5Template = file.Source.Md5;

                    //keep peeking and dequeuing until we hit a file with a different hash
                    while (true)
                    {
                        if (fileList.Count == 0)
                        {
                            break; //no more files, need to stop
                        }
                        ISubfile next = fileList.Peek();

                        if (!Utility.CompareBytes(
                                md5Template,
                                next.Source.Md5))
                        {
                            //we've stopped finding duplicates
                            break;
                        }

                        opusFiles.Add(fileList.Dequeue());
                    }

                    var tempChunk = new QueuedChunk(opusFiles, ID++, ArchiveChunkCompression.Uncompressed, 0);
                    QueuedChunks.Add(tempChunk);

                    continue;
                }

                if (file.Size + accumulatedSize > ChunkSizeLimit)
                {
                    //cut off the chunk here
                    QueuedChunks.Add(currentChunk);

                    accumulatedSize = 0;

                    //create a new chunk
                    currentChunk = new QueuedChunk(new List<ISubfile>(), ID++, DefaultCompression, 23);
                }

                currentChunk.Subfiles.Add(file);
            }

            if (currentChunk.Subfiles.Count > 0)
            {
                QueuedChunks.Add(currentChunk);
            }

            if (lstChunk.Subfiles.Count > 0)
            {
                QueuedChunks.Add(lstChunk);
            }

            QueuedChunks.CompleteAdding();
        }
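The duplicate-bunching pass above relies on two things: LINQ's OrderBy being a stable sort, and a dictionary keyed by MD5 that lets every later duplicate be relocated directly after its first occurrence, so that the single previousHash check in the writing loop is enough to keep duplicates in the same chunk. A stripped-down, self-contained sketch of that relocation technique is shown below; the Entry record and the Bunch method are stand-ins for illustration, not PPeX's real ISubfile/Md5Hash types:

using System.Collections.Generic;
using System.Linq;

//stand-in type for illustration (C# 9 record for brevity);
//PPeX's ISubfile carries far more state
public record Entry(string Name, string Hash);

public static class DuplicateBunching
{
    //moves every later duplicate (same Hash) directly after its first
    //occurrence, otherwise preserving the incoming order
    public static List<Entry> Bunch(IEnumerable<Entry> input)
    {
        var list      = new LinkedList<Entry>(input);
        var firstSeen = new Dictionary<string, LinkedListNode<Entry>>();

        var node = list.First;

        while (node != null)
        {
            var next = node.Next;

            if (firstSeen.TryGetValue(node.Value.Hash, out var original))
            {
                //duplicate: pull it up next to the first occurrence
                list.Remove(node);
                list.AddAfter(original, node.Value);
            }
            else
            {
                firstSeen[node.Value.Hash] = node;
            }

            node = next;
        }

        return list.ToList();
    }
}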