Example #1
        public static void Main(string[] args)
        {
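            // Route all log output through the console logger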
            Logger.Instance = ConsoleLogger.Instance;

            if (args.Length == 0)
            {
                Console.WriteLine("No arguments");
                Console.ReadKey();
                return;
            }

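            // Every argument must be an existing multi-file set or directory; otherwise report the problem and exit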
            foreach (string arg in args)
            {
                if (MultiFileStream.Exists(arg))
                {
                    continue;
                }
                if (DirectoryUtils.Exists(arg))
                {
                    continue;
                }
                Console.WriteLine(MultiFileStream.IsMultiFile(arg) ?
                                  $"File '{arg}' doesn't has all parts for combining" :
                                  $"Neither file nor directory with path '{arg}' exists");
                Console.ReadKey();
                return;
            }

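            // All inputs exist; hand them off for loading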
            Program program = new Program();

            program.Load(args);
            Console.ReadKey();
        }
        // =====================================================
        // Methods
        // =====================================================

        private bool ProcessInputFiles(string[] files)
        {
            if (files.Length == 0)
            {
                return false;
            }

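            // Reject the input if any entry is neither a complete multi-file set nor a directory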
            foreach (string file in files)
            {
                if (MultiFileStream.Exists(file))
                {
                    continue;
                }
                if (DirectoryUtils.Exists(file))
                {
                    continue;
                }
                Logger.Log(LogType.Warning, LogCategory.General, MultiFileStream.IsMultiFile(file) ?
                           $"File '{file}' doesn't have all parts for combining" :
                           $"Neither file nor directory with path '{file}' exists");
                return false;
            }

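            // Update the UI state and load the files on a thread-pool worker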
            IntroText.Text    += "Loading files...";
            MainGrid.AllowDrop = false;
            m_processingFiles  = files;

            ThreadPool.QueueUserWorkItem(new WaitCallback(LoadFiles), files);
            return true;
        }
Example #3
        private void compressFilesSolid(ulong numStreams, Dictionary<ulong, ulong> streamToFileIndex, SevenZipProgressProvider progressProvider)
        {
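            // Compress all source streams into a single solid LZMA block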
            var sc = new SevenZipStreamsCompressor(stream);

            sc.Method = Compression.Registry.Method.LZMA;

            Trace.TraceInformation($"Compressing `{numStreams} files` into a solid block...");

            // actual compression using a sequence file stream and stream compressor
            var inputStream = new MultiFileStream(
                FileAccess.Read,
                streamToFileIndex.Select(sfi => _Files[(int)sfi.Value].Source).ToArray());

            SevenZipStreamsCompressor.PackedStream cs = sc.Compress(inputStream, progressProvider);

            // build headers
            var streamsInfo = this.header.RawHeader.MainStreamsInfo;

            streamsInfo.PackInfo = new SevenZipHeader.PackInfo()
            {
                NumPackStreams = cs.NumStreams,
                PackPos        = 0,
                Sizes          = cs.Sizes,
                Digests        = new SevenZipHeader.Digests(cs.NumStreams)
                {
                    CRCs = cs.CRCs
                }
            };
            streamsInfo.UnPackInfo = new SevenZipHeader.UnPackInfo()
            {
                NumFolders = 1,
                Folders    = new SevenZipHeader.Folder[1]
                {
                    cs.Folder
                }
            };
            streamsInfo.UnPackInfo.Folders[0].UnPackCRC = null;
            streamsInfo.SubStreamsInfo = new SevenZipHeader.SubStreamsInfo(streamsInfo.UnPackInfo)
            {
                NumUnPackStreamsInFolders = new UInt64[1]
                {
                    numStreams
                },
                NumUnPackStreamsTotal = numStreams,
                UnPackSizes           = new List<UInt64>((int)numStreams),
                Digests = new SevenZipHeader.Digests(numStreams)
            };
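            // Record each file's unpacked size and CRC in the substreams info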
            for (ulong i = 0; i < numStreams; ++i)
            {
                streamsInfo.SubStreamsInfo.UnPackSizes.Add((UInt64)inputStream.Sizes[i]);
                streamsInfo.SubStreamsInfo.Digests.CRCs[i] = inputStream.CRCs[i];
            }
        }
        public void SplitFiles()
        {
            // Get the test set
            var testData = CreateRandomSet();

            // Get the number of files to use
            var fileSet = Enumerable.Range( 0, Generator.Next( 4, 7 ) ).Select( i => GetUniqueFile() ).ToArray();

            // Split input data
            int blockSize = testData.Length / fileSet.Length;

            // Process
            for (int i = fileSet.Length; i > 0; )
            {
                // Get the size
                int end;
                if (i-- == fileSet.Length)
                    end = testData.Length;
                else
                    end = (i + 1) * blockSize;

                // Get the start
                int start = i * blockSize;

                // Copy
                using (var stream = File.Create( fileSet[i].FullName ))
                    stream.Write( testData, start, end - start );
            }

            // Load all but the last
            using (var stream = new MultiFileStream( fileSet.Take( fileSet.Length - 1 ) ))
            {
                // Get length
                long length = fileSet.Take( fileSet.Length - 1 ).Sum( f => f.Length );

                // Check size and name
                Assert.AreEqual( Path.GetFileNameWithoutExtension( fileSet[0].Name ), stream.Name, "name" );
                Assert.AreEqual( length, stream.Length, "partial length" );

                // Validate sub set
                Validate( stream, null, testData, 0, (int) length );

                // Get length
                length += fileSet[fileSet.Length - 1].Length;

                // Add the last file
                stream.Add( fileSet[fileSet.Length - 1] );

                // Check size 
                Assert.AreEqual( length, stream.Length, "full length" );

                // Validate full set
                Validate( stream, 0, testData, 0, (int) length );

                // Random read
                for (int c = 1000; c-- > 0; )
                {
                    // Find length to process
                    int count = Generator.Next( testData.Length / 10 );

                    // Find some position to start at
                    int offset = Generator.Next( 0, testData.Length - count );

                    // Read it
                    Validate( stream, offset, testData, offset, count );
                }

                // Reset
                stream.Position = 0;

                // Find length to process
                int part = Generator.Next( testData.Length / 20 );

                // Full partial read
                for (int offset = 0; offset < testData.Length; offset += part)
                {
                    // Process part
                    Validate( stream, null, testData, offset, Math.Min( part, testData.Length - offset ) );
                }

                // Create extended buffer
                byte[] extended = new byte[2 * testData.Length];

                // Reset
                stream.Position = 0;

                // Read extended buffer
                Assert.AreEqual( testData.Length, stream.Read( extended, 0, extended.Length ), "Clip (1)" );
                Assert.AreEqual( 0, stream.Read( extended, 0, extended.Length ), "Clip (2)" );
            }
        }