        public void TestZipUnzipRandomSucceeds()
        {
            //// ARRANGE

            var source = new byte[992];

            var r = new Random(42);

            r.NextBytes(source);

            //// ACT

            var deflatedSource = DeflateHelper.Zip(source);

            Assert.AreEqual(997, deflatedSource.Length);

            var unzipped = DeflateHelper.Unzip(deflatedSource);

            //// ASSERT

            Assert.AreEqual(992, unzipped.Length);

            for (int i = 0; i < 992; i++)
            {
                Assert.AreEqual(source[i], unzipped[i]);
            }
        }

        public void TestZipUnzipSucceeds()
        {
            //// ARRANGE

            var source = new byte[992];

            for (int i = 0; i < source.Length; i++)
            {
                source[i] = 42;
            }

            //// ACT

            var deflatedSource = DeflateHelper.Zip(source);

            Assert.AreEqual(11, deflatedSource.Length);

            var unzipped = DeflateHelper.Unzip(deflatedSource);

            //// ASSERT

            Assert.AreEqual(992, unzipped.Length);

            for (int i = 0; i < 992; i++)
            {
                Assert.AreEqual(source[i], unzipped[i]);
            }
        }
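The DeflateHelper.Zip/Unzip pair exercised by these tests is not shown on this page. The sketch below is one plausible shape for it, assuming raw DEFLATE via System.IO.Compression; the exact lengths asserted above (997 and 11 bytes) depend on the real helper's compressor settings. It also illustrates why the two tests expect such different sizes: 992 random bytes are essentially incompressible and pick up a few bytes of framing overhead, while 992 identical bytes collapse to a handful of bytes.

// Sketch only: a plausible DeflateHelper built on DeflateStream. Names and
// settings are assumptions; the asserted sizes above come from the real helper.
using System.IO;
using System.IO.Compression;

public static class DeflateHelperSketch
{
    public static byte[] Zip(byte[] source)
    {
        using (var output = new MemoryStream())
        {
            using (var deflate = new DeflateStream(output, CompressionLevel.Optimal, leaveOpen: true))
            {
                deflate.Write(source, 0, source.Length);
            }

            // DeflateStream flushes its final block on dispose, so read the buffer afterwards.
            return output.ToArray();
        }
    }

    public static byte[] Unzip(byte[] compressed)
    {
        using (var input = new MemoryStream(compressed))
        using (var deflate = new DeflateStream(input, CompressionMode.Decompress))
        using (var output = new MemoryStream())
        {
            deflate.CopyTo(output);
            return output.ToArray();
        }
    }
}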
Example #3
        /// <inheritdoc/>
        public async Task<UInt32> TransformChunkAsync(
            Stream input,
            UInt32 inputChunkSize,
            Stream output,
            NefsDataTransform transform,
            NefsProgress p)
        {
            using (var transformedStream = new MemoryStream())
            {
                // Copy raw chunk to temp stream
                await input.CopyPartialAsync(transformedStream, inputChunkSize, p.CancellationToken);

                transformedStream.Seek(0, SeekOrigin.Begin);

                // Compress
                if (transform.IsZlibCompressed)
                {
                    using (var tempStream = new MemoryStream())
                    {
                        await DeflateHelper.DeflateAsync(transformedStream, (int)inputChunkSize, tempStream);

                        tempStream.Seek(0, SeekOrigin.Begin);

                        transformedStream.Seek(0, SeekOrigin.Begin);
                        await tempStream.CopyPartialAsync(transformedStream, tempStream.Length, p.CancellationToken);

                        transformedStream.Seek(0, SeekOrigin.Begin);
                        transformedStream.SetLength(tempStream.Length);
                    }
                }

                // Encrypt
                if (transform.IsAesEncrypted)
                {
                    using (var aesManager = this.CreateAesManager(transform.Aes256Key))
                        using (var cryptoStream = new CryptoStream(transformedStream, aesManager.CreateEncryptor(), CryptoStreamMode.Read, leaveOpen: true))
                            using (var tempStream = new MemoryStream())
                            {
                                await cryptoStream.CopyToAsync(tempStream, p.CancellationToken);

                                tempStream.Seek(0, SeekOrigin.Begin);

                                transformedStream.Seek(0, SeekOrigin.Begin);
                                await tempStream.CopyPartialAsync(transformedStream, tempStream.Length, p.CancellationToken);

                                transformedStream.Seek(0, SeekOrigin.Begin);
                                transformedStream.SetLength(tempStream.Length);
                            }
                }

                // Copy transformed chunk to output stream
                await transformedStream.CopyToAsync(output, p.CancellationToken);

                // Return size of transformed chunk
                return (uint)transformedStream.Length;
            }
        }
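CopyPartialAsync is not a BCL method; the calls above imply a stream extension that copies at most a given number of bytes and honors the cancellation token. A minimal sketch under that assumption:

// Hypothetical sketch of the CopyPartialAsync extension used above: copy up to
// `count` bytes from `source` to `destination`, stopping early at end of stream.
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;

public static class StreamExtensionsSketch
{
    public static async Task CopyPartialAsync(
        this Stream source, Stream destination, long count, CancellationToken cancellationToken)
    {
        var buffer = new byte[81920];
        var remaining = count;

        while (remaining > 0)
        {
            var toRead = (int)Math.Min(buffer.Length, remaining);
            var read = await source.ReadAsync(buffer, 0, toRead, cancellationToken);
            if (read == 0)
            {
                break; // Source exhausted before `count` bytes were copied.
            }

            await destination.WriteAsync(buffer, 0, read, cancellationToken);
            remaining -= read;
        }
    }
}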
Example #4
        /// <inheritdoc/>
        public async Task<NefsItemSize> CompressAsync(
            Stream input,
            Int64 inputOffset,
            UInt32 inputLength,
            Stream output,
            Int64 outputOffset,
            UInt32 chunkSize,
            NefsProgress p)
        {
            var chunkSizes = new List<UInt32>();

            input.Seek(inputOffset, SeekOrigin.Begin);
            output.Seek(outputOffset, SeekOrigin.Begin);

            // Split file into chunks and compress them
            using (var t = p.BeginTask(1.0f, "Compressing stream"))
            {
                var lastChunkSize  = 0;
                var totalChunkSize = 0;
                var lastBytesRead  = 0;
                var bytesRemaining = (int)inputLength;

                // Determine how many chunks to split file into
                var numChunks = (int)Math.Ceiling(inputLength / (double)chunkSize);

                for (var i = 0; i < numChunks; ++i)
                {
                    using (var st = p.BeginSubTask(1.0f / numChunks, $"Compressing chunk {i + 1}/{numChunks}"))
                    {
                        var nextBytes = Math.Min(chunkSize, bytesRemaining);

                        // Compress this chunk and write it to the output file
                        (lastBytesRead, lastChunkSize) = await DeflateHelper.DeflateAsync(input, (int)nextBytes, output, p.CancellationToken);

                        totalChunkSize += lastChunkSize;
                        bytesRemaining -= lastBytesRead;

                        // Record the total compressed size after this chunk
                        chunkSizes.Add((UInt32)totalChunkSize);
                    }
                }
            }

            // Return item size
            return new NefsItemSize(inputLength, chunkSizes);
        }
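The DeflateHelper.DeflateAsync overload used here is also not listed; judging from its call sites it reads up to a requested number of bytes from the input, deflates them onto the output, and returns both the bytes consumed and the compressed size. A sketch under those assumptions (the single ReadAsync is a simplification; the real helper may loop until the chunk is full):

// Sketch only, shaped after the call sites above; not the project's actual helper.
using System.IO;
using System.IO.Compression;
using System.Threading;
using System.Threading.Tasks;

public static class DeflateAsyncSketch
{
    public static async Task<(int BytesRead, int ChunkSize)> DeflateAsync(
        Stream input, int numBytes, Stream output, CancellationToken cancellationToken = default)
    {
        // Read up to numBytes from the input (a real implementation may loop to fill the chunk).
        var buffer = new byte[numBytes];
        var bytesRead = await input.ReadAsync(buffer, 0, numBytes, cancellationToken);

        // Deflate the chunk directly onto the output stream and measure what was written.
        var start = output.Position;
        using (var deflate = new DeflateStream(output, CompressionLevel.Optimal, leaveOpen: true))
        {
            await deflate.WriteAsync(buffer, 0, bytesRead, cancellationToken);
        }

        return (bytesRead, (int)(output.Position - start));
    }
}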
Example #5
        /// <summary>
        /// Compresses the specified input file and updates the archive header entries. The item
        /// is marked as pending injection so the compressed data is injected when the archive
        /// is saved.
        /// </summary>
        /// <param name="inputFilePath">Path to the uncompressed file to compress and inject.</param>
        /// <param name="p">Progress reporting info.</param>
        public void Inject(string inputFilePath, NefsProgressInfo p)
        {
            float taskWeightPrep           = 0.05f;
            float taskWeightCompress       = 0.85f;
            float taskWeightUpdateMetadata = 0.05f;
            float taskWeightCleanup        = 0.05f;

            p.BeginTask(taskWeightPrep, "Preparing file injection...");

            /* Prepare the temporary working directory */
            if (!Directory.Exists(FilePathHelper.TempDirectory))
            {
                Directory.CreateDirectory(FilePathHelper.TempDirectory);
            }

            /* Create a temporary working directory to compress the input file in */
            var workDir = Path.Combine(FilePathHelper.TempDirectory, _archive.FilePathHash, FilePathInArchiveHash);

            /* Delete the working directory if exists and recreate it */
            if (Directory.Exists(workDir))
            {
                Directory.Delete(workDir, true);
            }

            Directory.CreateDirectory(workDir);

            p.EndTask();

            /*
             * SPLIT INPUT FILE INTO CHUNKS AND COMPRESS THEM
             */
            p.BeginTask(taskWeightCompress, "Compressing file...");

            int compressedSizeDiff = 0;
            int currentChunk       = 0;
            var destFilePath       = Path.Combine(workDir, "inject.dat");
            int numChunks          = 0;
            int numChunksDiff      = 0;
            int oldNumChunks       = ChunkSizes.Count();

            /* Open the input file */
            using (var inputFile = new FileStream(inputFilePath, FileMode.Open))
            {
                inputFile.Seek(0, SeekOrigin.Begin);

                /* Determine how many chunks to split file into */
                numChunks      = (int)Math.Ceiling(inputFile.Length / (double)CHUNK_SIZE);
                _extractedSize = (UInt32)inputFile.Length;

                /* Clear out chunk sizes list so we can rebuild it */
                ChunkSizes.Clear();

                int lastBytesRead  = 0;
                int totalBytesRead = 0;
                int lastChunkSize  = 0;
                int totalChunkSize = 0;

                using (var outputFile = new FileStream(destFilePath, FileMode.Create))
                {
                    do
                    {
                        p.BeginTask(1.0f / (float)numChunks, String.Format("Compressing chunk {0}/{1}...", currentChunk + 1, numChunks));

                        /* Compress this chunk and write it to the output file */
                        lastBytesRead = DeflateHelper.DeflateToFile(inputFile, CHUNK_SIZE, outputFile, out lastChunkSize);

                        totalBytesRead += lastBytesRead;
                        totalChunkSize += lastChunkSize;
                        currentChunk++;

                        /* Record the total compressed size after this chunk */
                        ChunkSizes.Add((uint)totalChunkSize);

                        p.EndTask();
                    } while (lastBytesRead == CHUNK_SIZE);

                    /* Get difference in number of chunks compared to item we replaced */
                    numChunksDiff = numChunks - oldNumChunks;

                    /* Get the difference in compressed size */
                    compressedSizeDiff = (int)outputFile.Length - (int)_compressedSize;

                    /* Update new compressed size */
                    _compressedSize = (uint)outputFile.Length;

                    /* Quick sanity check */
                    if (_compressedSize != totalChunkSize)
                    {
                        log.Error("Compressed file size different than what was expected.");
                    }
                }
            }

            /* Quick sanity check */
            if (currentChunk != numChunks)
            {
                log.Error("Did not create the expected number of chunks.");
            }

            p.EndTask();

            /*
             * UPDATE METADATA FOR ITEMS AFTER THIS ONE
             */
            p.BeginTask(taskWeightUpdateMetadata, "Updating archive metadata...");

            /* Update data offsets for each item AFTER this one */
            for (int i = 0; i < Archive.Items.Count; i++)
            {
                var item = Archive.Items[i];

                /* Directories don't have data offsets, skip them */
                if (item.Type == NefsItemType.Directory)
                {
                    continue;
                }

                /* Update an item's data offset if it was after the item we are updating now */
                if (item.DataOffset > this.DataOffset)
                {
                    /* Update the data offset (compute as signed so underflow can be detected) */
                    UInt64 prevOffset = item.DataOffset;
                    long newOffset    = (long)prevOffset + compressedSizeDiff;

                    if (newOffset < 0)
                    {
                        throw new Exception(String.Format(
                                                "New data offset less than zero. [file={0}]",
                                                item.Filename));
                    }

                    item.DataOffset = (UInt64)newOffset;
                }

                /* Skip the weird 0xFFFFFFFF offsets into pt 4 */
                if (item.OffsetIntoPt4Raw == 0xFFFFFFFF)
                {
                    continue;
                }

                /* Update an item's part 4 offset if it was after the item we are updating now */
                if (item.OffsetIntoPt4Raw > this.OffsetIntoPt4Raw)
                {
                    /* Update the header part 4 offset */
                    int prevOffsetIntoPt4Raw = (int)item.OffsetIntoPt4Raw;
                    int newOffsetIntoPt4Raw  = prevOffsetIntoPt4Raw + numChunksDiff;

                    if (newOffsetIntoPt4Raw < 0)
                    {
                        throw new Exception(String.Format(
                                                "New offset into part 4 less than zero. [file={0}]",
                                                item.Filename));
                    }

                    item.OffsetIntoPt4Raw = (UInt32)newOffsetIntoPt4Raw;
                }
            }

            p.EndTask();

            /*
             * CLEANUP
             */
            p.BeginTask(taskWeightCleanup, "Cleaning up...");

            /* Whenever the archive is saved, this data will be injected */
            _pendingInjection = true;
            _fileToInject     = destFilePath;
            _archive.Modified = true;

            p.EndTask();
        }
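Note that the ChunkSizes list rebuilt above stores cumulative compressed sizes, not individual chunk lengths. A small illustrative helper (hypothetical, not part of the archive code) shows how a single chunk's length can be recovered from adjacent entries:

// Illustration only: recover an individual chunk length from the cumulative list.
using System.Collections.Generic;

public static class ChunkSizeHelperSketch
{
    public static uint GetChunkLength(IReadOnlyList<uint> cumulativeChunkSizes, int chunkIndex)
    {
        return chunkIndex == 0
            ? cumulativeChunkSizes[0]
            : cumulativeChunkSizes[chunkIndex] - cumulativeChunkSizes[chunkIndex - 1];
    }
}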
Example #6
        /// <summary>
        /// This method is called whenever the module is sent a message from the EdgeHub.
        /// It simply pipes the messages through without any change and prints every
        /// incoming message.
        /// </summary>
        private static async Task<MessageResponse> PipeMessageInputOne(Message message, object userContext)
        {
            int counterValue = Interlocked.Increment(ref counter);

            var moduleClient = userContext as ModuleClient;

            if (moduleClient == null)
            {
                throw new InvalidOperationException("UserContext doesn't contain expected values");
            }

            var connectionDeviceId = message.ConnectionDeviceId;

            var connectionModuleId = message.ConnectionModuleId;

            byte[] messageBytes = message.GetBytes();

            if (message.ContentEncoding == "gzip" &&
                message.ContentType == "application/zip")
            {
                var zippedLength = messageBytes.Length;
                messageBytes = GZipHelper.Unzip(messageBytes);
                System.Console.WriteLine($"Uncompressed from GZIP {zippedLength} bytes to {messageBytes.Length} bytes");
            }

            if (message.ContentEncoding == "deflate" &&
                message.ContentType == "application/zip")
            {
                var zippedLength = messageBytes.Length;
                messageBytes = DeflateHelper.Unzip(messageBytes);
                System.Console.WriteLine($"Decompressed Deflate message from {zippedLength} bytes to {messageBytes.Length} bytes");
            }

            string messageString = Encoding.UTF8.GetString(messageBytes);

            Console.WriteLine($"-> Received echo message: {counterValue}, Body: '{messageString}'");

            if (!string.IsNullOrEmpty(messageString))
            {
                var messageBody = JsonConvert.DeserializeObject(messageString);

                var moduleOutput = _moduleOutputs.GetModuleOutput("output1");

                if (moduleOutput != null)
                {
                    moduleOutput.Properties.Clear();

                    foreach (var prop in message.Properties)
                    {
                        moduleOutput.Properties.Add(prop.Key, prop.Value);

                        Console.WriteLine($"Property added: key:'{prop.Key}' value:'{prop.Value}'");
                    }

                    if (!string.IsNullOrEmpty(connectionDeviceId))
                    {
                        Console.WriteLine($"connectionDeviceId: '{connectionDeviceId}'");
                    }

                    if (!string.IsNullOrEmpty(connectionModuleId))
                    {
                        Console.WriteLine($"ConnectionModuleId: '{connectionModuleId}'");
                    }

                    await moduleOutput.SendMessage(messageBody);

                    Console.WriteLine("Received message echoed");
                }
            }

            return MessageResponse.Completed;
        }
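For reference, the sender side that this handler expects would compress the body and stamp the matching system properties on the message. A hedged example follows; GZipHelper.Zip is assumed to be the counterpart of the GZipHelper.Unzip call above.

// Hypothetical sending-side example matching the content checks above.
using System.Text;
using Microsoft.Azure.Devices.Client;

public static class CompressedMessageSketch
{
    public static Message BuildCompressedMessage(string json)
    {
        var body = GZipHelper.Zip(Encoding.UTF8.GetBytes(json)); // assumed Zip counterpart

        return new Message(body)
        {
            ContentEncoding = "gzip",
            ContentType = "application/zip",
        };
    }
}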