Example #1
0
        /// <summary>
        /// Uploads one chunk of <paramref name="item"/>'s data stream as a single
        /// multipart part and advances the job's progress bookkeeping.
        /// </summary>
        /// <param name="job">Upload job carrying vault name, upload id, chunk size and current position.</param>
        /// <param name="item">Item whose data stream supplies the chunk bytes.</param>
        public async Task UploadChunkAsync(UploadJob job, UploadItem item)
        {
            //wrap the next ChunkSize bytes of the input stream as a stream of its own
            var partStream = GlacierUtils.CreatePartStream(item.DataStream, job.ChunkSize);

            //Glacier requires a tree hash per uploaded part
            var partChecksum = TreeHashGenerator.CalculateTreeHash(partStream);

            var request = new UploadMultipartPartRequest
            {
                VaultName = job.VaultName,
                Body      = partStream,
                Checksum  = partChecksum,
                UploadId  = job.UploadId
            };

            //tell Glacier which byte range of the archive this part covers (inclusive)
            var firstByte = job.CurrentPosition;
            var lastByte  = firstByte + partStream.Length - 1;
            request.SetRange(firstByte, lastByte);

            var response = await _client.UploadMultipartPartAsync(request);
            response.EnsureSuccess();

            //keep the checksum (needed later to complete the upload) and move the cursor
            job.ChunkChecksums.Add(partChecksum);
            job.CurrentPosition += partStream.Length;
        }
Example #2
0
        // Windows-service entry point: boots the Quartz scheduler and registers the
        // recurring upload job with it.
        protected override void OnStart(string[] args)
        {
            //NOTE(review): GetDefaultScheduler()/Start() are the synchronous Quartz APIs
            //(pre-3.x style) — confirm the Quartz version before upgrading this call site
            scheduler = StdSchedulerFactory.GetDefaultScheduler();
            scheduler.Start();

            UploadJob.Schedule(scheduler);
        }
Example #3
0
        /// <summary>
        /// Ensures the target vault exists and initiates a multipart upload into it.
        /// </summary>
        /// <param name="containerName">Name of the vault to upload into.</param>
        /// <param name="description">Archive description stored alongside the upload.</param>
        /// <param name="chunkSize">Optional part size in bytes; defaults to <c>DefaultChunkSize</c>.</param>
        /// <returns>An <see cref="UploadJob"/> describing the initiated upload.</returns>
        public async Task <UploadJob> InitializeAsync(
            string containerName, string description, long? chunkSize = null)
        {
            //FIX: resolve the effective part size once so the request's PartSize and the
            //job's ChunkSize can never disagree (previously the ?? was evaluated twice)
            var effectiveChunkSize = chunkSize ?? DefaultChunkSize;

            //create the vault if it does not exist yet
            await CreateVaultAsync(containerName);

            //prepare request
            var request = new InitiateMultipartUploadRequest
            {
                VaultName          = containerName,
                PartSize           = effectiveChunkSize,
                ArchiveDescription = description
            };

            //create the placeholder for the parts of the archive
            var response = await _client.InitiateMultipartUploadAsync(request);

            return new UploadJob
            {
                UploadId  = response.UploadId,
                VaultName = containerName,
                ChunkSize = effectiveChunkSize
            };
        }
Example #4
0
        /// <summary>
        /// Prints the upload progress of <paramref name="job"/> as a percentage and
        /// a completion message once it reaches 100%.
        /// </summary>
        /// <param name="job">The job whose progress is rendered.</param>
        public void PrintProgress(UploadJob job)
        {
            //BUG FIX: guard TotalSize == 0 — the old division produced Infinity/NaN and the
            //(int) cast of that is undefined; treat an empty upload as already complete
            var percent = job.TotalSize <= 0
                ? 100
                : (int)Math.Floor((double)job.CurrentPosition / job.TotalSize * 100);
            var fileName = Path.GetFileName(job.FilePath);

            PrintProgress(fileName, percent);

            if (percent == 100)
            {
                PrintDone();
            }
        }
Example #5
0
        // Uploads one small file and verifies the batch bookkeeping for it.
        public void UploadFile()
        {
            //arrange: a 17-byte temp file with a plain ASCII name
            var uploadBlob = new Blob(IOHelper.CreateTempFile("Just the content.")).SetFilename("myFile.doc");
            var uploadJob  = new UploadJob(uploadBlob);

            //act
            normalBatch = normalBatch.Upload(uploadJob).Result;

            //assert: first file, non-chunked ("normal") upload, full content size
            Assert.NotNull(normalBatch);
            Assert.NotNull(normalBatch.FileIndex);
            Assert.NotNull(normalBatch.UploadType);
            Assert.NotNull(normalBatch.UploadSize);
            Assert.Equal(0, normalBatch.FileIndex);
            Assert.Equal("normal", normalBatch.UploadType);
            Assert.Equal(17, normalBatch.UploadSize);
        }
Example #6
0
        // Uploads a file split into five chunks and verifies every chunk arrived.
        // (Method name keeps its historic spelling; it is part of the public surface.)
        public void UploadFileChuncked()
        {
            //arrange: pick a chunk size that splits the content into exactly five parts
            var uploadBlob = new Blob(IOHelper.CreateTempFile("This content is chunked. Seriously, really chunked!")).SetFilename("chunked.docx");
            var partSize   = (int)Math.Ceiling((double)uploadBlob.File.Length / 5);
            var uploadJob  = new UploadJob(uploadBlob);
            uploadJob.SetChunked(true);
            uploadJob.SetChunkSize(partSize);

            //act: upload, then fetch the server-side info for the file
            chunkedBatch = chunkedBatch.Upload(uploadJob).Result;
            var info = chunkedBatch.Info(chunkedBatch.FileIndex).Result;

            //assert: all five chunks were received
            Assert.NotNull(info);
            Assert.Equal(5, info.ChunkCount);
            Assert.Equal(5, info.UploadedChunkIds.Length);
        }
Example #7
0
        // Uploads a second file whose name contains non-ASCII characters that the
        // client must escape before putting them into HTTP headers.
        public void UploadAnotherFile()
        {
            //arrange: 17-byte content, filename mixing CJK characters and a '+'
            var uploadBlob = new Blob(IOHelper.CreateTempFile("Yet more content.")).SetFilename("行动计划 + test.pdf");
            var uploadJob  = new UploadJob(uploadBlob).SetFileId(1);

            //act
            normalBatch = normalBatch.Upload(uploadJob).Result;

            //assert: second file index, non-chunked upload, full content size
            Assert.NotNull(normalBatch);
            Assert.NotNull(normalBatch.FileIndex);
            Assert.NotNull(normalBatch.UploadType);
            Assert.NotNull(normalBatch.UploadSize);
            Assert.Equal(1, normalBatch.FileIndex);
            Assert.Equal("normal", normalBatch.UploadType);
            Assert.Equal(17, normalBatch.UploadSize);
        }
        /// <summary>
        /// Starts an asynchronous upload of a local file into a Storj bucket.
        /// </summary>
        /// <param name="bucketId">Id of the destination bucket.</param>
        /// <param name="fileName">Name the file will carry inside the bucket.</param>
        /// <param name="localPath">Path of the file on the local machine.</param>
        /// <param name="storj">The underlying Java Storj client.</param>
        /// <returns>An <see cref="UploadJob"/> tracking the upload.</returns>
        public static UploadJob UploadFile(string bucketId, string fileName, string localPath, io.storj.libstorj.Storj storj)
        {
            var uploadJob     = new UploadJob(fileName);
            var asyncCallback = new UploadFileCallbackAsync(uploadJob);

            try
            {
                //the returned handle identifies the upload (e.g. for cancelling it later)
                uploadJob.Id = storj.uploadFile(bucketId, fileName, localPath, asyncCallback);
            }
            catch (io.storj.libstorj.KeysNotFoundException)
            {
                //translate the Java binding's exception into this library's own type
                throw new KeysNotFoundException();
            }

            return uploadJob;
        }
Example #9
0
        // Verifies that data written through a SparseUploader from a scheduled job
        // ends up in the backing ComputeBuffer. Skipped where compute shaders are
        // unavailable, since the uploader relies on them.
        public void UploadFromJobs()
        {
            if (!SystemInfo.supportsComputeShaders)
            {
                Assert.Ignore("Skipped due to platform/computer not supporting compute shaders");
                return;
            }

            //4K structs, all zero-initialized, as the buffer's starting contents
            var initialData = new ExampleStruct[4 * 1024];
            var stride      = UnsafeUtility.SizeOf <ExampleStruct>();

            for (int i = 0; i < initialData.Length; ++i)
            {
                initialData[i] = new ExampleStruct {
                    someData = 0
                };
            }

            var buffer = new ComputeBuffer(initialData.Length, stride);

            buffer.SetData(initialData);

            var uploader = new SparseUploader(buffer);

            var job = new UploadJob();

            //reserve upload capacity for every element, then run the job over all of them
            //(64 = scheduling batch size). NOTE(review): UploadJob presumably writes element
            //index i into element i — only evidenced indirectly by the assertions below.
            job.uploader = uploader.Begin(initialData.Length * stride, initialData.Length);
            job.Schedule(initialData.Length, 64).Complete();

            uploader.EndAndCommit(job.uploader);

            //read the buffer back and check each element now holds its own index
            var resultingData = new ExampleStruct[initialData.Length];

            buffer.GetData(resultingData);

            for (int i = 0; i < resultingData.Length; ++i)
            {
                Assert.AreEqual(i, resultingData[i].someData);
            }

            uploader.Dispose();
            buffer.Dispose();
        }
Example #10
0
        /// <summary>
        /// Executes an <see cref="UploadJob"/>, either as a sequence of chunk POSTs or
        /// as a single POST of the whole file.
        /// </summary>
        /// <param name="job">The <see cref="UploadJob"/> to be executed.</param>
        /// <returns>A new and updated <see cref="Batch"/> instance of the current batch.</returns>
        public async Task <Batch> Upload(UploadJob job)
        {
            if (job.IsChunked)
            {
                int    readBytes, currentChunk = 0, chunkCount = (int)Math.Ceiling((double)job.Blob.File.Length / job.ChunkSize);
                byte[] buffer = new byte[job.ChunkSize];
                Batch  batch  = null;
                using (FileStream fs = job.Blob.File.OpenRead())
                {
                    while ((readBytes = fs.Read(buffer, 0, buffer.Length)) > 0)
                    {
                        //BUG FIX: the final chunk is normally shorter than the buffer; posting
                        //the whole buffer appended stale bytes left over from the previous
                        //read. Trim the payload to the bytes actually read.
                        byte[] payload = buffer;
                        if (readBytes < buffer.Length)
                        {
                            payload = new byte[readBytes];
                            Array.Copy(buffer, payload, readBytes);
                        }

                        batch = (Batch)await client.PostBin(UrlCombiner.Combine(Endpoint, job.FileId.ToString()),
                                                            null,
                                                            payload,
                                                            new Dictionary <string, string>() {
                            { "X-Upload-Type", "chunked" },
                            { "X-Upload-Chunk-Index", currentChunk.ToString() },
                            { "X-Upload-Chunk-Count", chunkCount.ToString() },
                            //headers must be ASCII, so the filename is percent-escaped
                            { "X-File-Name", Uri.EscapeDataString(job.Blob.Filename) },
                            { "X-File-Type", job.Blob.MimeType },
                            { "X-File-Size", job.Blob.File.Length.ToString() }
                        });

                        currentChunk++;
                    }
                }
                return(batch);
            }
            else
            {
                using (FileStream fs = job.Blob.File.OpenRead())
                {
                    return((Batch)await client.PostBin(UrlCombiner.Combine(Endpoint, job.FileId.ToString()),
                                                       null,
                                                       fs.ReadToEnd(),
                                                       new Dictionary <string, string>() {
                        { "X-File-Name", Uri.EscapeDataString(job.Blob.Filename) },
                        { "X-File-Type", job.Blob.MimeType }
                    }));
                }
            }
        }
Example #11
0
        // Helper to save files and create job in database
        //
        // Saves the first posted file under a GUID-based name inside the upload folder
        // and records an UploadJob row for it. Returns true on success, false otherwise.
        //
        // NOTE(review): the indicatorDataFile parameter is unused — the first file of the
        // current request is saved instead; confirm this is intentional before relying on it.
        public bool SaveFile(HttpPostedFileBase indicatorDataFile, UploadJobType jobType)
        {
            //BUG FIX: also guard against an empty file collection, which previously
            //threw on the [0] indexer below
            if (Request.Files == null || Request.Files.Count == 0)
            {
                return(false);
            }

            var guid           = Guid.NewGuid();
            var file           = Request.Files[0];
            var actualFileName = file.FileName;
            //store under a collision-free GUID name; the original name goes on the job
            var fileName       = guid + Path.GetExtension(file.FileName);

            try
            {
                if (!Directory.Exists(AppConfig.UploadFolder))
                {
                    Directory.CreateDirectory(AppConfig.UploadFolder);
                }

                file.SaveAs(Path.Combine(AppConfig.UploadFolder, fileName));
                var uploadJob = new UploadJob
                {
                    DateCreated = DateTime.Now,
                    Guid        = guid,
                    Filename    = actualFileName,
                    JobType     = jobType,
                    Status      = UploadJobStatus.NotStart,
                    UserId      = UserDetails.CurrentUser().Id
                };

                _fpmUploadRepository.CreateJob(uploadJob);
                return(true);
            }
            catch (Exception)
            {
                //best-effort: failure is reported through the return value
                //(the unused `ex` local was removed; it only triggered a compiler warning)
                //TODO(review): log the exception so failed uploads can be diagnosed
                return(false);
            }
        }
Example #12
0
        /// <summary>
        /// Completes a multipart Glacier upload and returns the resulting archive id.
        /// </summary>
        /// <param name="job">Upload job holding the upload id, vault name and per-chunk checksums.</param>
        /// <param name="item">Item being uploaded; supplies the total content length.</param>
        /// <returns>The id of the archive created by the completed upload.</returns>
        public async Task <string> FinishUploadAsync(UploadJob job, UploadItem item)
        {
            //fold the per-chunk checksums into the final tree hash Glacier expects
            var treeHash = TreeHashGenerator.CalculateTreeHash(job.ChunkChecksums);

            var request = new CompleteMultipartUploadRequest
            {
                UploadId    = job.UploadId,
                ArchiveSize = item.ContentLength.ToString(),
                Checksum    = treeHash,
                VaultName   = job.VaultName
            };

            //finish up the multipart upload and hand back the archive id
            var response = await _client.CompleteMultipartUploadAsync(request);

            return response.ArchiveId;
        }
Example #13
0
 /// <summary>
 /// Executes an <see cref="UploadJob"/>, either as a sequence of chunk POSTs or as a
 /// single POST of the whole file.
 /// </summary>
 /// <param name="job">The <see cref="UploadJob"/> to be executed.</param>
 /// <returns>A new and updated <see cref="Batch"/> instance of the current batch.</returns>
 public async Task<Batch> Upload(UploadJob job)
 {
     if (job.IsChunked)
     {
         int readBytes, currentChunk = 0, chunkCount = (int)Math.Ceiling((double)job.Blob.File.Length / job.ChunkSize);
         byte[] buffer = new byte[job.ChunkSize];
         Batch batch = null;
         using (FileStream fs = job.Blob.File.OpenRead())
         {
             while ((readBytes = fs.Read(buffer, 0, buffer.Length)) > 0)
             {
                 //BUG FIX: the final chunk is normally shorter than the buffer; posting the
                 //whole buffer appended stale bytes from the previous read. Trim it.
                 byte[] payload = buffer;
                 if (readBytes < buffer.Length)
                 {
                     payload = new byte[readBytes];
                     Array.Copy(buffer, payload, readBytes);
                 }

                 batch = (Batch)await client.PostBin(UrlCombiner.Combine(Endpoint, job.FileId.ToString()),
                                         null,
                                         payload,
                                         new Dictionary<string, string>() {
                                         { "X-Upload-Type", "chunked" },
                                         { "X-Upload-Chunk-Index", currentChunk.ToString() },
                                         { "X-Upload-Chunk-Count", chunkCount.ToString() },
                                         //BUG FIX: HTTP headers must be ASCII — percent-escape
                                         //the filename so non-ASCII names do not fail/corrupt
                                         { "X-File-Name", Uri.EscapeDataString(job.Blob.Filename) },
                                         { "X-File-Type", job.Blob.MimeType },
                                         { "X-File-Size", job.Blob.File.Length.ToString() }
                                         });
                 currentChunk++;
             }
         }
         return batch;
     }
     else
     {
         using (FileStream fs = job.Blob.File.OpenRead())
         {
             return (Batch)await client.PostBin(UrlCombiner.Combine(Endpoint, job.FileId.ToString()),
                                         null,
                                         fs.ReadToEnd(),
                                         new Dictionary<string, string>() {
                                             { "X-File-Name", Uri.EscapeDataString(job.Blob.Filename) },
                                             { "X-File-Type", job.Blob.MimeType }
                                         });
         }
     }
 }
Example #14
0
 // Uploads a file split into five chunks and checks that every chunk arrived.
 // (Method name keeps its historic spelling; it is part of the public surface.)
 public void UploadFileChuncked()
 {
     //arrange: chunk size chosen so the content splits into exactly five parts
     var uploadBlob = new Blob(IOHelper.CreateTempFile("This content is chunked. Seriously, really chunked!")).SetFilename("chunked.docx");
     var partSize   = (int)Math.Ceiling((double)uploadBlob.File.Length / 5);
     var uploadJob  = new UploadJob(uploadBlob);
     uploadJob.SetChunked(true);
     uploadJob.SetChunkSize(partSize);

     //act: upload, then query the server-side file info
     chunkedBatch = chunkedBatch.Upload(uploadJob).Result;
     var info = chunkedBatch.Info(chunkedBatch.FileIndex).Result;

     //assert: all five chunks were received
     Assert.NotNull(info);
     Assert.Equal(5, info.ChunkCount);
     Assert.Equal(5, info.UploadedChunkIds.Length);
 }
Example #15
0
 /// <summary>
 /// Notifies any subscribed listeners about the progress of an upload.
 /// Does nothing when no listener is attached.
 /// </summary>
 private void UpdateProgress(UploadJob job, UploadItem item)
 {
     //copy to a local so the delegate cannot become null between the check and the call
     var handler = ProgressEvent;
     if (handler != null)
     {
         handler(job, item);
     }
 }
Example #16
0
 /// <summary>
 /// Cancels an upload-job
 /// </summary>
 /// <param name="job">The job to cancel</param>
 /// <returns>True, if the job got cancelled; false if not</returns>
 public bool CancelUpload(UploadJob job)
 {
     //delegate the actual cancellation to the underlying Java Storj binding,
     //identified by the handle stored on the job when the upload was started
     return(_storjJava.cancelUpload(job.Id));
 }
 // Wraps the UploadJob so the asynchronous upload callbacks can update it.
 private UploadFileCallbackAsync(UploadJob job)
 {
     _job = job;
 }
Example #18
0
 // Uploads one small file and verifies the batch bookkeeping for it.
 public void UploadFile()
 {
     //arrange: a 17-byte temp file with a plain ASCII name
     var uploadBlob = new Blob(IOHelper.CreateTempFile("Just the content.")).SetFilename("myFile.doc");
     var uploadJob  = new UploadJob(uploadBlob);

     //act
     normalBatch = normalBatch.Upload(uploadJob).Result;

     //assert: first file, non-chunked ("normal") upload, full content size
     Assert.NotNull(normalBatch);
     Assert.NotNull(normalBatch.FileIndex);
     Assert.NotNull(normalBatch.UploadType);
     Assert.NotNull(normalBatch.UploadSize);
     Assert.Equal(0, normalBatch.FileIndex);
     Assert.Equal("normal", normalBatch.UploadType);
     Assert.Equal(17, normalBatch.UploadSize);
 }
Example #19
0
        /// <summary>
        /// Uploads a single file, throttled by the shared semaphore, assigning it the
        /// next sequential file id.
        /// </summary>
        /// <param name="path">Path of the file to upload.</param>
        /// <returns>The batch returned by the upload.</returns>
        private async Task<Batch> ProcessFile(string path)
        {
            //limit how many files are processed concurrently
            await semaphore.WaitAsync();
            try
            {
                if (!File.Exists(path))
                {
                    throw new FileNotFoundException(path);
                }

                //hand out sequential file ids under the counter lock
                int fileId;
                lock (syncCounter)
                {
                    fileId = processedFilesCounter++;
                }

                var job = new UploadJob(Blob.FromFile(path));
                job.SetFileId(fileId);
                job.SetChunked(IsChunkedUpload);
                job.SetChunkSize(ChunkSize);

                //the finally block below still releases the semaphore on this path
                return await UploadBlob(job);
            }
            finally
            {
                semaphore.Release();
            }
        }
Example #20
0
 // Runs the upload for one job, translating server-side failures into the
 // library's FailedToUploadException (original job description preserved as context).
 private async Task<Batch> UploadBlob(UploadJob job)
 {
     try
     {
         return await Batch.Upload(job);
     }
     catch (ServerErrorException exception)
     {
         //only server errors are wrapped; other exception types propagate unchanged
         throw new FailedToUploadException(job.ToString(), exception);
     }
 }
Example #21
0
 // Uploads a second file and checks it receives the next file index.
 public void UploadAnotherFile()
 {
     //arrange: 17-byte content; filename kept byte-exact (historic "anoterFile" spelling)
     var uploadBlob = new Blob(IOHelper.CreateTempFile("Yet more content.")).SetFilename("anoterFile.pdf");
     var uploadJob  = new UploadJob(uploadBlob).SetFileId(1);

     //act
     normalBatch = normalBatch.Upload(uploadJob).Result;

     //assert: second file index, non-chunked upload, full content size
     Assert.NotNull(normalBatch);
     Assert.NotNull(normalBatch.FileIndex);
     Assert.NotNull(normalBatch.UploadType);
     Assert.NotNull(normalBatch.UploadSize);
     Assert.Equal(1, normalBatch.FileIndex);
     Assert.Equal("normal", normalBatch.UploadType);
     Assert.Equal(17, normalBatch.UploadSize);
 }