/// <summary>
/// Executes a TransferUtility directory download for the cmdlet: builds the request
/// from the cmdlet context, wires progress tracking, runs the download, and returns
/// the pipeline output (the local DirectoryInfo on success).
/// </summary>
/// <param name="context">Executor context; expected to be a <see cref="CmdletContext"/>.</param>
/// <returns>The cmdlet output; PipelineOutput is the downloaded folder on success.</returns>
/// <exception cref="ArgumentException">
/// Thrown when both the obsolete and the Utc variant of ModifiedSinceDate/UnmodifiedSinceDate are supplied.
/// </exception>
CmdletOutput DownloadFolderFromS3(ExecutorContext context)
{
    var cmdletContext = context as CmdletContext;

    var request = new TransferUtilityDownloadDirectoryRequest
    {
        BucketName = cmdletContext.BucketName,
        LocalDirectory = cmdletContext.Folder,
        S3Directory = cmdletContext.KeyPrefix
    };

    if (cmdletContext.UtcModifiedSinceDate.HasValue)
    {
        request.ModifiedSinceDateUtc = cmdletContext.UtcModifiedSinceDate.Value;
    }
    if (cmdletContext.UtcUnmodifiedSinceDate.HasValue)
    {
        request.UnmodifiedSinceDateUtc = cmdletContext.UtcUnmodifiedSinceDate.Value;
    }
#pragma warning disable CS0618, CS0612 //A class member was marked with the Obsolete attribute
    // The obsolete non-Utc parameters are still honored, but are mutually
    // exclusive with their Utc replacements.
    if (cmdletContext.ModifiedSinceDate.HasValue)
    {
        if (cmdletContext.UtcModifiedSinceDate != null)
        {
            throw new ArgumentException("Parameters ModifiedSinceDate and UtcModifiedSinceDate are mutually exclusive.");
        }
        request.ModifiedSinceDate = cmdletContext.ModifiedSinceDate.Value;
    }
    if (cmdletContext.UnmodifiedSinceDate.HasValue)
    {
        if (cmdletContext.UtcUnmodifiedSinceDate != null)
        {
            throw new ArgumentException("Parameters UnmodifiedSinceDate and UtcUnmodifiedSinceDate are mutually exclusive.");
        }
        request.UnmodifiedSinceDate = cmdletContext.UnmodifiedSinceDate.Value;
    }
#pragma warning restore CS0618, CS0612 //A class member was marked with the Obsolete attribute

    CmdletOutput output;

    // Resolve the client once. The original passed "Client ?? CreateClient(...)"
    // to TransferUtility but then dereferenced Client.Config for the verbose
    // message, which throws NullReferenceException whenever the fallback
    // CreateClient path was taken. Use the same resolved instance for both.
    var s3Client = Client ?? CreateClient(_CurrentCredentials, _RegionEndpoint);
    using (var tu = new TransferUtility(s3Client))
    {
        Utils.Common.WriteVerboseEndpointMessage(this, s3Client.Config, "Amazon S3 object download APIs");

        var runner = new ProgressRunner(this);
        var tracker = new DownloadFolderProgressTracker(runner, handler => request.DownloadedDirectoryProgressEvent += handler);

        output = runner.SafeRun(() => tu.DownloadDirectory(request), tracker);
        if (output.ErrorResponse == null)
        {
            output.PipelineOutput = new DirectoryInfo(cmdletContext.Folder);
        }

        WriteVerbose(string.Format("Downloaded {0} object(s) from bucket '{1}' with keyprefix '{2}' to '{3}'",
                                   tracker.DownloadedCount,
                                   cmdletContext.BucketName,
                                   cmdletContext.OriginalKeyPrefix,
                                   cmdletContext.Folder));
    }

    return output;
}
/// <summary>
/// Round-trip test helper: uploads a ~20 MB directory, wipes the local copy,
/// downloads it back via TransferUtility, and validates the restored contents.
/// NOTE(review): the <paramref name="concurrent"/> parameter is currently unused;
/// kept for signature compatibility with callers.
/// </summary>
void DownloadDirectory(string directoryName, DirectoryProgressValidator <DownloadDirectoryProgressArgs> progressValidator, bool concurrent = true)
{
    var localPath = Path.Combine(basePath, directoryName);

    // Seed the bucket, then remove the local source so the download is observable.
    UploadDirectory(directoryName, 20 * MEG_SIZE, null, false);
    Directory.Delete(localPath, true);

    var downloadRequest = new TransferUtilityDownloadDirectoryRequest
    {
        BucketName = bucketName,
        LocalDirectory = localPath,
        S3Directory = directoryName
    };
    if (progressValidator != null)
    {
        downloadRequest.DownloadedDirectoryProgressEvent += progressValidator.OnProgressEvent;
    }

    new TransferUtility(Client).DownloadDirectory(downloadRequest);

    ValidateDirectoryContents(bucketName, directoryName, localPath);
}
/// <summary>
/// Initializes the command with the client and request it will execute.
/// Encryption instruction files are skipped when the client is an
/// <see cref="Amazon.S3.Internal.IAmazonS3Encryption"/> implementation.
/// </summary>
/// <param name="s3Client">Client used to issue the S3 calls; must not be null.</param>
/// <param name="request">The download-directory request to execute; must not be null.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="s3Client"/> or <paramref name="request"/> is null.</exception>
internal DownloadDirectoryCommand(IAmazonS3 s3Client, TransferUtilityDownloadDirectoryRequest request)
{
    if (s3Client == null)
    {
        // nameof keeps the parameter name refactor-safe (was a magic string).
        throw new ArgumentNullException(nameof(s3Client));
    }
    if (request == null)
    {
        // The original validated only s3Client; the request is dereferenced
        // later, so guard it here with the same style for consistency.
        throw new ArgumentNullException(nameof(request));
    }

    this._s3Client = s3Client;
    this._request = request;
    this._skipEncryptionInstructionFiles = s3Client is Amazon.S3.Internal.IAmazonS3Encryption;
}
/// <summary>
/// Downloads the "CsvDataTables/" prefix from the bucket into the local CSV folder.
/// NOTE(review): the local directory is hard-coded (d:\MFContrast\CsvData) — consider
/// making it configurable. Only AmazonS3Exception is handled; other failures propagate.
/// </summary>
/// <returns>true when the download completed; false when S3 reported an error.</returns>
public static async Task <bool> S3DirectoryDownloadAsync()
{
    try
    {
        var request = new TransferUtilityDownloadDirectoryRequest()
        {
            BucketName = bucketName,
            S3Directory = "CsvDataTables/",
            LocalDirectory = @"d:\MFContrast\CsvData",
        };
        await TransferUtility.DownloadDirectoryAsync(request);
        return true;
    }
    catch (AmazonS3Exception e)
    {
        // Fixed: the original message named the wrong method ("S3DownloadAsync")
        // and concatenated the exception with no separator.
        Console.WriteLine($"Error in {nameof(S3DirectoryDownloadAsync)}: {e}");
        return false;
    }
}
/// <summary>
/// Round-trip test helper: uploads a generated ~20 MB directory, deletes the local
/// copy, downloads it back with TransferUtility, and validates the restored tree.
/// NOTE(review): the <paramref name="concurrent"/> parameter is currently unused;
/// kept for signature compatibility with callers.
/// </summary>
void DownloadDirectory(DirectoryProgressValidator <DownloadDirectoryProgressArgs> progressValidator, bool concurrent = true)
{
    var sourceDirectory = UploadDirectory(20 * MEG_SIZE, null, false);
    var localPath = sourceDirectory.FullName;
    var prefix = sourceDirectory.Name;

    // Remove the local source so the subsequent download is observable.
    Directory.Delete(localPath, true);

    var downloadRequest = new TransferUtilityDownloadDirectoryRequest
    {
        BucketName = bucketName,
        LocalDirectory = localPath,
        S3Directory = prefix
    };
    if (progressValidator != null)
    {
        downloadRequest.DownloadedDirectoryProgressEvent += progressValidator.OnProgressEvent;
    }

    new TransferUtility(Client).DownloadDirectory(downloadRequest);

    ValidateDirectoryContents(Client, bucketName, prefix, sourceDirectory);
}
/// <summary>
/// Initializes the command with the S3 client and the download-directory
/// request it will execute. No argument validation is performed here.
/// </summary>
/// <param name="s3Client">Client used to issue the S3 calls.</param>
/// <param name="request">The download-directory request to execute.</param>
internal DownloadDirectoryCommand(IAmazonS3 s3Client, TransferUtilityDownloadDirectoryRequest request)
{
    this._request = request;
    this._s3Client = s3Client;
}
/// <summary>
/// Downloads an annotation package from S3 into the extraction folder.
/// Downloads are serialized through a FIFO queue: the caller waits (polling once
/// per second) until its package reaches the queue front, downloads into a
/// "_temp" staging directory, then atomically moves it into place.
/// </summary>
/// <param name="package">The package to fetch; mutated to reflect download state.</param>
/// <param name="token">Cancellation token, honored while queued and during the download.</param>
/// <returns>The same package instance, marked available locally with images prepared.</returns>
public async Task <AnnotationPackage> DownloadPackageAsync(AnnotationPackage package, CancellationToken token = default(CancellationToken))
{
    this._packagesToDownload.Enqueue(package);
    package.Enqueued = true;
    while (this._packagesToDownload.Peek() != package)
    {
        // Fixed: pass the token so a queued caller can actually cancel
        // (the original ignored it here and polled forever).
        // NOTE(review): if cancellation fires here the package stays in the
        // queue with Enqueued == true — pre-existing design; Queue<T> cannot
        // remove a non-front element.
        await Task.Delay(1000, token).ConfigureAwait(false);
    }
    package.Enqueued = false;
    this._downloadedPackage = package;

    try
    {
        if (!Directory.Exists(this._extractionFolder))
        {
            Directory.CreateDirectory(this._extractionFolder);
        }

        var packagePath = Path.Combine(this._extractionFolder, package.PackageName);
        var tempPath = $"{packagePath}_temp";

        if (Directory.Exists(packagePath))
        {
            Directory.Delete(packagePath, true);
        }
        // Fixed: clear stale staging data from a previously interrupted download,
        // otherwise old files merge into the fresh package (or the final Move fails).
        if (Directory.Exists(tempPath))
        {
            Directory.Delete(tempPath, true);
        }

        // NOTE(review): the listing result was assigned to an unused local in the
        // original; the call is kept (it surfaces bucket/permission errors early)
        // but the token is now passed through.
        await this._s3Client.ListObjectsV2Async(new ListObjectsV2Request { BucketName = this._bucketName, Prefix = package.PackageName }, token).ConfigureAwait(false);

        var request = new TransferUtilityDownloadDirectoryRequest
        {
            BucketName = this._bucketName,
            S3Directory = package.PackageName,
            LocalDirectory = tempPath
        };
        request.DownloadedDirectoryProgressEvent += this.DownloadedDirectoryProgressEvent;
        try
        {
            using (var fileTransferUtility = new TransferUtility(this._s3Client))
            {
                await fileTransferUtility.DownloadDirectoryAsync(request, token).ConfigureAwait(false);
            }
        }
        finally
        {
            // Always detach the handler so the request doesn't keep this instance alive.
            request.DownloadedDirectoryProgressEvent -= this.DownloadedDirectoryProgressEvent;
            package.Downloading = false;
        }

        if (Directory.Exists(tempPath))
        {
            Directory.Move(tempPath, packagePath);
        }

        package.AvailableLocally = true;

        var path = Path.Combine(this._extractionFolder, package.PackageName);
        package.PrepareImages(path);

        return package;
    }
    finally
    {
        // Fixed: dequeue in finally. The original dequeued only on success, so
        // any exception left this package at the queue front and deadlocked
        // every subsequent caller in the Peek() wait loop.
        this._packagesToDownload.Dequeue();
    }
}
/// <summary>
/// Initializes the command with the S3 client and the download-directory
/// request it will execute. No argument validation is performed here.
/// </summary>
/// <param name="s3Client">Client used to issue the S3 calls.</param>
/// <param name="request">The download-directory request to execute.</param>
internal DownloadDirectoryCommand(AmazonS3 s3Client, TransferUtilityDownloadDirectoryRequest request)
{
    this._request = request;
    this._s3Client = s3Client;
}
/// <summary>
/// Initializes the command with the S3 client, the download-directory request
/// to execute, and the transfer configuration. No argument validation is
/// performed here.
/// </summary>
/// <param name="s3Client">Client used to issue the S3 calls.</param>
/// <param name="request">The download-directory request to execute.</param>
/// <param name="config">Transfer utility configuration (e.g. concurrency settings).</param>
internal DownloadDirectoryCommand(IAmazonS3 s3Client, TransferUtilityDownloadDirectoryRequest request, TransferUtilityConfig config)
{
    this._config = config;
    this._request = request;
    this._s3Client = s3Client;
}
/// <summary>
/// Downloads a book folder from the S3 bucket into the destination parent directory.
/// Warning: if the book already exists in the location, this is going to delete it and
/// over-write it. So it's up to the caller to check the sanity of that.
/// </summary>
/// <param name="storageKeyOfBookFolder">S3 key prefix of the book folder to fetch.</param>
/// <param name="pathToDestinationParentDirectory">Local directory the book folder is placed under.</param>
/// <param name="downloadProgress">Optional dialog updated as files arrive (invoked on its own thread).</param>
/// <returns>The full local path of the downloaded book folder.</returns>
/// <exception cref="DirectoryNotFoundException">The book no longer exists in the BloomLibrary bucket.</exception>
/// <exception cref="ApplicationException">The staging folder did not contain exactly one directory.</exception>
public string DownloadBook(string storageKeyOfBookFolder, string pathToDestinationParentDirectory, ProgressDialog downloadProgress = null)
{
    //TODO tell it not to download pdfs. Those are just in there for previewing purposes, we don't need to get them now that we're getting the real thing
    //review: should we instead save to a newly created folder so that we don't have to worry about the
    //other folder existing already? Todo: add a test for that first.
    if (!GetBookExists(storageKeyOfBookFolder))
    {
        throw new DirectoryNotFoundException("The book we tried to download is no longer in the BloomLibrary");
    }
    // Download into a unique temp staging folder first, then move/copy into place,
    // so a partial download never appears at the destination.
    using (var tempDestination = new TemporaryFolder("BloomDownloadStaging " + storageKeyOfBookFolder + " " + Guid.NewGuid()))
    {
        var request = new TransferUtilityDownloadDirectoryRequest()
        {
            BucketName = _bucketName,
            S3Directory = storageKeyOfBookFolder,
            LocalDirectory = tempDestination.FolderPath
        };
        int downloaded = 0;
        int initialProgress = 0;
        if (downloadProgress != null)
        {
            downloadProgress.Invoke((Action)(() =>
            {
                downloadProgress.Progress++;                     // count getting set up as one step.
                initialProgress = downloadProgress.Progress;     // might be one more step done, downloading order
            }));
        }
        int total = 14; // arbitrary (typical minimum files in project)
        // The handler mutates the captured 'total'/'downloaded' so the dialog is
        // only invoked when the numbers actually change.
        request.DownloadedDirectoryProgressEvent += delegate(object sender, DownloadDirectoryProgressArgs args)
        {
            int progressMax = initialProgress + args.TotalNumberOfFiles;
            int currentProgress = initialProgress + args.NumberOfFilesDownloaded;
            if (downloadProgress != null && (progressMax != total || currentProgress != downloaded))
            {
                total = progressMax;
                downloaded = currentProgress;
                // We only want to invoke if something really changed.
                downloadProgress.Invoke((Action)(() =>
                {
                    downloadProgress.ProgressRangeMaximum = progressMax; // probably only changes the first time
                    downloadProgress.Progress = currentProgress;
                }));
            }
        };
        _transferUtility.DownloadDirectory(request);
        //look inside the wrapper that we got
        var children = Directory.GetDirectories(tempDestination.FolderPath);
        if (children.Length != 1)
        {
            throw new ApplicationException(
                string.Format("Bloom expected to find a single directory in {0}, but instead there were {1}",
                              tempDestination.FolderPath, children.Length));
        }
        var destinationPath = Path.Combine(pathToDestinationParentDirectory, Path.GetFileName(children[0]));

        //clear out anything existing on our target
        if (Directory.Exists(destinationPath))
        {
            Directory.Delete(destinationPath, true);
        }

        //if we're on the same volume, we can just move it. Else copy it.
        // It's important that books appear as nearly complete as possible, because a file watcher will very soon add the new
        // book to the list of downloaded books the user can make new ones from, once it appears in the target directory.
        if (Directory.GetDirectoryRoot(pathToDestinationParentDirectory) ==
            Directory.GetDirectoryRoot(tempDestination.FolderPath))
        {
            Directory.Move(children[0], destinationPath);
        }
        else
        {
            CopyDirectory(children[0], destinationPath);
        }
        return (destinationPath);
    }
}
/// <summary>
/// Initializes the command with a transfer configuration in addition to the
/// client and request, delegating the common wiring to the two-argument
/// constructor.
/// </summary>
/// <param name="s3Client">Client used to issue the S3 calls.</param>
/// <param name="request">The download-directory request to execute.</param>
/// <param name="config">Transfer utility configuration (e.g. concurrency settings).</param>
internal DownloadDirectoryCommand(IAmazonS3 s3Client, TransferUtilityDownloadDirectoryRequest request, TransferUtilityConfig config)
    : this(s3Client, request) => this._config = config;