internal SimpleUploadCommand(IAmazonS3 s3Client, TransferUtilityConfig config, TransferUtilityUploadRequest fileTransporterRequest)
{
    this._s3Client = s3Client;
    this._config = config;
    this._fileTransporterRequest = fileTransporterRequest;

    var fileName = fileTransporterRequest.FilePath;
}
internal void Execute()
{
    string basePath = new DirectoryInfo(this._request.Directory).FullName;
    string searchPattern = string.IsNullOrEmpty(this._request.SearchPattern) ? "*" : this._request.SearchPattern;
    string[] filePaths = Directory.GetFiles(this._request.Directory, searchPattern, this._request.SearchOption);
    this._totalNumberOfFiles = filePaths.Length;

    foreach (string filepath in filePaths)
    {
        this._currentFile = filepath;

        string key = filepath.Substring(basePath.Length);
        key = key.Replace(@"\", "/");
        if (key.StartsWith("/"))
            key = key.Substring(1);

        TransferUtilityUploadRequest uploadRequest = new TransferUtilityUploadRequest()
            .WithBucketName(this._request.BucketName)
            .WithKey(key)
            .WithFilePath(filepath)
            .WithCannedACL(this._request.CannedACL)
            .WithTimeout(this._request.Timeout)
            .WithMetadata(this._request.Metadata)
            .WithStorageClass(this._request.StorageClass);
        uploadRequest.UploadProgressEvent += new EventHandler<UploadProgressArgs>(uploadProgressEventCallback);

        this._utility.Upload(uploadRequest);
        this._numberOfFilesUploaded++;
    }
}
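The uploadProgressEventCallback handler subscribed above is not part of this snippet. A minimal sketch of what such a per-file handler could look like, assuming the Amazon.S3.Transfer namespace; this is an illustration, not the SDK's internal implementation:

// Illustrative per-file progress handler (not the SDK's internal code).
private void uploadProgressEventCallback(object sender, UploadProgressArgs e)
{
    // UploadProgressArgs reports bytes for the file currently being uploaded.
    Console.WriteLine("{0}: {1}/{2} bytes ({3}%)",
        this._currentFile, e.TransferredBytes, e.TotalBytes, e.PercentDone);
}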
/// <summary>
/// Initializes a new instance of the <see cref="MultipartUploadCommand"/> class.
/// </summary>
/// <param name="s3Client">The s3 client.</param>
/// <param name="config">The config object that has the number of threads to use.</param>
/// <param name="fileTransporterRequest">The file transporter request.</param>
internal MultipartUploadCommand(IAmazonS3 s3Client, TransferUtilityConfig config, TransferUtilityUploadRequest fileTransporterRequest)
{
    this._config = config;

    if (fileTransporterRequest.IsSetFilePath())
    {
        _logger.DebugFormat("Beginning upload of file {0}.", fileTransporterRequest.FilePath);
    }
    else
    {
        _logger.DebugFormat("Beginning upload of stream.");
    }

    this._s3Client = s3Client;
    this._fileTransporterRequest = fileTransporterRequest;
    this._contentLength = this._fileTransporterRequest.ContentLength;

    if (fileTransporterRequest.IsSetPartSize())
        this._partSize = fileTransporterRequest.PartSize;
    else
        this._partSize = calculatePartSize(this._contentLength);

    if (fileTransporterRequest.InputStream != null)
    {
        if (fileTransporterRequest.AutoResetStreamPosition && fileTransporterRequest.InputStream.CanSeek)
        {
            fileTransporterRequest.InputStream.Seek(0, SeekOrigin.Begin);
        }
    }

    _logger.DebugFormat("Upload part size {0}.", this._partSize);
}
private TransferUtilityUploadRequest ConstructRequest(string basePath, string filepath, string prefix)
{
    string key = filepath.Substring(basePath.Length);
    key = key.Replace(@"\", "/");
    if (key.StartsWith("/", StringComparison.Ordinal))
        key = key.Substring(1);
    key = prefix + key;

    var uploadRequest = new TransferUtilityUploadRequest()
    {
        BucketName = this._request.BucketName,
        Key = key,
        FilePath = filepath,
        CannedACL = this._request.CannedACL,
        Metadata = this._request.Metadata,
        StorageClass = this._request.StorageClass,
        ServerSideEncryptionMethod = this._request.ServerSideEncryptionMethod,
#if (BCL && !BCL45)
        Timeout = ClientConfig.GetTimeoutValue(this._config.DefaultTimeout, this._request.Timeout)
#endif
    };
    uploadRequest.UploadProgressEvent += new EventHandler<UploadProgressArgs>(UploadProgressEventCallback);

    return uploadRequest;
}
/// <summary>
/// Initializes a new instance of the <see cref="MultipartUploadCommand"/> class.
/// </summary>
/// <param name="s3Client">The s3 client.</param>
/// <param name="config">The config object that has the number of threads to use.</param>
/// <param name="fileTransporterRequest">The file transporter request.</param>
internal MultipartUploadCommand(AmazonS3 s3Client, TransferUtilityConfig config, TransferUtilityUploadRequest fileTransporterRequest)
{
    this._config = config;

    if (fileTransporterRequest.IsSetFilePath())
    {
        this._logger.DebugFormat("Beginning upload of file {0}.", fileTransporterRequest.FilePath);
    }
    else
    {
        this._logger.DebugFormat("Beginning upload of stream.");
    }

    this._s3Client = s3Client;
    this._fileTransporterRequest = fileTransporterRequest;
    this._contentLength = this._fileTransporterRequest.ContentLength;

    if (fileTransporterRequest.IsSetPartSize())
        this._partSize = fileTransporterRequest.PartSize;
    else
        this._partSize = calculatePartSize(this._contentLength);

    this._logger.DebugFormat("Upload part size {0}.", this._partSize);
}
private void UploadResumeToAWS(Stream fileStream, string fileName)
{
    string resume = "";
    try
    {
        AWSCredentials awsCredentials = new BasicAWSCredentials(_accessKey, _secretAccess);
        AmazonS3Client amazonS3 = new AmazonS3Client(awsCredentials, Amazon.RegionEndpoint.USWest2);
        TransferUtility fileTransferUtility = new TransferUtility(amazonS3);

        TransferUtilityUploadRequest uploadRequest = new TransferUtilityUploadRequest();
        uploadRequest.BucketName = _existingBucketName;
        uploadRequest.InputStream = fileStream;

        // Path.GetExtension handles file names with zero or multiple dots,
        // unlike splitting on '.' and taking the second element.
        string fileExtension = Path.GetExtension(fileName);
        uploadRequest.Key = Guid.NewGuid().ToString() + fileExtension;
        resume = uploadRequest.Key;
        _uploadService.UpdateResume(resume);

        fileTransferUtility.Upload(uploadRequest);
    }
    catch (AmazonS3Exception s3Exception)
    {
        Console.WriteLine(s3Exception.Message, s3Exception.InnerException);
    }
}
public void UploadFileToAWSS3(string KeyName, string FileWithPath)
{
    try
    {
        var uploadRequest = new TransferUtilityUploadRequest
        {
            FilePath = FileWithPath,
            BucketName = BucketName,
            CannedACL = S3CannedACL.PublicRead,
            Key = KeyName
        };

        var fileTransferUtility = new TransferUtility(S3Client);
        fileTransferUtility.Upload(uploadRequest);
    }
    catch (Exception ex)
    {
        //logger.LogError("UpdloadBinary exception. Message:'{0}' when writing an object. " +
        //    "KeyName:'{1}' FileWithPath:'{2}'", ex.Message, KeyName, FileWithPath);
    }
    finally
    {
        File.Delete(FileWithPath);
    }
}
public async Task<IActionResult> UploadFile(IFormFile file)
{
    if (file == null || file.Length == 0)
    {
        return Content("file not selected");
    }

    var userId = userManager.GetUserId(HttpContext.User);
    var fileKey = $"{userId}_{file.FileName}_{Guid.NewGuid().ToString()}";

    using (var stream = file.OpenReadStream())
    {
        var uploadRequest = new TransferUtilityUploadRequest
        {
            InputStream = stream,
            Key = fileKey,
            BucketName = _awsAccess.S3BucketName,
        };

        await fileTransferUtility.UploadAsync(uploadRequest);
    }

    var text = await this.Recognize(file);

    await dbContext.OcrElements.AddAsync(new Data.Ocr.OcrElement
    {
        ImageFileContentType = file.ContentType,
        ImageFilename = file.FileName,
        ImageFilenamePath = fileKey,
        OcrText = text,
        UserId = userId,
    });
    await dbContext.SaveChangesAsync();

    return RedirectToAction("History");
}
/// <summary>
/// Initializes a new instance of the <see cref="MultipartUploadCommand"/> class.
/// </summary>
/// <param name="s3Client">The s3 client.</param>
/// <param name="config">The config object that has the number of threads to use.</param>
/// <param name="fileTransporterRequest">The file transporter request.</param>
internal MultipartUploadCommand(IAmazonS3 s3Client, TransferUtilityConfig config, TransferUtilityUploadRequest fileTransporterRequest)
{
    this._config = config;

    if (fileTransporterRequest.IsSetFilePath())
    {
        _logger.DebugFormat("Beginning upload of file {0}.", fileTransporterRequest.FilePath);
    }
    else
    {
        _logger.DebugFormat("Beginning upload of stream.");
    }

    this._s3Client = s3Client;
    this._fileTransporterRequest = fileTransporterRequest;
    this._contentLength = this._fileTransporterRequest.ContentLength;

    if (fileTransporterRequest.IsSetPartSize())
    {
        this._partSize = fileTransporterRequest.PartSize;
    }
    else
    {
        this._partSize = calculatePartSize(this._contentLength);
    }

    if (fileTransporterRequest.InputStream != null)
    {
        if (fileTransporterRequest.AutoResetStreamPosition && fileTransporterRequest.InputStream.CanSeek)
        {
            fileTransporterRequest.InputStream.Seek(0, SeekOrigin.Begin);
        }
    }

    _logger.DebugFormat("Upload part size {0}.", this._partSize);
}
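The calculatePartSize helper is referenced above but not shown. A rough sketch of the idea, assuming S3's 10,000-part limit and 5 MB minimum part size; this is an illustration, not the SDK's actual algorithm:

// Illustrative only: pick a part size that keeps the upload under S3's
// 10,000-part limit while respecting the 5 MB minimum part size.
private static long calculatePartSize(long contentLength)
{
    const long minPartSize = 5L * 1024 * 1024;   // 5 MB S3 minimum
    const int maxParts = 10000;

    long partSize = (long)Math.Ceiling((double)contentLength / maxParts);
    return Math.Max(partSize, minPartSize);
}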
public async Task<bool> UploadIndexFileAndDirectory(string file, string directory, OVEAssetModel asset)
{
    _logger.LogInformation($"about to upload index {file} and directory {directory}");

    using (var fileTransferUtility = new TransferUtility(GetS3Client(_configuration)))
    {
        // upload the index file
        var assetRootFolder = Path.GetDirectoryName(asset.StorageLocation);
        var filesKeyPrefix = assetRootFolder + "/" + new DirectoryInfo(directory).Name + "/"; // upload to the right folder

        TransferUtilityUploadRequest req = new TransferUtilityUploadRequest
        {
            BucketName = asset.Project,
            Key = assetRootFolder + "/" + Path.GetFileName(file),
            FilePath = file
        };
        await fileTransferUtility.UploadAsync(req);

        // upload the tile files
        TransferUtilityUploadDirectoryRequest request = new TransferUtilityUploadDirectoryRequest()
        {
            KeyPrefix = filesKeyPrefix,
            Directory = directory,
            BucketName = asset.Project,
            SearchOption = SearchOption.AllDirectories,
            SearchPattern = "*.*"
        };
        await fileTransferUtility.UploadDirectoryAsync(request);

        _logger.LogInformation($"finished upload for index {file} and directory {directory}");
        return true;
    }
}
public async Task<string> UploadFileAsync(Stream fileStream, string keyName)
{
    string urlLocation = null;
    _log.LogInformation("File Name: {0}", keyName);

    try
    {
        using (var client = new AmazonS3Client(RegionEndpoint.SAEast1))
        {
            var bucketName = _configuration["Website:S3Bucket"];
            var fileTransferUtility = new TransferUtility(client);

            var fileLocation = "usuarios/fotos/" + keyName;
            _log.LogInformation("location: {0}", fileLocation);
            urlLocation = $"{_configuration["Website:S3BucketUrl"]}/{fileLocation}";

            var uploadRequest = new TransferUtilityUploadRequest();
            uploadRequest.ContentType = "image/png";
            uploadRequest.InputStream = fileStream;
            uploadRequest.Key = fileLocation;
            uploadRequest.CannedACL = S3CannedACL.PublicRead;
            uploadRequest.BucketName = bucketName;

            await fileTransferUtility.UploadAsync(uploadRequest);
        }
    }
    catch (AmazonS3Exception e)
    {
        _log.LogError("Error encountered on server when writing an object. Message:'{0}'", e.Message);
    }
    catch (Exception e)
    {
        _log.LogError("Unknown error encountered on server when writing an object. Message:'{0}'", e.Message);
    }

    return urlLocation;
}
/// <summary>
/// Uploads a file stream to S3 with a public-read ACL.
/// </summary>
/// <param name="s3Client">The s3 client.</param>
/// <param name="fileStream">The file stream.</param>
/// <param name="amazonBucket">The amazon bucket.</param>
/// <param name="amazonFileKey">The amazon file key.</param>
/// <returns></returns>
private async Task AWSUploadPublicFileAsync(IAmazonS3 s3Client, Stream fileStream, string amazonBucket, string amazonFileKey)
{
    try
    {
        fileStream.Position = 0;
        TransferUtility fileTransferUtility = new TransferUtility(s3Client);

        TransferUtilityUploadRequest uploadRequest = new TransferUtilityUploadRequest
        {
            BucketName = amazonBucket,
            Key = amazonFileKey,
            InputStream = fileStream,
            CannedACL = S3CannedACL.PublicRead,
        };

        await fileTransferUtility.UploadAsync(uploadRequest).ConfigureAwait(false);
    }
    catch (Exception ex)
    {
        System.Diagnostics.Debug.WriteLine($"AWSStorage.AWSUploadPublicFileAsync ERROR - [{ex.Message}]");
        throw;
    }
}
/// <summary>
/// Upload the audio file to S3.
/// </summary>
/// <param name="id">The post ID in DynamoDB</param>
/// <returns>The file's S3 URL</returns>
private async Task<string> UploadAsync(string id)
{
    AmazonS3Client client = new AmazonS3Client();
    TransferUtility tu = new TransferUtility(client);
    string bucketName = Environment.GetEnvironmentVariable("BUCKET_NAME");

    var uploadRequest = new TransferUtilityUploadRequest
    {
        BucketName = bucketName,
        FilePath = $"/tmp/{id}.mp3",
        StorageClass = S3StorageClass.Standard,
        Key = $"{id}.mp3",
        CannedACL = S3CannedACL.PublicRead
    };
    await tu.UploadAsync(uploadRequest);

    string cdnDistribution = Environment.GetEnvironmentVariable("CLOUDFRONT_DISTRIBUTION");
    string url = $"https://{cdnDistribution}/{id}.mp3";
    return url;
}
public async Task Save(string id, Stream source, Dictionary<string, string> metadata = null)
{
    await TryCatchAsync(id, async (identity) =>
    {
        using (var transferUtility = CreateTransferUtility())
        {
            var metadataCollection = _metadataCollectionFactory.Create();
            metadataCollection.AddRange(metadata);
            metadataCollection[MetadataKeys.SaveTime] = RebusTime.Now.ToString("O");

            var uploadRequest = new TransferUtilityUploadRequest
            {
                InputStream = source,
                BucketName = _options.BucketName,
                Key = identity.Key
            };
            metadataCollection.SaveTo(uploadRequest.Metadata);

            await transferUtility.UploadAsync(uploadRequest);
        }

        return true;
    });
}
public void TestMultipartUploadViaTransferUtility()
{
    var transferConfig = new TransferUtilityConfig { MinSizeBeforePartUpload = 6000000 };
    var transfer = new TransferUtility(Client, transferConfig);

    var content = new string('a', 7000000);
    var body = new MemoryStream(System.Text.UTF8Encoding.UTF8.GetBytes(content));
    var uploadRequest = new TransferUtilityUploadRequest
    {
        BucketName = _bucketName,
        Key = "a-lot-of-as.txt",
        InputStream = body
    };
    transfer.Upload(uploadRequest);

    using (var getResponse = Client.GetObject(_accesspointArn, uploadRequest.Key))
    {
        var getBody = new StreamReader(getResponse.ResponseStream).ReadToEnd();
        Assert.AreEqual(content, getBody);
    }
}
public void UploadAsyncCancellationTest()
{
    var fileName = UtilityMethods.GenerateName(Path.Combine("SimpleUploadTest", "CancellationTest"));
    var path = Path.Combine(basePath, fileName);
    UtilityMethods.GenerateFile(path, 20 * MEG_SIZE);

    TransferUtilityUploadRequest uploadRequest = new TransferUtilityUploadRequest()
    {
        BucketName = testBucketName,
        Key = fileName,
        FilePath = path
    };

    var tokenSource = new CancellationTokenSource();
    CancellationToken token = tokenSource.Token;
    System.Threading.Tasks.Task uploadTask = null;

    using (var transferUtility = new TransferUtility(Client))
    {
        uploadTask = transferUtility.UploadAsync(uploadRequest, token);
        tokenSource.CancelAfter(100);

        Assert.Throws<TaskCanceledException>(() =>
        {
            try
            {
                uploadTask.Wait();
            }
            catch (AggregateException e)
            {
                throw e.InnerException;
            }
        });
    }
}
public async Task<string> Save(byte[] file, string directory = null, string fileName = null)
{
    fileName = this.ResolveFileName(fileName);

    var utility = new TransferUtility(_s3Client);
    var request = new TransferUtilityUploadRequest
    {
        Key = fileName,
        BucketName = this.ResolveBucketName(directory),
        InputStream = new MemoryStream(file)
    };

    try
    {
        await utility.UploadAsync(request);
        return fileName;
    }
    catch (Exception e)
    {
        _logger.LogError($"Failed to upload a file | fileName:{fileName}, directory:{directory}, bucketName:{bucketName}, error:{e.Message}");
        return null;
    }
}
// POST: api/ImageDemo
public async Task<IHttpActionResult> Post(dynamic paras)
{
    var dataUri = new DataUri((string)paras.dataUri);
    if (!dataUri.IsSupported)
    {
        return BadRequest();
    }

    var objectName = String.Format("{0}.{1}", DateTime.Now.Ticks.ToString(), dataUri.Format);

    using (IAmazonS3 client = new AmazonS3Client(region: RegionEndpoint.APSoutheast2))
    {
        var request = new TransferUtilityUploadRequest
        {
            BucketName = "meet.u",
            Key = objectName,
            InputStream = dataUri.ToStream
        };
        await new TransferUtility(client).UploadAsync(request);
    }

    return Ok(@"https://s3-ap-southeast-2.amazonaws.com/meet.u/" + objectName);
}
public async Task SaveAsync(string path, string contentType, Stream inputStream)
{
    inputStream.Seek(0, SeekOrigin.Begin);

    var config = new AmazonS3Config() { ServiceURL = serviceUrl };
    using (var client = AWSClientFactory.CreateAmazonS3Client(accessKey, secretKey, config))
    {
        var transferUtility = new TransferUtility(client);
        var transferRequest = new TransferUtilityUploadRequest
        {
            BucketName = bucketName,
            Key = path,
            CannedACL = S3CannedACL.PublicRead,
            InputStream = inputStream,
            ContentType = contentType,
        };
        await transferUtility.UploadAsync(transferRequest);
    }
}
private string TransferFile(TEnum fileType, string fileName, byte[] buffer, long dateTicks, bool isPublic)
{
    var key = string.Format(CultureInfo.InvariantCulture, "{0}/{1}/{2}", fileType, dateTicks, fileName);

    using (var stream = new MemoryStream(buffer))
    {
        var request = new TransferUtilityUploadRequest();
        request.Key = key;
        request.BucketName = this.BucketName;
        request.InputStream = stream;

        // Set the content type and cache headers on the request's own Headers
        // collection; a standalone HeadersCollection instance would never be
        // sent with the upload.
        request.Headers.ContentType = this.GetContentType(fileName);
        request.Headers.CacheControl = string.Format("max-age={0}, must-revalidate", MAX_AGE);

        if (isPublic)
        {
            request.CannedACL = S3CannedACL.PublicRead;
        }

        var transferUtility = NewTransferUtility();
        transferUtility.Upload(request);
    }

    return key;
}
public async Task<AddFileResponse> UploadFiles(string bucketName, IList<IFormFile> formFiles)
{
    var response = new List<string>();

    foreach (var file in formFiles)
    {
        var uploadRequest = new TransferUtilityUploadRequest()
        {
            InputStream = file.OpenReadStream(),
            Key = file.FileName,
            BucketName = bucketName,
            CannedACL = S3CannedACL.NoACL
        };

        using (var transferUtility = new TransferUtility(_s3Client))
        {
            await transferUtility.UploadAsync(uploadRequest);
        }

        var expiryUrlRequest = new GetPreSignedUrlRequest()
        {
            BucketName = bucketName,
            Key = file.FileName,
            Expires = DateTime.Now.AddDays(1)
        };
        var url = _s3Client.GetPreSignedURL(expiryUrlRequest);
        response.Add(url);
    }

    return new AddFileResponse { PreSignedURL = response };
}
public string StoreFile(Stream file)
{
    try
    {
        using (var newMemoryStream = new MemoryStream())
        {
            file.CopyTo(newMemoryStream);

            var uploadRequest = new TransferUtilityUploadRequest
            {
                InputStream = newMemoryStream,
                Key = Guid.NewGuid().ToString(),
                BucketName = _storageSettings.BucketName,
                CannedACL = S3CannedACL.Private
            };

            var fileTransferUtility = new TransferUtility(_client);
            fileTransferUtility.UploadAsync(uploadRequest).Wait();

            return $"https://s3.amazonaws.com/{_storageSettings.BucketName}/{uploadRequest.Key}";
        }
    }
    catch (Exception)
    {
        throw;
    }
}
public void Save(byte[] file, string name, string folder)
{
    try
    {
        var amazonS3 = new AmazonS3Client(accessKey, secretKey);

        using (var ms = new MemoryStream(file))
        {
            var transferUtility = new TransferUtility(amazonS3);
            var transferUtilityRequest = new TransferUtilityUploadRequest()
            {
                BucketName = bucket,
                StorageClass = S3StorageClass.ReducedRedundancy,
                CannedACL = S3CannedACL.PublicRead,
                Key = string.IsNullOrEmpty(folder) ? name : folder.EndsWith("/") ? folder + name : folder + "/" + name,
                InputStream = ms
            };
            transferUtility.Upload(transferUtilityRequest);
        }
    }
    catch (Exception)
    {
        throw;
    }
}
private TransferUtilityUploadRequest ConstructRequest(string basePath, string filepath, string prefix)
{
    string key = filepath.Substring(basePath.Length);
    key = key.Replace(@"\", "/");
    if (key.StartsWith("/", StringComparison.Ordinal))
    {
        key = key.Substring(1);
    }
    key = prefix + key;

    var uploadRequest = new TransferUtilityUploadRequest()
    {
        BucketName = this._request.BucketName,
        Key = key,
        FilePath = filepath,
        CannedACL = this._request.CannedACL,
        Metadata = this._request.Metadata,
        ContentType = this._request.ContentType,
        StorageClass = this._request.StorageClass,
        ServerSideEncryptionMethod = this._request.ServerSideEncryptionMethod,
        ServerSideEncryptionKeyManagementServiceKeyId = this._request.ServerSideEncryptionKeyManagementServiceKeyId,
        TagSet = this._request.TagSet,
        CalculateContentMD5Header = this._request.CalculateContentMD5Header,
#if (BCL && !BCL45)
        Timeout = ClientConfig.GetTimeoutValue(this._config.DefaultTimeout, this._request.Timeout)
#endif
    };
    uploadRequest.UploadProgressEvent += new EventHandler<UploadProgressArgs>(UploadProgressEventCallback);

    // Raise event to allow subscribers to modify request
    _request.RaiseUploadDirectoryFileRequestEvent(uploadRequest);

    return uploadRequest;
}
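The RaiseUploadDirectoryFileRequestEvent call above is the hook that lets callers adjust each per-file request during a directory upload. A hedged usage sketch, assuming the UploadDirectoryFileRequestEvent event and its UploadRequest argument property on TransferUtilityUploadDirectoryRequest in recent SDK versions; the bucket and directory values are placeholders:

// Hedged sketch: subscribe to the directory request's per-file event to tweak
// each generated upload request (event and argument names assumed from the call above).
var directoryRequest = new TransferUtilityUploadDirectoryRequest
{
    BucketName = "my-bucket",           // placeholder
    Directory = @"C:\data\to-upload",   // placeholder
    SearchOption = SearchOption.AllDirectories
};

directoryRequest.UploadDirectoryFileRequestEvent += (sender, args) =>
{
    // args.UploadRequest is the TransferUtilityUploadRequest built for one file.
    args.UploadRequest.StorageClass = S3StorageClass.StandardInfrequentAccess;
};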
private TransferUtilityUploadRequest SaveFile(UploadFileModel uploadFileModel)
{
    try
    {
        var keyName = uploadFileModel.File.FileName;
        var filePath = Path.Combine(uploadFileModel.ServerMapPath, keyName);
        uploadFileModel.File.SaveAs(filePath);

        var fileTransferUtilityRequest = new TransferUtilityUploadRequest
        {
            BucketName = bucketName,
            FilePath = filePath,
            StorageClass = S3StorageClass.StandardInfrequentAccess,
            PartSize = 4096,
            Key = keyName,
            CannedACL = S3CannedACL.PublicRead
        };
        return fileTransferUtilityRequest;
    }
    catch (Exception)
    {
        throw new SaveFileException("An error occured while saving file or setting file settings.");
    }
}
public void UploadFile()
{
    //var transferUtility = new TransferUtility(client);
    //transferUtility.Upload(AppDomain.CurrentDomain.BaseDirectory + "\\test.txt", bucketName);
    //Console.WriteLine("File Uploaded Successfully");
    //Console.ReadLine();

    //var transferUtility = new TransferUtility(client);
    //transferUtility.UploadDirectory(AppDomain.CurrentDomain.BaseDirectory + "\\example", bucketName);
    //Console.WriteLine("File Uploaded Successfully");
    //Console.ReadLine();

    var transferUtility = new TransferUtility(client);
    var transferForRequest = new TransferUtilityUploadRequest
    {
        FilePath = AppDomain.CurrentDomain.BaseDirectory + "\\test.txt",
        CannedACL = S3CannedACL.PublicRead,
        BucketName = bucketName
    };
    transferUtility.Upload(transferForRequest);

    Console.WriteLine("File Uploaded Successfully");
    Console.ReadLine();
}
public async Task UploadCsvData(string userId, IEnumerable<Locations> locations)
{
    try
    {
        using (var client = new AmazonS3Client(amazonOptions.Value.Key, amazonOptions.Value.Secret, RegionEndpoint.EUCentral1))
        {
            var csvData = ConvertToCsv(userId, locations);

            var uploadRequest = new TransferUtilityUploadRequest
            {
                InputStream = GenerateStream(csvData),
                Key = $"{userId}.csv",
                BucketName = amazonOptions.Value.Bucket,
                CannedACL = S3CannedACL.Private
            };

            var fileTransferUtility = new TransferUtility(client);
            await fileTransferUtility.UploadAsync(uploadRequest);
        }
    }
    catch (Exception ex)
    {
        logger.LogError(ex, nameof(UploadCsvData));
    }
}
public static async Task<string> UploadToS3Async(IToolLogger logger, IAmazonS3 s3Client, string bucket, string key, Stream stream)
{
    logger?.WriteLine($"Uploading to S3. (Bucket: {bucket} Key: {key})");

    var request = new TransferUtilityUploadRequest()
    {
        BucketName = bucket,
        Key = key,
        InputStream = stream
    };
    request.UploadProgressEvent += Utilities.CreateTransferUtilityProgressHandler(logger);

    try
    {
        await new TransferUtility(s3Client).UploadAsync(request);
    }
    catch (Exception e)
    {
        throw new ToolsException($"Error uploading to {key} in bucket {bucket}: {e.Message}", ToolsException.CommonErrorCode.S3UploadError, e);
    }

    return key;
}
private async Task<bool> uploadObjectToS3(string keyName, string sourceFilePath)
{
    bool uploaded = false;

    try
    {
        var fileTransferUtility = new TransferUtility(s3Client);

        TransferUtilityUploadRequest req = new TransferUtilityUploadRequest();
        req.BucketName = setting.s3BucketName;
        req.Key = keyName;
        req.FilePath = sourceFilePath;
        req.UploadProgressEvent += new EventHandler<UploadProgressArgs>(uploadRequest_UploadPartProgressEvent);

        Console.SetCursorPosition(0, 0);
        Console.Write(new string(' ', Console.WindowWidth - 1));
        Console.CursorLeft = 0;
        Console.Write("Bucket: " + setting.s3BucketName + ", Uploading :" + keyName);
        Console.SetCursorPosition(38, 7);

        await fileTransferUtility.UploadAsync(req); // uploads an object to s3. it will overwrite same key name.
        uploaded = true;
    }
    catch (AmazonS3Exception ex)
    {
        log.Error(string.Format("S3 Error encountered on server when uploading key={0}, bucket={1}.", keyName, setting.s3BucketName), ex);
    }
    catch (Exception ex)
    {
        log.Error(string.Format("Unknown S3 error on server when uploading key={0}, bucket={1}", keyName, setting.s3BucketName), ex);
    }

    return uploaded;
}
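The uploadRequest_UploadPartProgressEvent handler wired up above is not shown. A minimal sketch of a console progress handler; the cursor position and message format are illustrative, not taken from the original project:

// Illustrative progress handler: rewrite one console line in place as the upload advances.
private void uploadRequest_UploadPartProgressEvent(object sender, UploadProgressArgs e)
{
    Console.SetCursorPosition(0, 1);
    Console.Write("Uploaded {0}/{1} bytes ({2}%)   ",
        e.TransferredBytes, e.TotalBytes, e.PercentDone);
}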
public void UploadFile(IFormFile file, string path)
{
    try
    {
        using (var client = new AmazonS3Client(s3ClientConfig))
        {
            using (var memoryStream = new MemoryStream())
            {
                file.CopyTo(memoryStream);

                var uploadRequest = new TransferUtilityUploadRequest
                {
                    InputStream = memoryStream,
                    Key = file.FileName,
                    BucketName = ImgServiceConfig.BUCKET_NAME + @"/" + path,
                    CannedACL = S3CannedACL.PublicRead
                };

                var fileTransferUtility = new TransferUtility(client);

                // Wait for the upload to finish before the using blocks dispose the
                // client and the stream; a fire-and-forget UploadAsync call could be
                // cut off mid-transfer and would also swallow upload exceptions.
                fileTransferUtility.UploadAsync(uploadRequest).GetAwaiter().GetResult();
            }
        }
    }
    catch (AmazonS3Exception amazonS3Exception)
    {
        if (amazonS3Exception.ErrorCode != null &&
            (amazonS3Exception.ErrorCode.Equals("InvalidAccessKeyId") || amazonS3Exception.ErrorCode.Equals("InvalidSecurity")))
        {
            throw new Exception("Check the provided AWS Credentials.");
        }
        else
        {
            throw new Exception("Error occurred: " + amazonS3Exception.Message);
        }
    }
}
public static string UploadPdfToS3(byte[] bytes, string filenameWithoutExtension)
{
    var client = new AmazonS3Client(
        Environment.GetEnvironmentVariable("S3_AWS_ACCESS_KEY_ID"),
        Environment.GetEnvironmentVariable("S3_AWS_SECRET_ACCESS_KEY"),
        RegionEndpoint.USEast2);
    var transferUtility = new TransferUtility(client);
    var filename = $"{filenameWithoutExtension}{Guid.NewGuid()}.pdf";

    using (var memoryStream = new MemoryStream(bytes))
    {
        var request = new TransferUtilityUploadRequest
        {
            BucketName = bucketName,
            ContentType = "application/pdf",
            CannedACL = S3CannedACL.PublicRead,
            InputStream = memoryStream,
            Key = filename,
            StorageClass = S3StorageClass.Standard
        };
        transferUtility.Upload(request);
    }

    return $"https://{bucketName}.s3.us-east-2.amazonaws.com/{filename}";
}
public async Task UploadAsync(Stream stream, string fullFileName, CancellationToken cancellationToken = default)
{
    CheckDisposed();

    if (stream is null)
    {
        throw new ArgumentNullException(nameof(stream));
    }
    if (string.IsNullOrWhiteSpace(fullFileName))
    {
        throw new ArgumentNullException(nameof(fullFileName));
    }

    await EnsureBucketExistsAsync().ConfigureAwait(false);

    if (await InternalFileExistsAsync(fullFileName, cancellationToken).ConfigureAwait(false))
    {
        throw new S3BucketException("Requested file to be uploaded to S3 bucket already exists in the bucket.");
    }

    var fileTransferUtilityRequest = new TransferUtilityUploadRequest
    {
        BucketName = BucketName,
        InputStream = stream,
        StorageClass = S3StorageClass.ReducedRedundancy,
        PartSize = PartSize,
        Key = AdjustKey(fullFileName),
        CannedACL = UploadCannedAcl
    };

    using (var tranUtility = new TransferUtility(S3Client))
    {
        await tranUtility.UploadAsync(fileTransferUtilityRequest, cancellationToken).ConfigureAwait(false);
    }
}
//public UsuarioObtenerUrlImagenDto ObtenerUrlImagenPorId(long id)
//{
//    return _adUsuario.ObtenerUrlImagenPorId(id);
//}

//public int EliminarUrlImagen(long id)
//{
//    return _adUsuario.EliminarUrlImagen(id);
//}

public async Task<string> SubirImagenAws(RequestUsuarioModificarImagenMetodo1Dto entidad)
{
    //int respuesta = 0;
    string url = string.Empty;

    try
    {
        var objetoImagenBd = _adUsuario.ObtenerUrlImagenPorId(entidad.IdUsuario);
        if (objetoImagenBd == null)
        {
            //url = string.Empty;
            //return -1;
            url = "-1";
            return url;
        }

        url = ConstanteVo.UrlAmazon;
        string nombreDirectorio = "Usuario";

        EliminarImagenAws(objetoImagenBd.UrlImagen, entidad.IdUsuario);
        //int respuestaEliminar =
        //if (respuestaEliminar > 0)
        //{
        using (var client = new AmazonS3Client(
            Infraestructura.Utilitario.Util.Desencriptar(ConstanteVo.AccessKeyAws),
            Infraestructura.Utilitario.Util.Desencriptar(ConstanteVo.SecretAccessKeyAws),
            RegionEndpoint.USEast2))
        {
            string nombreArchivo = string.Format("{0}_{1}{2}{3}_{4}{5}{6}_{7}.{8}",
                entidad.IdUsuario,
                DateTime.Now.Year.ToString("d4"), DateTime.Now.Month.ToString("d2"), DateTime.Now.Day.ToString("d2"),
                DateTime.Now.Hour.ToString("d2"), DateTime.Now.Minute.ToString("d2"), DateTime.Now.Second.ToString("d2"),
                DateTime.Now.Millisecond.ToString("d3"),
                entidad.ExtensionSinPunto);

            url = string.Format("{0}{1}/{2}", url, nombreDirectorio, nombreArchivo);

            using (var ms = new MemoryStream(entidad.ArchivoBytes))
            {
                var uploadRequest = new TransferUtilityUploadRequest
                {
                    InputStream = ms,
                    Key = nombreArchivo,
                    BucketName = string.Format("encuentralo/{0}", nombreDirectorio),
                    CannedACL = S3CannedACL.PublicRead
                };

                var fileTransferUtility = new TransferUtility(client);
                await fileTransferUtility.UploadAsync(uploadRequest);
            }
        }

        //LnUsuario lnUsuario = new LnUsuario();
        int respuestaBd = ModificarUrlImagenPorIdUsuario(entidad.IdUsuario, url);
        if (respuestaBd == 0)
        {
            url = "0";
        }
        //}
    }
    catch (AmazonS3Exception exSe)
    {
        Log(Level.Error, String.Format("AmazonS3Exception: {0}", exSe));
    }
    catch (Exception ex)
    {
        Log(Level.Error, String.Format("Exception: {0}", ex));
    }

    return url; // respuesta;
}
public IActionResult UploadFile(Upload up)
{
    ViewBag.emailError = "";
    ViewBag.fileError = "";

    if (ModelState.IsValid)
    {
        var uniqueFileName = Guid.NewGuid() + up.file.FileName; // uniquely identify files
        string presignedURL = ""; // the presigned url to send to user
        var client = new AmazonS3Client(_awsAccessKeyId, _awssecretkey, _bucketRegion); // client session used to talk to s3 bucket.

        /* The function that uploads a file, and then emails it to the user. */
        try
        {
            using (var memstr = new MemoryStream())
            {
                up.file.CopyTo(memstr);

                var uploadReq = new TransferUtilityUploadRequest
                {
                    InputStream = memstr,
                    Key = uniqueFileName,
                    BucketName = _bucketname,
                    CannedACL = S3CannedACL.PublicRead
                };

                var fileTransferUtility = new TransferUtility(client);
                fileTransferUtility.Upload(uploadReq);
            }
        }
        catch (Exception e)
        {
            /* There was an error uploading the file */
            ViewBag.gError = e;
            return View();
        }

        try
        {
            GetPreSignedUrlRequest preUrlReq = new GetPreSignedUrlRequest
            {
                BucketName = _bucketname,
                Key = uniqueFileName,
                Expires = DateTime.Now.AddDays(7) // The max length for a presigned url to exist is 7 days, so I will use that.
            };
            presignedURL = client.GetPreSignedURL(preUrlReq);
        }
        catch (Exception e)
        {
            /* There was an error getting the presigned url */
            ViewBag.gError = e;
            return View();
        }

        try
        {
            using (MailMessage mail = new MailMessage())
            {
                mail.From = new MailAddress("");
                mail.To.Add(up.email);
                mail.Subject = "Here is your presigned URL!";
                mail.Body = presignedURL + "\n\nThis URL will only persist for 7 days.";

                using (SmtpClient sc = new SmtpClient("smtp.gmail.com", 587))
                {
                    sc.Credentials = new NetworkCredential("*****@*****.**", "");
                    sc.EnableSsl = true;
                    sc.Send(mail);
                }
            }
        }
        catch (Exception e)
        {
            /* There was an error sending the email */
            ViewBag.gError = e;
            return View();
        }

        return RedirectToAction("Success"); // success is assumed if it has reached this point
    }
    else
    {
        if (up.email == null)
        {
            ViewBag.emailError = "Email is required!";
        }
        if (up.file == null)
        {
            ViewBag.fileError = "A valid filepath is required!";
        }
    }

    return View(); // if execution reaches here, there was an error.
}
public override void Execute()
{
    string prefix = string.Empty;
    if (this._request.IsSetKeyPrefix())
    {
        prefix = this._request.KeyPrefix;
        prefix = prefix.Replace(@"\", "/");
        if (prefix.StartsWith("/"))
            prefix = prefix.Substring(1);
        if (!prefix.EndsWith("/"))
        {
            prefix += "/";
        }
    }

    string basePath = new DirectoryInfo(this._request.Directory).FullName;
    string searchPattern = string.IsNullOrEmpty(this._request.SearchPattern) ? "*" : this._request.SearchPattern;
    string[] filePaths = Directory.GetFiles(basePath, searchPattern, this._request.SearchOption);
    this._totalNumberOfFiles = filePaths.Length;

    foreach (string filepath in filePaths)
    {
        this._currentFile = filepath;

        string key = filepath.Substring(basePath.Length);
        key = key.Replace(@"\", "/");
        if (key.StartsWith("/"))
            key = key.Substring(1);
        key = prefix + key;

        TransferUtilityUploadRequest uploadRequest = new TransferUtilityUploadRequest()
            .WithBucketName(this._request.BucketName)
            .WithKey(key)
            .WithFilePath(filepath)
            .WithCannedACL(this._request.CannedACL)
            .WithTimeout(this._request.Timeout)
            .WithMetadata(this._request.Metadata)
            .WithStorageClass(this._request.StorageClass)
            .WithServerSideEncryptionMethod(this._request.ServerSideEncryptionMethod)
            .WithBeforeRequestHandler(RequestEventHandler) as TransferUtilityUploadRequest;
        uploadRequest.UploadProgressEvent += new EventHandler<UploadProgressArgs>(uploadProgressEventCallback);

        this._utility.Upload(uploadRequest);
        this._numberOfFilesUploaded++;
    }
}
public override void Execute()
{
    string prefix = string.Empty;
    if (this._request.IsSetKeyPrefix())
    {
        prefix = this._request.KeyPrefix;
        prefix = prefix.Replace(@"\", "/");
        if (prefix.StartsWith("/"))
            prefix = prefix.Substring(1);
        if (!prefix.EndsWith("/"))
        {
            prefix += "/";
        }
    }

    string basePath = new DirectoryInfo(this._request.Directory).FullName;
    string searchPattern = string.IsNullOrEmpty(this._request.SearchPattern) ? "*" : this._request.SearchPattern;
    string[] filePaths = Directory.GetFiles(basePath, searchPattern, this._request.SearchOption);
    this._totalNumberOfFiles = filePaths.Length;

    foreach (string filepath in filePaths)
    {
        this._currentFile = filepath;

        string key = filepath.Substring(basePath.Length);
        key = key.Replace(@"\", "/");
        if (key.StartsWith("/"))
            key = key.Substring(1);
        key = prefix + key;

        TransferUtilityUploadRequest uploadRequest = new TransferUtilityUploadRequest
        {
            BucketName = this._request.BucketName,
            Key = key,
            FilePath = filepath,
            CannedACL = this._request.CannedACL,
            Timeout = this._request.Timeout,
            StorageClass = this._request.StorageClass,
            ServerSideEncryptionMethod = this._request.ServerSideEncryptionMethod
        };

        // metadata
        foreach (string metadataKey in this._request.Metadata.AllKeys)
        {
            var value = this._request.Metadata[metadataKey];
            uploadRequest.WithMetadata(metadataKey, value);
        }

        uploadRequest.BeforeRequestEvent += RequestEventHandler;
        uploadRequest.UploadProgressEvent += new EventHandler<UploadProgressArgs>(uploadProgressEventCallback);

        this._utility.Upload(uploadRequest);
        this._numberOfFilesUploaded++;
    }
}
private static void SetMetadataAndHeaders(TransferUtilityUploadRequest request)
{
    SetMetadata(request.Metadata);
    SetHeaders(request.Headers);
}
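The SetMetadata and SetHeaders helpers called above are not shown. A hedged sketch of what such helpers might do; the source dictionary and the header values are purely hypothetical examples:

// Hedged sketch: copy values from a hypothetical source dictionary onto the
// request's MetadataCollection, and set a couple of common headers.
private static readonly Dictionary<string, string> sampleMetadata =
    new Dictionary<string, string> { { "project", "demo" } };   // hypothetical values

private static void SetMetadata(MetadataCollection metadata)
{
    foreach (var pair in sampleMetadata)
        metadata[pair.Key] = pair.Value;   // keys are stored with the x-amz-meta- prefix
}

private static void SetHeaders(HeadersCollection headers)
{
    headers.ContentType = "application/octet-stream";   // hypothetical defaults
    headers.CacheControl = "max-age=86400";
}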