TransferUtility
/// <summary>
/// Uploads <paramref name="storageFile"/> to <c>ExistingBucketName</c> under the
/// given key, using multipart upload for files of 20 MB or more.
/// </summary>
/// <param name="name">Object key under which the file is stored in S3.</param>
/// <param name="storageFile">The file to upload.</param>
public async Task UploadFile(string name, IStorageFile storageFile)
{
    // FIX: the AmazonS3Client is IDisposable and was never disposed;
    // 'using' prevents the connection/handle leak.
    using (var s3Client = new AmazonS3Client(credentials, RegionEndpoint.USEast1))
    {
        var transferUtilityConfig = new TransferUtilityConfig
        {
            ConcurrentServiceRequests = 5,
            // Files below this threshold are sent as a single PUT.
            MinSizeBeforePartUpload = 20 * MB_SIZE,
        };

        try
        {
            using (var transferUtility = new TransferUtility(s3Client, transferUtilityConfig))
            {
                var uploadRequest = new TransferUtilityUploadRequest
                {
                    BucketName = ExistingBucketName,
                    Key = name,
                    StorageFile = storageFile,
                    // Set size of each part for multipart upload to 10 MB
                    PartSize = 10 * MB_SIZE
                };
                uploadRequest.UploadProgressEvent += OnUploadProgressEvent;
                await transferUtility.UploadAsync(uploadRequest);
            }
        }
        catch (AmazonServiceException ex)
        {
            // FIX: the original swallowed the exception with no trace at all
            // (the response-object handling was commented out). The swallow is
            // preserved as apparently-intended best-effort behavior, but the
            // failure is now at least recorded for diagnosis.
            System.Diagnostics.Debug.WriteLine("Network Error when connecting to AWS: " + ex.Message);
        }
    }
}
/// <summary>
/// Uploads every named input stream that has not already been uploaded,
/// setting MD5/content-type headers (and gzip encoding for tagged streams),
/// then returns the original stream collection unchanged for downstream use.
/// </summary>
public override IEnumerable<PvcStream> Execute(IEnumerable<PvcStream> inputStreams)
{
    var filteredInputStreams = FilterUploadedFiles(inputStreams);

    // FIX: TransferUtility is IDisposable and was leaked by the original.
    using (var transfer = new TransferUtility(this.s3client))
    {
        foreach (var inputStream in filteredInputStreams)
        {
            // Idiomatic replacement for the manual null/zero-length check.
            if (string.IsNullOrEmpty(inputStream.StreamName))
                continue;

            var uploadReq = new TransferUtilityUploadRequest();
            uploadReq.BucketName = this.bucketName;
            uploadReq.InputStream = inputStream;
            uploadReq.Key = this.StreamNameToKey(inputStream.StreamName);
            // keyMD5Sums is expected to hold an entry for every filtered key.
            uploadReq.Headers.ContentMD5 = this.keyMD5Sums[uploadReq.Key];
            uploadReq.Headers.ContentType = MimeMapping.GetMimeMapping(inputStream.StreamName);

            if (inputStream.Tags.Contains("gzip"))
            {
                uploadReq.Headers.ContentEncoding = "gzip";
            }

            transfer.Upload(uploadReq);
        }
    }

    // Return the unfiltered input so later pipeline stages see every stream.
    return inputStreams;
}
/// <summary>
/// Demo entry point: uploads a single file to S3 with a generated key,
/// public-read ACL and a "Title" metadata entry.
/// </summary>
static void Main(string[] args)
{
    try
    {
        var transferUtility = new TransferUtility(new AmazonS3Client(Amazon.RegionEndpoint.USEast1));
        string fileKey = genKey();

        var uploadRequest = new TransferUtilityUploadRequest
        {
            BucketName = bucketName,
            FilePath = filepath,
            CannedACL = S3CannedACL.PublicRead,
            Key = fileKey
        };
        uploadRequest.Metadata.Add("Title", "Tiger");

        transferUtility.Upload(uploadRequest);

        Console.WriteLine("File Uploaded. Access \"S3.amazonaws.com/sheltdev/" + fileKey);
        Console.ReadKey(false);
    }
    catch (AmazonS3Exception e)
    {
        Console.WriteLine(e.Message, e.InnerException);
        Console.ReadKey(false);
    }
}
/// <summary>
/// Uploads SourceFile to BucketName as DestinationFile, optionally marking
/// the object public via the raw x-amz-acl header.
/// </summary>
protected override void ExecuteS3Task()
{
    // Fail fast when the local source file is missing.
    if (!File.Exists(this.SourceFile))
    {
        throw new BuildException("source-file does not exist: " + this.SourceFile);
    }

    using (var utility = new Amazon.S3.Transfer.TransferUtility(this.AccessKey, this.SecretAccessKey))
    {
        var request = new TransferUtilityUploadRequest
        {
            BucketName = this.BucketName,
            FilePath = this.SourceFile,
            Key = this.DestinationFile
        };

        if (PublicRead)
        {
            request.AddHeader("x-amz-acl", "public-read");
        }

        utility.Upload(request);
    }
}
/// <summary>
/// Uploads a file to S3 with AES256 server-side encryption, keyed by the
/// tuple's name plus a timestamp suffix.
/// </summary>
/// <param name="file">Item1 = local path, Item2 = key prefix, Item3 = timestamp.</param>
/// <param name="existingBucketName">Destination bucket (must already exist).</param>
public static void UploadFile(System.Tuple<string, string, DateTime> file, string existingBucketName)
{
    NameValueCollection appConfig = ConfigurationManager.AppSettings;
    string accessKeyID = appConfig["AWSAccessKey"];
    string secretAccessKey = appConfig["AWSSecretKey"];
    try
    {
        TransferUtility fileTransferUtility = new TransferUtility(accessKeyID, secretAccessKey);

        // FIX: the original format "ddmmyyyymmmmhhss" used 'mm' (minutes)
        // where the month was clearly intended and omitted minutes from the
        // time portion; "ddMMyyyyHHmmss" is day-month-year hour-minute-second.
        TransferUtilityUploadRequest uploadRequest = new TransferUtilityUploadRequest()
            .WithBucketName(existingBucketName)
            .WithFilePath(file.Item1)
            .WithServerSideEncryptionMethod(ServerSideEncryptionMethod.AES256)
            .WithKey(file.Item2 + file.Item3.ToString("ddMMyyyyHHmmss"));

        // Subscribe to upload-progress notifications.
        uploadRequest.UploadProgressEvent += new EventHandler<UploadProgressArgs>(uploadRequest_UploadPartProgressEvent);

        fileTransferUtility.Upload(uploadRequest);
    }
    catch (ArgumentException e)
    {
        Console.WriteLine(e.Message);
    }
    catch (AmazonS3Exception e)
    {
        Console.WriteLine(e.Message + e.InnerException);
    }
}
// Stages a local file for upload to S3.
//   _local_file_path : local file path, e.g. "d:\filename.zip"
//   _bucket_name     : S3 bucket name; the bucket must already exist.
//   _sub_directory   : if non-empty, the object is placed under this folder
//                      inside the bucket (created implicitly if absent).
//   _file_name_S3    : the object's name (key) inside S3.
// NOTE(review): this method only populates the 'utility' and 'request'
// fields; no Upload() call is made here — presumably another method performs
// the actual transfer. Confirm that callers invoke the upload afterwards.
public void SetFileToS3(string _local_file_path, string _bucket_name, string _sub_directory, string _file_name_S3)
{
    // Client is pinned to RegionEndpoint.EUCentral1 (author's region);
    // change it to match the destination bucket's region.
    IAmazonS3 client = Amazon.AWSClientFactory.CreateAmazonS3Client(RegionEndpoint.EUCentral1);

    // Transfer utility, kept in a field for later use.
    utility = new TransferUtility(client);

    // Upload request, also kept in a field.
    request = new TransferUtilityUploadRequest();

    if (_sub_directory == "" || _sub_directory == null)
    {
        request.BucketName = _bucket_name; // no subfolder: upload straight into the bucket
    }
    else
    {
        // "bucket/subfolder" targets (or creates) a folder inside the bucket.
        request.BucketName = _bucket_name + @"/" + _sub_directory;
    }

    request.Key = _file_name_S3;         // the object's name in S3
    request.FilePath = _local_file_path; // local source path
}
/// <summary>
/// Uploads each local resource to S3 under a GUID-prefixed key with public
/// read access, and returns attachments pointing at the CloudFront URLs.
/// </summary>
public List<MessageAttachment> Upload(IEnumerable<LocalResource> localResources)
{
    var result = new List<MessageAttachment>();
    if (localResources == null)
    {
        return result;
    }

    foreach (var resource in localResources)
    {
        // GUID prefix keeps keys unique even for identical file names.
        string baseName = Path.GetFileName(resource.LocalPath);
        string objectKey = Guid.NewGuid().ToString() + baseName;

        var uploadRequest = new TransferUtilityUploadRequest()
            .WithBucketName(m_BucketName)
            .WithFilePath(resource.LocalPath)
            .WithSubscriber(this.UploadFileProgressCallback)
            .WithCannedACL(S3CannedACL.PublicRead)
            .WithKey(objectKey);

        m_s3transferUtility.Upload(uploadRequest);

        // Fall back to the file name when no description was supplied.
        var attachment = new MessageAttachment(new Uri(m_CloudFrontRoot, objectKey),
                                               resource.Description ?? baseName);
        result.Add(attachment);
    }

    return result;
}
//Pushes file to Amazon S3 with public read permissions
// Returns true on success, false on any failure (best-effort contract).
public bool UploadFile(string localFile, string fileName, string contentType)
{
    IAmazonS3 client = GetS3Client();
    try
    {
        var request = new TransferUtilityUploadRequest
        {
            BucketName = _BucketName,
            Key = _Prefix + fileName,
            FilePath = localFile,
            StorageClass = S3StorageClass.Standard,
            CannedACL = S3CannedACL.PublicRead,
            ContentType = contentType
        };

        // FIX: TransferUtility is IDisposable; the original leaked it.
        using (var fileTransferUtility = new TransferUtility(client))
        {
            fileTransferUtility.Upload(request);
        }
        return true;
    }
    catch
    {
        // Deliberate best-effort swallow preserved from the original.
        // NOTE(review): consider logging the exception for diagnosability.
        return false;
    }
}
/// <summary>
/// Uploads an image stream to the "BEPiD" bucket with public read access
/// and reduced-redundancy storage, returning its public URL.
/// </summary>
/// <param name="filename">Object key (and final URL path segment).</param>
/// <param name="fileStream">Image content to upload.</param>
/// <returns>Public S3 URL of the uploaded image.</returns>
private string uploadImagem(string filename, Stream fileStream)
{
    // FIX: dispose the TransferUtility (IDisposable) instead of leaking it.
    // The redundant .ToString() calls on AppSettings values (already strings)
    // were also removed.
    using (TransferUtility fileTransferUtility = new TransferUtility(
        System.Configuration.ConfigurationManager.AppSettings["AccessKey"],
        System.Configuration.ConfigurationManager.AppSettings["SecretKey"]))
    {
        var uploadRequest = new Amazon.S3.Transfer.TransferUtilityUploadRequest();
        uploadRequest.InputStream = fileStream;
        uploadRequest.BucketName = "BEPiD";
        uploadRequest.Key = filename;
        uploadRequest.StorageClass = S3StorageClass.ReducedRedundancy;
        uploadRequest.CannedACL = S3CannedACL.PublicRead;
        fileTransferUtility.Upload(uploadRequest);
    }

    // Public URL of the uploaded object.
    string urlImagem = "http://s3.amazonaws.com/BEPiD/" + filename;
    return urlImagem;
}
/// <summary>
/// Uploads a local file to <c>bucket</c> under "destinationPath/fileName",
/// creating the bucket if necessary; optionally marks the object public.
/// </summary>
/// <exception cref="Exception">Wraps any failure, preserving the original as InnerException.</exception>
public void SendDocument(string filePath, string bucket, string destinationPath, string fileNamOnDestinationWithExtension = "index.html", bool isPublic = false)
{
    try
    {
        // FIX: dispose via 'using' so the utility is released even when the
        // upload throws (the original only called Dispose on success).
        using (var transferUtility = new TransferUtility(amazonS3Client))
        {
            // Create the bucket on first use.
            if (!transferUtility.S3Client.DoesS3BucketExist(bucket))
                transferUtility.S3Client.PutBucket(new PutBucketRequest { BucketName = bucket });

            var request = new TransferUtilityUploadRequest
            {
                BucketName = bucket,
                Key = string.Format("{0}/{1}", destinationPath, fileNamOnDestinationWithExtension),
                FilePath = filePath
            };

            if (isPublic)
                request.Headers["x-amz-acl"] = "public-read";

            request.UploadProgressEvent += uploadFileProgressCallback;
            transferUtility.Upload(request);
        }
    }
    catch (Exception ex)
    {
        // FIX: pass the caught exception as InnerException so the original
        // stack trace and type are not lost by the wrapping.
        throw new Exception("Error send file to S3. " + ex.Message, ex);
    }
}
/// <summary>
/// Begins an asynchronous upload of a local file to the configured bucket;
/// <c>uploadComplete</c> fires when the transfer finishes.
/// </summary>
public void UploadFile(string filePath, string toPath)
{
    var uploadRequest = new TransferUtilityUploadRequest
    {
        FilePath = filePath,
        BucketName = bucketName,
        Key = toPath
    };
    transferUtility.BeginUpload(uploadRequest, new AsyncCallback(uploadComplete), null);
}
/// <summary>
/// Begins an asynchronous upload of a local file to the configured bucket,
/// reporting progress; the destination key is passed as the async state so
/// <c>uploadComplete</c> can identify which transfer finished.
/// </summary>
public void UploadFile(string filePath, string toPath)
{
    var uploadRequest = new TransferUtilityUploadRequest
    {
        FilePath = filePath,
        BucketName = bucketName,
        Key = toPath
    };
    //uploadRequest.AddHeader("x-amz-acl", "private");
    uploadRequest.UploadProgressEvent += uploadRequest_UploadProgressEvent;
    transferUtility.BeginUpload(uploadRequest, new AsyncCallback(uploadComplete), toPath);
}
/// <summary>
/// Accepts a multipart HTTP post, uploads the first posted file to S3 under
/// a GUID-prefixed key, stores the path on the current user's profile, and
/// returns 201 Created with the remote path (400 when no file was posted).
/// </summary>
public HttpResponseMessage ExternalPost()
{
    HttpRequest httpRequest = HttpContext.Current.Request;
    var fileTransferUtility = new TransferUtility(
        new AmazonS3Client(ConfigService.AwsAccessKeyId,
                           ConfigService.AwsSecretAccessKey,
                           Amazon.RegionEndpoint.USWest2));

    if (httpRequest.Files.Count == 0)
    {
        return Request.CreateResponse(HttpStatusCode.BadRequest);
    }

    foreach (string file in httpRequest.Files)
    {
        HttpPostedFile postedFile = httpRequest.Files[file];

        // GUID prefix keeps uploaded file names collision-free.
        string guid = Guid.NewGuid().ToString();
        string remoteFilePath = ConfigService.RemoteFilePath + guid + "_" + postedFile.FileName;

        var fileTransferUtilityRequest = new TransferUtilityUploadRequest
        {
            BucketName = ConfigService.BucketName,
            InputStream = postedFile.InputStream,
            Key = remoteFilePath,
        };
        fileTransferUtility.Upload(fileTransferUtilityRequest);

        // Persist the new photo path on the current user's profile.
        string paraRemoteFilePath = "/" + remoteFilePath;
        string userId = UserService.GetCurrentUserId();
        ProfileService.UpdatePhotoPath(userId, paraRemoteFilePath);

        var response = new ItemResponse<string> { Item = remoteFilePath };
        // Only the first posted file is processed; respond immediately.
        return Request.CreateResponse(HttpStatusCode.Created, response.Item);
    }

    // Unreachable: Files.Count > 0 guarantees the loop returned above
    // (mirrors the original's null fall-through).
    return null;
}
/// <summary>
/// Demonstrates the four TransferUtility upload variants: implicit key,
/// explicit key, stream source, and a fully-configured request.
/// </summary>
static void Main(string[] args)
{
    try
    {
        var transferUtility = new TransferUtility(new AmazonS3Client(Amazon.RegionEndpoint.USEast1));

        // 1. Upload a file; the file name becomes the object key.
        transferUtility.Upload(filePath, existingBucketName);
        Console.WriteLine("Upload 1 completed");

        // 2. Upload with an explicit object key.
        transferUtility.Upload(filePath, existingBucketName, keyName);
        Console.WriteLine("Upload 2 completed");

        // 3. Upload from a System.IO.Stream.
        using (var fileToUpload = new FileStream(filePath, FileMode.Open, FileAccess.Read))
        {
            transferUtility.Upload(fileToUpload, existingBucketName, keyName);
        }
        Console.WriteLine("Upload 3 completed");

        // 4. Upload with advanced options: storage class, part size, ACL, metadata.
        var advancedRequest = new TransferUtilityUploadRequest
        {
            BucketName = existingBucketName,
            FilePath = filePath,
            StorageClass = S3StorageClass.ReducedRedundancy,
            PartSize = 5242880, // 5 MB.
            Key = keyName,
            CannedACL = S3CannedACL.PublicRead
        };
        advancedRequest.Metadata.Add("param1", "Value1");
        advancedRequest.Metadata.Add("param2", "Value2");
        transferUtility.Upload(advancedRequest);
        Console.WriteLine("Upload 4 completed");
    }
    catch (AmazonS3Exception s3Exception)
    {
        Console.WriteLine(s3Exception.Message, s3Exception.InnerException);
    }
    Console.WriteLine("Press any key to continue...");
    Console.ReadKey();
}
/// <summary>
/// Uploads a local file to the given bucket via the shared utility and
/// deletes the local copy once the upload has succeeded; failures are logged.
/// </summary>
private void UploadFileToAmazon(string bucketName, string localFilePath)
{
    try
    {
        var request = new TransferUtilityUploadRequest
        {
            BucketName = bucketName,
            FilePath = localFilePath
        };
        utility.Upload(request);

        // Only remove the local copy after a successful upload.
        File.Delete(localFilePath);
    }
    catch (Exception ex)
    {
        Helpers.Helpers.LogExceptions(ex.Message);
    }
}
/// <summary>
/// Uploads a photo stream to the photo bucket with public read access and
/// returns an S3Photo describing its public URL and title.
/// </summary>
/// <param name="stream">Photo content.</param>
/// <param name="filename">Object key in the photo bucket.</param>
/// <param name="title">Display title; falls back to the file name when empty.</param>
/// <param name="descriptioSn">Unused (name kept for interface compatibility).</param>
/// <param name="tags">Unused (kept for interface compatibility).</param>
public IPhoto UploadPhoto(Stream stream, string filename, string title, string descriptioSn, string tags)
{
    var request = new TransferUtilityUploadRequest();
    request.InputStream = stream;
    request.BucketName = photoBucket;
    request.Key = filename;
    request.CannedACL = Amazon.S3.Model.S3CannedACL.PublicRead;

    // FIX: dispose the TransferUtility (IDisposable) instead of leaking it.
    using (var transferUtility = new TransferUtility(
        ConfigurationManager.AppSettings["AWSAccessKey"],
        ConfigurationManager.AppSettings["AWSSecretKey"]))
    {
        transferUtility.Upload(request);
    }

    var photo = new S3Photo();
    photo.WebUrl = string.Format("http://s3.amazonaws.com/{0}/{1}", photoBucket, filename);
    // FIX: the 'title' parameter was ignored (Title was always the file
    // name); honor it, preserving the old behavior when no title is given.
    photo.Title = string.IsNullOrEmpty(title) ? filename : title;
    return photo;
}
/// <summary>
/// Uploads a local file (public-read) to the given bucket; the object key
/// defaults to the file's name.
/// </summary>
private void UploadFileToS3(string filePath, string bucketname)
{
    var client = Amazon.AWSClientFactory.CreateAmazonS3Client(accesskey, secretkey, RegionEndpoint.USEast1);

    var uploadRequest = new TransferUtilityUploadRequest
    {
        FilePath = filePath,
        BucketName = bucketname,
        CannedACL = S3CannedACL.PublicRead
        // Key omitted: TransferUtility derives it from the file name.
    };

    // FIX: the original called UploadAsync without awaiting the returned
    // Task — this void method returned before the transfer finished and any
    // failure went unobserved. The synchronous Upload completes (or throws)
    // before returning. The utility is also disposed now.
    using (var fileTransferUtility = new TransferUtility(client))
    {
        fileTransferUtility.Upload(uploadRequest);
    }
}
/// <summary>
/// Packages the current directory as a gzipped tar archive, uploads it to S3
/// using freshly-obtained temporary credentials, then triggers an AppHarbor
/// build for the uploaded package.
/// </summary>
/// <param name="arguments">Command-line arguments (not used directly here).</param>
protected override void InnerExecute(string[] arguments)
{
    _writer.WriteLine("Getting upload credentials... ");
    _writer.WriteLine();
    var uploadCredentials = GetCredentials();

    // The archive is staged in a temp file that is always deleted in the
    // finally block below, even if packaging or upload fails.
    var temporaryFileName = Path.GetTempFileName();
    try
    {
        // The GZipStream is constructed with leaveOpen = true (third arg) so
        // disposing it flushes the gzip footer without closing packageStream;
        // both streams are then closed by this using block, in order.
        using (var packageStream = new FileStream(temporaryFileName, FileMode.Create, FileAccess.ReadWrite, FileShare.ReadWrite))
        using (var gzipStream = new GZipStream(packageStream, CompressionMode.Compress, true))
        {
            var sourceDirectory = new DirectoryInfo(Directory.GetCurrentDirectory());
            sourceDirectory.ToTar(gzipStream, excludedDirectoryNames: _excludedDirectories.ToArray());
        }

        using (var s3Client = new AmazonS3Client(uploadCredentials.GetSessionCredentials()))
        using (var transferUtility = new TransferUtility(s3Client))
        {
            var request = new TransferUtilityUploadRequest
            {
                FilePath = temporaryFileName,
                BucketName = uploadCredentials.Bucket,
                Key = uploadCredentials.ObjectKey,
                // Large archives can take a while; allow up to two hours.
                Timeout = (int)TimeSpan.FromHours(2).TotalMilliseconds,
            };

            // Render upload progress as a megabyte-based progress bar.
            var progressBar = new MegaByteProgressBar();
            request.UploadProgressEvent += (object x, UploadProgressArgs y) => progressBar
                .Update("Uploading package", y.TransferredBytes, y.TotalBytes);

            transferUtility.Upload(request);

            // Move the cursor past the progress-bar line before writing more.
            Console.CursorTop++;
            _writer.WriteLine();
        }
    }
    finally
    {
        File.Delete(temporaryFileName);
    }

    TriggerAppHarborBuild(uploadCredentials);
}
/// <summary>
/// Uploads the given stream to S3 with public read access.
/// </summary>
/// <param name="localFilePath">Retained for interface compatibility; the upload reads <paramref name="stream"/>.</param>
/// <param name="bucketName">Destination bucket; must already exist.</param>
/// <param name="subDirectoryInBucket">Optional folder inside the bucket.</param>
/// <param name="stream">Content to upload.</param>
/// <param name="fileName">Object name (key) in S3.</param>
/// <returns>True when the upload completed without throwing.</returns>
public bool sendMyFileToS3(string localFilePath, string bucketName, string subDirectoryInBucket, System.IO.Stream stream, string fileName)
{
    // Region choice does not change where the file is stored, only which
    // endpoint handles the request.
    IAmazonS3 client = Amazon.AWSClientFactory.CreateAmazonS3Client(Amazon.RegionEndpoint.USEast1);

    // FIX: the original built TWO requests — the sub-directory handling and
    // FilePath were applied to a request that was never uploaded, so files
    // always landed at the bucket root regardless of subDirectoryInBucket.
    // A single request is configured and uploaded here. The utility is also
    // disposed now instead of leaked.
    using (TransferUtility utility = new TransferUtility(client))
    {
        var uploadRequest = new TransferUtilityUploadRequest
        {
            InputStream = stream,
            CannedACL = S3CannedACL.PublicRead,
            Key = fileName,
            BucketName = string.IsNullOrEmpty(subDirectoryInBucket)
                ? bucketName                                 // no subdirectory: bucket root
                : bucketName + @"/" + subDirectoryInBucket   // "bucket/sub" targets the folder
        };

        utility.Upload(uploadRequest);
    }

    return true; // indicate that the file was sent
}
/// <summary>
/// Uploads a database file from the fixed local folder to the configured
/// bucket, using the same name as the S3 key. Always returns true (failures
/// surface as exceptions).
/// </summary>
public bool SendFileToS3(string fileNameInS3)
{
    // Local database files all live under this fixed folder.
    string localFilePath = string.Concat(@"C:\source\MyBookLibrary\MyBookLibrary.Data\Database\", fileNameInS3);

    using (_client = new Amazon.S3.AmazonS3Client(Amazon.RegionEndpoint.USEast1))
    {
        var utility = new TransferUtility(_client);
        var request = new TransferUtilityUploadRequest
        {
            BucketName = BucketName,
            Key = fileNameInS3,
            FilePath = localFilePath
        };

        utility.Upload(request);
        Console.Write($"File {fileNameInS3} uploaded.");
        return true;
    }
}
/// <summary>
/// Uploads a stream to the user's bucket (bucket name = userID) with public
/// read access.
/// </summary>
/// <returns>True when the upload succeeds.</returns>
/// <exception cref="AmazonS3Exception">Propagated when S3 rejects the upload.</exception>
public bool Upload(string userID, string fileName, Stream file)
{
    try
    {
        var fileTransferUtility = new TransferUtility(new AmazonS3Client(Amazon.RegionEndpoint.APSoutheast1));

        var request = new TransferUtilityUploadRequest();
        request.InputStream = file;
        request.Key = fileName;
        request.BucketName = userID; // one bucket per user
        request.CannedACL = S3CannedACL.PublicRead;

        fileTransferUtility.Upload(request);

        // FIX: the original always returned false ('retval' was never set),
        // so callers could not distinguish success from failure.
        return true;
    }
    catch (AmazonS3Exception)
    {
        // FIX: 'throw amazonS3Exception;' reset the stack trace; a bare
        // 'throw' preserves it.
        throw;
    }
}
/// <summary>
/// Uploads a file to the bucket/path described by a dynamic credential token
/// (session credentials), with AES256 server-side encryption and 6 MB parts.
/// </summary>
/// <param name="token">Dynamic object carrying path, keys, session token, bucket and uploadPassword.</param>
/// <param name="filePath">Local file to upload; its name becomes the key suffix.</param>
public static async Task UploadFile(dynamic token, string filePath)
{
    try
    {
        string path = token.path;
        string accessKeyId = token.accessKeyId;
        string secretAccessKey = token.secretAccessKey;
        string sessionToken = token.sessionToken;
        string bucket = token.bucket;
        string keyName = string.Format("{0}/{1}", path, Path.GetFileName(filePath));

        // FIX: dispose the client and utility (both IDisposable) when done.
        using (var client = new AmazonS3Client(accessKeyId, secretAccessKey, sessionToken, RegionEndpoint.APSoutheast2))
        using (var fileTransferUtility = new TransferUtility(client))
        {
            var request = new TransferUtilityUploadRequest
            {
                BucketName = bucket,
                FilePath = filePath,
                Key = keyName,
                PartSize = 6291456, // 6 MB.
                ServerSideEncryptionMethod = ServerSideEncryptionMethod.AES256
            };
            await fileTransferUtility.UploadAsync(request);
        }

        Trace.WriteLine(token.uploadPassword);
    }
    catch (AmazonS3Exception s3Exception)
    {
        // FIX: the original passed Message as a composite-format string,
        // which throws FormatException if the message contains '{' or '}'.
        Console.WriteLine("{0} {1}", s3Exception.Message, s3Exception.InnerException);
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.Message);
    }
}
/// <summary>
/// Ensures an upload request has a bucket, a source (file path or stream)
/// and a key, defaulting the key to the file's name for file uploads.
/// </summary>
/// <exception cref="ArgumentNullException">Request or bucket name missing.</exception>
/// <exception cref="ArgumentException">No source, missing key for a stream, or nonexistent file.</exception>
void validate(TransferUtilityUploadRequest request)
{
    if (request == null)
    {
        throw new ArgumentNullException("request");
    }
    if (!request.IsSetBucketName())
    {
        throw new ArgumentNullException("bucketName");
    }

    bool hasFile = request.IsSetFilePath();
    if (!hasFile && !request.IsSetInputStream())
    {
        throw new ArgumentException(
            "Please specify either a Filename or provide a Stream to PUT an object into Amazon S3.");
    }

    if (!request.IsSetKey())
    {
        if (!hasFile)
        {
            throw new ArgumentException(
                "The Key property must be specified when using a Stream to upload into Amazon S3.");
        }
        // File uploads can default their key to the file's name.
        request.Key = new FileInfo(request.FilePath).Name;
    }

    if (hasFile && !File.Exists(request.FilePath))
    {
        throw new ArgumentException("The file indicated by the FilePath property does not exist!");
    }
}
/// <summary>
/// Constructs a new UploadDirectoryFileRequestArgs instance, capturing the
/// per-file upload request so it is available via the UploadRequest property.
/// </summary>
/// <param name="request">The upload request for the file being processed.</param>
public UploadDirectoryFileRequestArgs(TransferUtilityUploadRequest request)
{
    UploadRequest = request;
}
/// <summary>
/// Uploads the contents of a stream to the given bucket and key. Large
/// payloads are split into parts via Amazon S3's multipart API and
/// reassembled server-side into a single object.
/// </summary>
/// <param name="stream">The stream whose contents become the object.</param>
/// <param name="bucketName">Destination bucket.</param>
/// <param name="key">Object key under which the content is stored.</param>
/// <exception cref="ArgumentNullException">Stream or key is missing.</exception>
public void Upload(Stream stream, string bucketName, string key)
{
    if (stream == null)
    {
        throw new ArgumentNullException("stream");
    }
    if (string.IsNullOrEmpty(key))
    {
        throw new ArgumentNullException("key");
    }

    var request = new TransferUtilityUploadRequest()
        .WithBucketName(bucketName)
        .WithKey(key);
    request.InputStream = stream;

    Upload(request);
}
/// <summary>
/// Uploads the specified file to the given bucket and key. Multiple threads
/// read the file in parallel; large files are split into parts via Amazon
/// S3's multipart API and reassembled server-side into a single object.
/// </summary>
/// <param name="filePath">Path of the file to upload.</param>
/// <param name="bucketName">Destination bucket.</param>
/// <param name="key">Object key under which the file is stored.</param>
/// <exception cref="ArgumentNullException">File path is missing.</exception>
/// <exception cref="ArgumentException">File does not exist.</exception>
public void Upload(string filePath, string bucketName, string key)
{
    if (string.IsNullOrEmpty(filePath))
    {
        throw new ArgumentNullException("filePath");
    }
    if (!File.Exists(filePath))
    {
        // FIX: corrected the grammar of the user-facing error message
        // ("does not exists" -> "does not exist").
        throw new ArgumentException(string.Format("The file {0} does not exist!", filePath));
    }
    TransferUtilityUploadRequest request = new TransferUtilityUploadRequest()
        .WithBucketName(bucketName)
        .WithKey(key)
        .WithFilePath(filePath);
    Upload(request);
}
/// <summary>
/// Initiates the asynchronous execution of the Upload operation.
/// <seealso cref="M:Amazon.S3.Transfer.TransferUtility.Upload"/>
/// </summary>
/// <param name="request">Contains all the parameters used for uploading to Amazon S3.</param>
/// <param name="callback">An AsyncCallback delegate that is invoked when the operation completes.</param>
/// <param name="state">A user-defined state object passed to the callback; retrieve it via AsyncState.</param>
/// <exception cref="T:System.ArgumentNullException"></exception>
/// <exception cref="T:System.Net.WebException"></exception>
/// <exception cref="T:Amazon.S3.AmazonS3Exception"></exception>
/// <returns>An IAsyncResult usable for polling/waiting; also required by EndUpload.</returns>
public IAsyncResult BeginUpload(TransferUtilityUploadRequest request, AsyncCallback callback, object state)
{
    validate(request);

    // Small payloads use a single PUT; anything at or above the configured
    // threshold goes through the multipart API.
    BaseCommand command = request.ContentLength < this._config.MinSizeBeforePartUpload
        ? (BaseCommand)new SimpleUploadCommand(this._s3Client, this._config, request)
        : new MultipartUploadCommand(this._s3Client, this._config, request);

    return beginOperation(command, callback, state);
}
/// <summary>
/// Initiates the asynchronous execution of the Upload operation.
/// <seealso cref="M:Amazon.S3.Transfer.TransferUtility.Upload"/>
/// </summary>
/// <param name="stream">The stream whose contents become the object.</param>
/// <param name="bucketName">Destination bucket.</param>
/// <param name="key">Object key under which the content is stored.</param>
/// <param name="callback">An AsyncCallback delegate that is invoked when the operation completes.</param>
/// <param name="state">A user-defined state object passed to the callback; retrieve it via AsyncState.</param>
/// <exception cref="T:System.ArgumentNullException"></exception>
/// <exception cref="T:System.Net.WebException"></exception>
/// <exception cref="T:Amazon.S3.AmazonS3Exception"></exception>
/// <returns>An IAsyncResult usable for polling/waiting; also required by EndUpload.</returns>
public IAsyncResult BeginUpload(Stream stream, string bucketName, string key, AsyncCallback callback, object state)
{
    if (stream == null)
    {
        throw new ArgumentNullException("stream");
    }
    if (string.IsNullOrEmpty(key))
    {
        throw new ArgumentNullException("key");
    }

    var request = new TransferUtilityUploadRequest()
        .WithBucketName(bucketName)
        .WithKey(key);
    request.InputStream = stream;

    return BeginUpload(request, callback, state);
}
/// <summary>
/// Uploads the file or stream specified by the request. Track progress via
/// the request's <c>UploadProgressEvent</c>. Large uploads are divided into
/// parts with Amazon S3's multipart API and reassembled as one object.
/// </summary>
/// <remarks>
/// <para>
/// If you are uploading large files, TransferUtility will use multipart upload to fulfill the request.
/// If a multipart upload is interrupted, TransferUtility will attempt to abort the multipart upload.
/// Under certain circumstances (network outage, power failure, etc.), TransferUtility will not be able
/// to abort the multipart upload. In this case, in order to stop getting charged for the storage of uploaded parts,
/// you should manually invoke TransferUtility.AbortMultipartUploadsAsync() to abort the incomplete multipart uploads.
/// </para>
/// </remarks>
/// <param name="request">Contains all the parameters required to upload to Amazon S3.</param>
/// <param name="cancellationToken">Token other objects or threads can use to signal cancellation.</param>
/// <returns>The task object representing the asynchronous operation.</returns>
public Task UploadAsync(TransferUtilityUploadRequest request, CancellationToken cancellationToken = default(CancellationToken))
{
    // Build the appropriate upload command and hand back its task directly.
    return GetUploadCommand(request, null).ExecuteAsync(cancellationToken);
}
/// <summary>
/// Registration button handler: builds the application DTO from the form,
/// uploads the logo (required, max 4 MB, max 120x120 px) to S3, and inserts
/// the record, redirecting on success.
/// </summary>
protected void cmdCadastrar_Click(object sender, EventArgs e)
{
    try
    {
        if (Page.IsValid)
        {
            // Collect form values into the DTO.
            AplicativoDTO _aplicativoDTO = new AplicativoDTO();
            _aplicativoDTO.idCategoria = int.Parse(cmbCategoria.SelectedValue.ToString());
            _aplicativoDTO.linkAplicativo = txtLink.Text;
            _aplicativoDTO.nomeAplicativo = txtNome.Text;
            _aplicativoDTO.situacao = "P";
            _aplicativoDTO.nomeGrupoAplicativo = txtGrupo.Text;
            _aplicativoDTO.idAluno = int.Parse(Session["I"].ToString());

            // Logo upload.
            int tamanho = flImagem.PostedFile.ContentLength;
            if (tamanho == 0)
            {
                lblResultado.Text = "Por favor envie a logomarca do aplicativo";
                return;
            }
            if (tamanho <= 4000000)
            {
                // GUID prefix keeps uploaded file names unique.
                string filename = System.IO.Path.GetFileName(flImagem.PostedFile.FileName);
                filename = Guid.NewGuid() + filename;

                TransferUtility fileTransferUtility = new TransferUtility(
                    System.Configuration.ConfigurationManager.AppSettings["AccessKey"].ToString(),
                    System.Configuration.ConfigurationManager.AppSettings["SecretKey"].ToString());

                var uploadRequest = new Amazon.S3.Transfer.TransferUtilityUploadRequest();
                uploadRequest.InputStream = flImagem.PostedFile.InputStream;
                uploadRequest.BucketName = "BEPiD";
                uploadRequest.Key = filename;
                uploadRequest.StorageClass = S3StorageClass.ReducedRedundancy;
                uploadRequest.CannedACL = S3CannedACL.PublicRead;
                fileTransferUtility.Upload(uploadRequest);

                string urlImagem = "http://s3.amazonaws.com/BEPiD/" + filename.ToString();

                // FIX: the original leaked the HTTP response, its stream and
                // the Image; dispose them deterministically after measuring.
                int w, h;
                HttpWebRequest httpWebRequest = (HttpWebRequest)HttpWebRequest.Create(urlImagem);
                using (HttpWebResponse httpWebReponse = (HttpWebResponse)httpWebRequest.GetResponse())
                using (Stream stream = httpWebReponse.GetResponseStream())
                using (System.Drawing.Image objImage = System.Drawing.Image.FromStream(stream))
                {
                    w = objImage.Width;
                    h = objImage.Height;
                }

                if (w <= 120 && h <= 120)
                {
                    _aplicativoDTO.imagemAplicativo = filename.ToString();
                }
                // NOTE(review): as in the original, an oversized image (or a
                // file > 4 MB) still falls through to the insert below, just
                // without an image — confirm this is intended.
            }
            else
            {
                lblResultado.Text = "A imagem deve respeitar o tamanho informado. Dimensões: 120 x 120.";
            }

            AplicativoBRL _aplicativoBRL = new AplicativoBRL();
            if (_aplicativoBRL.insertAplicativo(_aplicativoDTO))
            {
                Response.Redirect("AddAplicativo?id=sucesso");
            }
        }
    }
    catch (Exception)
    {
        // FIX: 'throw ex;' reset the stack trace; a bare 'throw' preserves it.
        throw;
    }
}
/// <summary>
/// Runs on a background thread so the UI stays responsive while the upload
/// is in progress; shows a message box on completion or failure.
/// </summary>
/// <param name="state">unused</param>
private void threadedUploadFile(object state)
{
    try
    {
        // Make sure the destination bucket exists before uploading.
        this._transferUtility.S3Client.PutBucket(new PutBucketRequest().WithBucketName(this.Bucket));

        var request = new TransferUtilityUploadRequest()
            .WithBucketName(this.Bucket)
            .WithFilePath(this.UploadFile)
            .WithTimeout(FIVE_MINUTES)
            .WithSubscriber(this.uploadFileProgressCallback); // progress tracking

        this._transferUtility.Upload(request);

        displayMessageBox("Completed file upload!", "Success", MessageBoxButton.OK, MessageBoxImage.Information);
    }
    catch (Exception e)
    {
        displayMessageBox(e.Message, "Error", MessageBoxButton.OK, MessageBoxImage.Error);
    }
    finally
    {
        // Re-enable the upload control regardless of the outcome.
        updateIsEnabled(this._ctlUploadFile, true);
    }
}
/// <summary>
/// Uploads the file or stream specified by the request. Track progress via
/// the request's <c>UploadProgressEvent</c>. Large uploads are divided into
/// parts with Amazon S3's multipart API and reassembled as one object.
/// </summary>
/// <param name="request">Contains all the parameters used for uploading to Amazon S3.</param>
public void Upload(TransferUtilityUploadRequest request)
{
    validate(request);

    // Small payloads use a single PUT; anything at or above the configured
    // threshold goes through the multipart API.
    BaseCommand command = request.ContentLength < this._config.MinSizeBeforePartUpload
        ? (BaseCommand)new SimpleUploadCommand(this._s3Client, this._config, request)
        : new MultipartUploadCommand(this._s3Client, this._config, request);

    command.Execute();
}
/// <summary>
/// Interactive release driver. Shows/edits version numbers, asks which zip and
/// NuGet packages to create or upload, optionally rebuilds the solution with a
/// fixed version number and the git commit hash embedded, then packages and
/// uploads everything to S3 and nuget.org.
/// </summary>
public void Run() {
    MakeConsoleNicer();

    say("Project root: " + f.ParentPath);
    nl();

    //The base name for creating zip packages. [assembly: PackageName("Resizer")]
    string packageBase = v.get("PackageName");

    //Show the current version numbers from SharedAssemblyInfo.cs (trailing '.' / '*' stripped).
    //[assembly: AssemblyFileVersion("3.0.5.*")]
    string fileVer = list("FileVersion", v.get("AssemblyFileVersion").TrimEnd('.', '*'));
    //[assembly: AssemblyVersion("3.0.5.*")]
    string assemblyVer = list("AssemblyVersion", v.get("AssemblyVersion").TrimEnd('.', '*'));
    //Informational version is used in zip package names. [assembly: AssemblyInformationalVersion("3-alpha-5")]
    string infoVer = list("InfoVersion", v.get("AssemblyInformationalVersion").TrimEnd('.', '*'));
    //NuGet package version; new builds need a 4th number specified.
    string nugetVer = list("NugetVersion", v.get("NugetVersion").TrimEnd('.', '*'));

    //Optionally edit each version number.
    if (ask("Change version numbers?")) {
        fileVer = change("FileVersion", v.get("AssemblyFileVersion").TrimEnd('.', '*'));
        assemblyVer = change("AssemblyVersion", v.get("AssemblyVersion").TrimEnd('.', '*'));
        infoVer = change("InfoVersion", v.get("AssemblyInformationalVersion").TrimEnd('.', '*'));
        nugetVer = change("NugetVersion", v.get("NugetVersion").TrimEnd('.', '*'));
    }

    //Hotfixes get tagged in assemblies and package names so they aren't used in production.
    bool isHotfix = ask("Is this a hotfix? Press Y to tag the assembiles and packages as such.");
    string packageHotfix = isHotfix ? ("-hotfix-" + DateTime.Now.ToString("htt").ToLower()) : "";

    //Download server from SharedAssemblyInfo.cs if specified, else the default.
    string downloadServer = v.get("DownloadServer");
    if (downloadServer == null) downloadServer = "http://downloads.imageresizing.net/";

    //For each zip package, collect the operations to perform.
    nl();
    say("For each zip package, specify all operations to perform, then press enter.");
    say("'c' - Create package (overwrite if exists), 'u' (upload to S3), 's' (skip), 'p' (make private)");
    bool isBuilding = false;
    StringBuilder downloadPaths = new StringBuilder();
    foreach (PackageDescriptor desc in packages) {
        desc.Path = getReleasePath(packageBase, infoVer, desc.Kind, packageHotfix);
        if (desc.Exists) say("\n" + Path.GetFileName(desc.Path) + " already exists");
        //NOTE(review): 'opts' is always empty when the prompt prints, so the
        //parentheses show nothing; preserved as-is pending confirmation of intent.
        string opts = "";
        Console.Write(desc.Kind + " (" + opts + "):");
        opts = Console.ReadLine().Trim();
        desc.Options = opts;
        if (desc.Build) isBuilding = true;
        if (desc.Upload) {
            downloadPaths.AppendLine(downloadServer + Path.GetFileName(desc.Path));
        }
    }
    if (downloadPaths.Length > 0) {
        say("Once complete, your files will be available at");
        say(downloadPaths.ToString());
        if (ask("Copy these to the clipboard?")) System.Windows.Clipboard.SetText(downloadPaths.ToString());
    }

    //Get all the .nuspec packages in the /nuget directory.
    IList<NPackageDescriptor> npackages = NPackageDescriptor.GetPackagesIn(Path.Combine(f.ParentPath, "nuget"));
    bool isMakingNugetPackage = false;
    if (ask("Create or upload NuGet packages?")) {
        foreach (NPackageDescriptor desc in npackages) {
            desc.VariableSubstitutions = GetNugetVariables();
            desc.VariableSubstitutions["version"] = nugetVer;
            desc.Version = nugetVer;
            desc.OutputDirectory = Path.Combine(f.ParentPath, "Releases", "nuget-packages");
            if (!Directory.Exists(desc.OutputDirectory)) Directory.CreateDirectory(desc.OutputDirectory);

            //Report which package/symbol files already exist.
            say(Path.GetFileName(desc.PackagePath) + (desc.PackageExists ? " exists" : " not found"),
                desc.PackageExists ? ConsoleColor.Green : ConsoleColor.Gray);
            say(Path.GetFileName(desc.SymbolPackagePath) + (desc.SymbolPackageExists ? " exists" : " not found"),
                desc.SymbolPackageExists ? ConsoleColor.Green : (desc.PackageExists ? ConsoleColor.Red : ConsoleColor.Gray));
        }
        say("What should we do with these packages? Enter multiple options like 'ou' ");
        say("r (create missing packages), c (overwrite all packages), u (upload all packages to nuget.org), i (enter interactive mode - choose per package), s (skip)");
        string selection = Console.ReadLine().Trim().ToLowerInvariant();
        bool interactive = selection.IndexOf('i') > -1;
        if (interactive) selection = selection.Replace("i", "");

        //Set the default for every package, then optionally adjust per package.
        foreach (NPackageDescriptor desc in npackages) desc.Options = selection;
        if (interactive) {
            foreach (NPackageDescriptor desc in npackages) {
                Console.Write(desc.BaseName + " (" + desc.Options + "):");
                desc.Options = Console.ReadLine().Trim().ToLowerInvariant();
            }
        }
        isMakingNugetPackage = npackages.Any(desc => desc.Build);
    }

    //Acquire only the credentials the selected operations actually need.
    var cs = new CredentialStore();
    if (downloadPaths.Length > 0) {
        cs.Need("S3ID", "Amazon S3 AccessKey ID");
        cs.Need("S3KEY", "Amazon S3 SecretAccessKey");
    }
    if (isMakingNugetPackage) cs.Need("NugetKey", "NuGet API Key");
    cs.AcquireCredentials();
    nuget.apiKey = cs.Get("NugetKey", null);
    string s3ID = cs.Get("S3ID", null);
    string s3Key = cs.Get("S3KEY", null);
    s3 = new TransferUtility(s3ID, s3Key, Amazon.RegionEndpoint.USEast1);

    if (!isBuilding && isMakingNugetPackage) {
        isBuilding = ask("You're creating 1 or more NuGet packages. Rebuild software?");
    }

    if (isBuilding) {
        //1 (clean execution moved to 8a below)
        bool cleanAll = ask("Clean All?");

        //2 - Set version numbers (with *, if missing); keep originals to detect changes.
        string originalContents = v.Contents;
        v.set("AssemblyFileVersion", v.join(fileVer, "*"));
        v.set("AssemblyVersion", v.join(assemblyVer, "*"));
        v.set("AssemblyInformationalVersion", infoVer);
        v.set("NugetVersion", nugetVer);
        v.set("Commit", "git-commit-guid-here");
        v.Save();
        //Save contents for reverting later.
        string fileContents = v.Contents;

        //Generate a hard revision number for building (so all dlls use the same number).
        //NOTE(review): TimeOfDay.Milliseconds is 0-999, so '% short.MaxValue' is a no-op;
        //possibly TotalMilliseconds was intended. Preserved as-is — confirm before changing.
        short revision = (short)(DateTime.UtcNow.TimeOfDay.Milliseconds % short.MaxValue);
        string exactVersion = v.join(fileVer, revision.ToString());
        string fullInfoVer = infoVer + (isHotfix ? ("-temp-hotfix-" + DateTime.Now.ToString("MMM-d-yyyy-htt").ToLower()) : "");
        string tag = "resizer" + v.join(infoVer, revision.ToString()) + (isHotfix ? "-hotfix" : "");

        //3 - Prompt to commit and tag.
        bool versionsChanged = !fileContents.Equals(originalContents);
        string question = versionsChanged
            ? "SharedAssemblyInfo.cs was modified. Commit it (and any other changes) to the repository, then hit 'y'."
            : "Are all changes commited? Hit 'y' to continue. The SHA-1 of HEAD will be embedded in the DLLs.";
        while (!ask(question)) { }
        if (ask("Tag HEAD with '" + tag + "'?")) g.Tag(tag);

        //4 - Embed git commit value. [assembly: Commit("git-commit-guid-here")]
        string gitCommit = g.CanExecute ? g.GetHeadHash() : "git-could-not-run-during-build";
        v.set("Commit", gitCommit);
        //4b - Change to the hard version number for building.
        v.set("AssemblyFileVersion", exactVersion);
        v.set("AssemblyVersion", exactVersion);
        //Add hotfix suffix for hotfixes.
        v.set("AssemblyInformationalVersion", fullInfoVer);
        v.Save();

        //Prepare searchers.
        PrepareForPackaging();

        bool success = false;
        //Temporarily edit the plugin/contrib project files to drop strong-name references.
        using (RestorePoint rp = new RestorePoint(q.files(new Pattern("^/Plugins/*/*.(cs|vb)proj$"), new Pattern("^/Contrib/*/*.(cs|vb)proj$")))) {
            foreach (string pf in rp.Paths) {
                new ProjectFileEditor(pf).RemoveStrongNameRefs();
            }
            //8a - Clean projects if specified.
            if (cleanAll) {
                CleanAll();
            }
            //6 - If (c) was specified for any package, build all.
            success = BuildAll(true);
        }

        //7 - Revert file to its committed state (remove 'full' version numbers and 'commit' value).
        v.Contents = fileContents;
        v.Save();

        //If the build didn't go ok, exit.
        if (!success) return;

        //8b - Run cleanup routine.
        RemoveUselessFiles();

        //Temporarily point the sample projects at the release dlls.
        using (RestorePoint rp = new RestorePoint(q.files(new Pattern("^/Samples/*/*.(cs|vb)proj$")))) {
            foreach (string pf in q.files(new Pattern("^/Samples/[^/]+/*.(cs|vb)proj$"))) {
                new ProjectFileEditor(pf).ReplaceAllProjectReferencesWithDllReferences("..\\..\\dlls\\release").RemoveStrongNameRefs();
            }
            //9 - Package all selected zip configurations.
            foreach (PackageDescriptor pd in packages) {
                if (pd.Skip || !pd.Build) continue;
                if (pd.Exists && pd.Build) {
                    File.Delete(pd.Path);
                    say("Deleted " + pd.Path);
                }
                pd.Builder(pd);
            }
        }
    }

    //10 - Package all nuget configurations.
    foreach (NPackageDescriptor pd in npackages) {
        if (pd.Skip) continue;
        if (pd.Build) nuget.Pack(pd);
    }

    //11 - Upload all selected zip configurations.
    foreach (PackageDescriptor pd in packages) {
        if (pd.Skip) continue;
        if (pd.Upload) {
            if (!pd.Exists) {
                say("Can't upload, file missing: " + pd.Path);
                continue;
            }
            var request = new TransferUtilityUploadRequest();
            request.CannedACL = pd.Private ? Amazon.S3.S3CannedACL.Private : Amazon.S3.S3CannedACL.PublicRead;
            request.BucketName = bucketName;
            request.Timeout = null;
            request.ContentType = "application/zip";
            request.Key = Path.GetFileName(pd.Path);
            request.FilePath = pd.Path;
            say("Uploading " + Path.GetFileName(pd.Path) + " to " + bucketName + " with CannedAcl:" + request.CannedACL.ToString());
            bool retry = false;
            do {
                //BUGFIX: reset the flag on every attempt. Previously, once a failed
                //attempt set retry=true, a later SUCCESSFUL upload left it true and
                //the loop re-uploaded the same file forever.
                retry = false;
                try {
                    s3.Upload(request);
                } catch (Exception ex) {
                    say("Upload failed: " + ex.Message);
                    retry = ask("Retry upload?");
                }
            } while (retry);
            say("Finished uploading " + Path.GetFileName(pd.Path));
        }
    }

    //12 - Upload all nuget configurations.
    foreach (NPackageDescriptor pd in npackages) {
        if (pd.Skip || !pd.Upload) continue;
        nuget.Push(pd);
    }

    say("Everything is done.");
}
/// <summary>
/// Verifies an upload request is complete enough to execute: a bucket name,
/// exactly one content source (file path or stream), a key (defaulted from the
/// file name when a path is given), and an existing file when a path is used.
/// Check order is significant and preserved.
/// </summary>
/// <param name="request">The upload request to validate.</param>
void validate(TransferUtilityUploadRequest request)
{
    // A null request is rejected outright.
    if (request == null)
        throw new ArgumentNullException("request");

    // The destination bucket is mandatory.
    if (!request.IsSetBucketName())
        throw new ArgumentNullException("bucketName");

    // At least one content source must be supplied.
    if (!request.IsSetFilePath() && !request.IsSetInputStream())
        throw new ArgumentException(
            "Please specify either a Filename or provide a Stream to PUT an object into S3.");

    // Default the key from the file name when possible; streams require an explicit key.
    if (!request.IsSetKey())
    {
        if (!request.IsSetFilePath())
            throw new ArgumentException(
                "The Key property must be specified when using a Stream to upload into S3.");
        request.Key = new FileInfo(request.FilePath).Name;
    }

    // When a file path is given, it must point at an existing file.
    if (request.IsSetFilePath() && !File.Exists(request.FilePath))
        throw new ArgumentException("The file indicated by the FilePath property does not exist!");
}
/// <summary>
/// Uploads the file or stream specified by <paramref name="request"/> as an
/// asynchronous operation by wrapping the utility's APM Begin/End upload pair
/// in a <see cref="Task"/>.
/// </summary>
/// <param name="utility">The transfer utility that performs the upload.</param>
/// <param name="request">Contains all the parameters used for uploading to Amazon S3.</param>
/// <returns>A task that completes when the upload finishes.</returns>
/// <exception cref="ArgumentNullException">
/// Thrown when <paramref name="utility"/> or <paramref name="request"/> is null.
/// </exception>
public static Task UploadAsync(
    this TransferUtility utility,
    TransferUtilityUploadRequest request)
{
    if (utility == null) throw new ArgumentNullException("utility");
    if (request == null) throw new ArgumentNullException("request");

    // Let FromAsync invoke BeginUpload itself: the returned task is then completed
    // directly by the APM completion callback. The previous form (passing an
    // already-started IAsyncResult) forces FromAsync to register a thread-pool
    // wait on the result's wait handle, which is less efficient.
    return Task.Factory.FromAsync(utility.BeginUpload, utility.EndUpload, request, null);
}
/// <summary>
/// Initiates the asynchronous execution of the Upload operation.
/// <seealso cref="Amazon.S3.IAmazonS3.AbortMultipartUpload"/>
/// </summary>
/// <remarks>
/// <para>
/// Large files are uploaded with Amazon S3's multipart API. If a multipart
/// upload is interrupted, TransferUtility attempts to abort it; under some
/// conditions (network outage, power failure, etc.) the abort cannot run, and
/// you will keep being charged for the stored parts until you call
/// TransferUtility.AbortMultipartUploads() yourself to remove the incomplete
/// uploads.
/// </para>
/// </remarks>
/// <param name="request">
/// Contains all the parameters required to upload to Amazon S3.
/// </param>
/// <param name="cancellationToken">
/// A cancellation token that can be used by other objects or threads to receive notice of cancellation.
/// </param>
/// <returns>The task object representing the asynchronous operation.</returns>
public Task UploadAsync(TransferUtilityUploadRequest request, CancellationToken cancellationToken = default(CancellationToken))
{
    // Delegate to the shared upload helper, run through the utility's async
    // execution wrapper with the caller's cancellation token.
    return ExecuteAsync(() => UploadHelper(request), cancellationToken);
}
/// <summary>
/// Handles the "send image" button: validates the kit number and the posted
/// file's size, uploads the image to S3 under a GUID-prefixed key, fetches it
/// back to check its dimensions, and records it for the student when valid.
/// </summary>
/// <param name="sender">Event source (the button).</param>
/// <param name="e">Event arguments.</param>
protected void cmdEnviarImagem_Click(object sender, EventArgs e)
{
    try
    {
        if (String.IsNullOrEmpty(txtNumero.Text))
        {
            // The kit number is mandatory before accepting an image.
            lblImagem.Text = "Digite o número do seu Kit";
            txtNumero.Focus();
            return;
        }

        int tamanho = flUploadImagem.PostedFile.ContentLength;
        if (tamanho > 4000000)
        {
            // Reject files over the 4,000,000-byte limit.
            lblImagem.Text = "A imagem deve ser menor do que 4.000.000 bytes.";
            return;
        }

        // Prefix the key with a GUID so concurrent uploads never collide.
        string filename = System.IO.Path.GetFileName(flUploadImagem.PostedFile.FileName);
        filename = Guid.NewGuid() + filename;

        // NOTE(review): credentials come straight from AppSettings — confirm this
        // is acceptable, or move to an IAM role / credential profile.
        TransferUtility fileTransferUtility = new TransferUtility(
            System.Configuration.ConfigurationManager.AppSettings["AccessKey"].ToString(),
            System.Configuration.ConfigurationManager.AppSettings["SecretKey"].ToString());

        var uploadRequest = new Amazon.S3.Transfer.TransferUtilityUploadRequest();
        uploadRequest.InputStream = flUploadImagem.PostedFile.InputStream;
        uploadRequest.BucketName = "BEPiD";
        uploadRequest.Key = filename;
        uploadRequest.StorageClass = S3StorageClass.ReducedRedundancy;
        uploadRequest.CannedACL = S3CannedACL.PublicRead;
        fileTransferUtility.Upload(uploadRequest);

        lblImagem.Text = "<img src='http://s3.amazonaws.com/BEPiD/" + filename.ToString() + "' style='border-radius:30px;'/>";
        string urlImagem = "http://s3.amazonaws.com/BEPiD/" + filename.ToString();

        // BUGFIX: the response, its stream, and the GDI+ Image were never
        // disposed, leaking a socket and an image handle on every upload.
        // Dispose them deterministically once the dimensions are read.
        int w, h;
        HttpWebRequest httpWebRequest = (HttpWebRequest)HttpWebRequest.Create(urlImagem);
        using (HttpWebResponse httpWebReponse = (HttpWebResponse)httpWebRequest.GetResponse())
        using (Stream stream = httpWebReponse.GetResponseStream())
        using (System.Drawing.Image objImage = System.Drawing.Image.FromStream(stream))
        {
            w = objImage.Width;
            h = objImage.Height;
        }

        if (w <= 500 && h <= 500)
        {
            // Dimensions OK: update the database with the photo and kit number.
            AlunoDTO dto = new AlunoDTO();
            dto.idAluno = int.Parse(Session["I"].ToString());
            if (txtNumero.Text.Length > 0)
            {
                dto.numero = int.Parse(txtNumero.Text);
            }
            dto.foto = filename.ToString();

            AlunoBRL alunoBRL = new AlunoBRL();
            if (alunoBRL.updateAlunoNumeroMaquinaImagem(dto))
            {
                // Success: the label already displays the uploaded image.
            }
            else
            {
                // Update failure was silently ignored in the original; preserved as-is.
            }
        }
        else
        {
            lblImagem.Text = "A imagem deve a largura e altura de no máximo 500 pixels.";
        }
    }
    catch (Exception ex)
    {
        lblImagem.Text = ex.Message.ToString() + " - " + ex.StackTrace.ToString();
    }
}
/// <summary>
/// Uploads the file or stream specified by the request.
/// To track the progress of the upload, add an event listener to the request's
/// <c>UploadProgressEvent</c>. Large uploads are divided and sent in parts via
/// Amazon S3's multipart API, then reassembled as one object in Amazon S3.
/// </summary>
/// <remarks>
/// <para>
/// If a multipart upload is interrupted, TransferUtility attempts to abort it.
/// Under certain circumstances (network outage, power failure, etc.) the abort
/// cannot run; to stop being charged for the stored parts, manually invoke
/// TransferUtility.AbortMultipartUploads() to remove the incomplete uploads.
/// </para>
/// </remarks>
/// <param name="request">
/// Contains all the parameters required to upload to Amazon S3.
/// </param>
public void Upload(TransferUtilityUploadRequest request)
{
    // All upload entry points funnel through the same helper.
    UploadHelper(request);
}