/// <summary>
/// Uploads the file (or input stream) described by the transporter request to S3
/// with a single PutObject call, wiring the progress callback and propagating any
/// optional metadata and headers.
/// </summary>
public override void Execute()
{
    // A request-level timeout of 0 means "use the client's configured default".
    int timeout = this._fileTransporterRequest.Timeout != 0
        ? this._fileTransporterRequest.Timeout
        : this._config.DefaultTimeout;

    PutObjectRequest putRequest = new PutObjectRequest()
        .WithBucketName(this._fileTransporterRequest.BucketName)
        .WithKey(this._fileTransporterRequest.Key)
        .WithCannedACL(this._fileTransporterRequest.CannedACL)
        .WithContentType(this._fileTransporterRequest.ContentType)
        .WithFilePath(this._fileTransporterRequest.FilePath)
        .WithTimeout(timeout)
        .WithStorageClass(this._fileTransporterRequest.StorageClass)
        .WithAutoCloseStream(this._fileTransporterRequest.AutoCloseStream)
        .WithSubscriber(new EventHandler<PutObjectProgressArgs>(this.putObjectProgressEventCallback));
    putRequest.InputStream = this._fileTransporterRequest.InputStream;

    // Copy optional metadata and headers only when the caller supplied any.
    var metadata = this._fileTransporterRequest.metadata;
    if (metadata != null && metadata.Count > 0)
        putRequest.WithMetaData(metadata);

    var requestHeaders = this._fileTransporterRequest.Headers;
    if (requestHeaders != null && requestHeaders.Count > 0)
        putRequest.AddHeaders(requestHeaders);

    this._s3Client.PutObject(putRequest);
}
/// <summary>
/// Uploads <paramref name="content"/> to the configured bucket under
/// <paramref name="key"/>, then invalidates the object in the CDN when an
/// invalidator is configured.
/// </summary>
/// <param name="key">Destination object key.</param>
/// <param name="content">Stream holding the object body; not rewound or disposed here.</param>
void UploadContent(string key, Stream content)
{
    // Configure via statements instead of one fluent chain so the unchecked
    // "as PutObjectRequest" downcast is no longer needed; that cast would yield
    // null (and a NullReferenceException later) if any With* overload returned
    // a base request type.
    var request = new PutObjectRequest();
    request.WithBucketName(bucket)
           .WithKey(key)
           .WithCannedACL(cannedACL)
           .WithInputStream(content);

    if (headers != null)
        request.AddHeaders(headers);

    //TODO: handle exceptions properly
    s3client.PutObject(request);

    if (invalidator != null)
        invalidator.InvalidateObject(bucket, key);
}
/// <summary>
/// Uploads the file (or input stream) described by the transporter request to S3
/// with a single PutObject call, subscribing the progress callback and copying any
/// optional metadata and headers onto the request.
/// </summary>
public override void Execute()
{
    // A request-level timeout of 0 means "use the client's configured default".
    int timeout = this._fileTransporterRequest.Timeout == 0
        ? this._config.DefaultTimeout
        : this._fileTransporterRequest.Timeout;

    var putRequest = new PutObjectRequest
    {
        BucketName = this._fileTransporterRequest.BucketName,
        Key = this._fileTransporterRequest.Key,
        CannedACL = this._fileTransporterRequest.CannedACL,
        ContentType = this._fileTransporterRequest.ContentType,
        FilePath = this._fileTransporterRequest.FilePath,
        Timeout = timeout,
        StorageClass = this._fileTransporterRequest.StorageClass,
        AutoCloseStream = this._fileTransporterRequest.AutoCloseStream,
        ServerSideEncryptionMethod = this._fileTransporterRequest.ServerSideEncryptionMethod,
        InputStream = this._fileTransporterRequest.InputStream,
    };
    putRequest.PutObjectProgressEvent += new EventHandler<PutObjectProgressArgs>(this.putObjectProgressEventCallback);

    // Copy optional metadata and headers only when the caller supplied any.
    var metadata = this._fileTransporterRequest.metadata;
    if (metadata != null && metadata.Count > 0)
        putRequest.WithMetaData(metadata);

    var requestHeaders = this._fileTransporterRequest.Headers;
    if (requestHeaders != null && requestHeaders.Count > 0)
        putRequest.AddHeaders(requestHeaders);

    this._s3Client.PutObject(putRequest);
}
/// <summary>
/// Uploads the request's input stream to S3, using the URL-decoded absolute path
/// of the request URI (minus its leading slash) as the object key. The canned ACL
/// is Private when access control is enabled globally and the request does not opt
/// out; otherwise PublicRead.
/// </summary>
/// <param name="request">Upload descriptor: target URI, body stream, headers, metadata.</param>
/// <exception cref="StorageException">Wraps any failure during the upload.</exception>
public void UploadObject(UploadRequest request)
{
    CheckUri(request.Uri);
    try
    {
        using (var client = CreateAmazonS3Client())
        {
            // S3 keys do not start with '/', so strip the path's leading slash.
            // (Char literal replaces the roundabout Convert.ToChar("/").)
            var absolutePath = HttpUtility.UrlDecode(request.Uri.AbsolutePath);
            var key = absolutePath.TrimStart('/');

            var putRequest = new PutObjectRequest();
            putRequest.WithBucketName(bucketName)
                      .WithKey(key)
                      .WithInputStream(request.InputStream);

            if (accessControlEnabledGlobally && !request.IgnoreAccessControl)
            {
                putRequest.WithCannedACL(S3CannedACL.Private);
            }
            else
            {
                putRequest.WithCannedACL(S3CannedACL.PublicRead);
            }

            if (request.Headers != null && request.Headers.Count > 0)
            {
                putRequest.AddHeaders(request.Headers);
            }

            if (request.MetaData != null && request.MetaData.Count > 0)
            {
                putRequest.WithMetaData(request.MetaData);
            }

            putRequest.ContentType = MimeTypeUtility.DetermineContentType(request.Uri);

            // PutObjectResponse is IDisposable; dispose it immediately.
            using (client.PutObject(putRequest)) { }
        }
    }
    catch (Exception e)
    {
        throw new StorageException(string.Format("Failed to upload object with request {0}.", request), e);
    }
}
/// <summary>
/// Saves a stream as a private S3 object (bucket-owner-full-control,
/// application/octet-stream, forced attachment download) and returns a presigned
/// GET URL valid until <paramref name="expires"/>.
/// </summary>
/// <param name="domain">Storage domain used to build the object key.</param>
/// <param name="path">Relative path used to build the object key.</param>
/// <param name="stream">Object body; not rewound or disposed here.</param>
/// <param name="expires">Expiry for both the stored metadata and the presigned URL.</param>
/// <returns>A presigned HTTP GET URL for the stored object.</returns>
public override string SavePrivate(string domain, string path, Stream stream, DateTime expires)
{
    using (AmazonS3 client = GetClient())
    {
        var request = new PutObjectRequest();
        string objectKey = MakePath(domain, path);
        request.WithBucketName(_bucket)
               .WithKey(objectKey)
               .WithCannedACL(S3CannedACL.BucketOwnerFullControl)
               .WithContentType("application/octet-stream")
               .WithMetaData("private-expire", expires.ToFileTimeUtc().ToString());

        var headers = new NameValueCollection();
        // BUGFIX: the Cache-Control directive is "max-age", not "maxage" (RFC 7234);
        // the previous value was silently ignored by caches.
        headers.Add("Cache-Control", string.Format("public, max-age={0}", (int)TimeSpan.FromDays(5).TotalSeconds));
        headers.Add("Etag", (DateTime.UtcNow.Ticks).ToString());
        headers.Add("Last-Modified", DateTime.UtcNow.ToString("R"));
        headers.Add("Expires", DateTime.UtcNow.Add(TimeSpan.FromDays(5)).ToString("R"));
        headers.Add("Content-Disposition", "attachment");
        request.AddHeaders(headers);
        request.WithInputStream(stream);
        client.PutObject(request);

        // Get presigned url
        GetPreSignedUrlRequest pUrlRequest = new GetPreSignedUrlRequest()
            .WithBucketName(_bucket)
            .WithExpires(expires)
            .WithKey(objectKey)
            .WithProtocol(Protocol.HTTP)
            .WithVerb(HttpVerb.GET);
        string url = client.GetPreSignedURL(pUrlRequest);
        //TODO: CNAME!
        return url;
    }
}
/// <summary>
/// Saves a stream to S3 under domain/path, accounting quota usage, invalidating
/// the object in CloudFront, and returning the stored object's URI.
/// </summary>
/// <param name="domain">Storage domain used to build the object key.</param>
/// <param name="path">Relative path used to build the object key.</param>
/// <param name="stream">Object body; buffered before upload, not disposed here.</param>
/// <param name="contentType">MIME type; when empty it is derived from the file name.</param>
/// <param name="contentDisposition">Optional Content-Disposition header value.</param>
/// <param name="acl">Access level; ACL.Auto resolves to the domain's ACL.</param>
/// <returns>The URI of the stored object.</returns>
public Uri Save(string domain, string path, Stream stream, string contentType, string contentDisposition, ACL acl)
{
    bool postWriteCheck = false;
    if (QuotaController != null)
    {
        try
        {
            QuotaController.QuotaUsedAdd(_modulename, domain, _dataList.GetData(domain), stream.Length);
        }
        catch (Exception)
        {
            // stream.Length may be unavailable (e.g. non-seekable stream);
            // re-check quota after buffering below.
            postWriteCheck = true;
        }
    }

    using (AmazonS3 client = GetClient())
    {
        var request = new PutObjectRequest();
        string mime = string.IsNullOrEmpty(contentType)
                          ? MimeMapping.GetMimeMapping(Path.GetFileName(path))
                          : contentType;
        request.WithBucketName(_bucket)
               .WithKey(MakePath(domain, path))
               .WithCannedACL(acl == ACL.Auto ? GetDomainACL(domain) : GetS3Acl(acl))
               .WithContentType(mime);

        var headers = new NameValueCollection();
        // BUGFIX: the Cache-Control directive is "max-age", not "maxage" (RFC 7234);
        // the previous value was silently ignored by caches.
        headers.Add("Cache-Control", string.Format("public, max-age={0}", (int)TimeSpan.FromDays(5).TotalSeconds));
        headers.Add("Etag", (DateTime.UtcNow.Ticks).ToString());
        headers.Add("Last-Modified", DateTime.UtcNow.ToString("R"));
        headers.Add("Expires", DateTime.UtcNow.Add(TimeSpan.FromDays(5)).ToString("R"));
        if (!string.IsNullOrEmpty(contentDisposition))
        {
            headers.Add("Content-Disposition", contentDisposition);
        }
        else if (mime == "application/octet-stream")
        {
            // Force download for generic binary content.
            headers.Add("Content-Disposition", "attachment");
        }
        request.AddHeaders(headers);

        //Send body
        var buffered = stream.GetBuffered();
        if (postWriteCheck)
        {
            QuotaController.QuotaUsedAdd(_modulename, domain, _dataList.GetData(domain), buffered.Length);
        }
        request.AutoCloseStream = false;
        request.WithInputStream(buffered);
        client.PutObject(request);

        InvalidateCloudFront(MakePath(domain, path));
        return GetUri(domain, path);
    }
}
/// <summary>
/// Publishes a file to Amazon S3. Text content (by MIME type) is gzip-compressed
/// into a temporary file first and uploaded with a "Content-Encoding: gzip" header.
/// Existing objects are skipped unless <c>OverwriteExisting</c> is set.
/// </summary>
/// <param name="filePath">The path of the file to publish.</param>
private void PublishFile(string filePath)
{
    NameValueCollection headers = new NameValueCollection();
    string contentType = MimeType.FromCommon(filePath).ContentType;
    string objectKey = this.ObjectKey(filePath);

    if (this.OverwriteExisting || !this.ObjectExists(objectKey))
    {
        PutObjectRequest request = new PutObjectRequest()
            .WithBucketName(this.BucketName)
            .WithCannedACL(S3CannedACL.PublicRead)
            .WithContentType(contentType)
            .WithKey(objectKey)
            .WithTimeout(this.Timeout);

        bool gzip = false;
        string tempPath = null;

        try
        {
            if (contentType.StartsWith("text", StringComparison.OrdinalIgnoreCase))
            {
            // Compress text content into a temp file and upload that instead.
                gzip = true;
                tempPath = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData), Path.GetRandomFileName());

                using (FileStream fs = File.OpenRead(filePath))
                using (FileStream temp = File.Create(tempPath))
                using (GZipStream gz = new GZipStream(temp, CompressionMode.Compress))
                {
                    byte[] buffer = new byte[4096];
                    int count;
                    while (0 < (count = fs.Read(buffer, 0, buffer.Length)))
                    {
                        gz.Write(buffer, 0, count);
                    }
                }

                headers["Content-Encoding"] = "gzip";
                request = request.WithFilePath(tempPath);
            }
            else
            {
                request = request.WithFilePath(filePath);
            }

            request.AddHeaders(headers);

            // PutObjectResponse is IDisposable; dispose it immediately.
            using (PutObjectResponse response = this.Client().PutObject(request))
            {
            }
        }
        finally
        {
            // BUGFIX: delete the temp file even when the upload throws; previously
            // an exception from PutObject leaked the compressed copy on disk.
            if (!String.IsNullOrEmpty(tempPath) && File.Exists(tempPath))
            {
                File.Delete(tempPath);
            }
        }

        this.PublisherDelegate.OnFilePublished(filePath, objectKey, gzip);
    }
    else
    {
        this.PublisherDelegate.OnFileSkipped(filePath, objectKey);
    }
}
/// <summary>
/// Uploads an in-memory buffer to S3.
/// </summary>
/// <param name="inputStream">Buffer with the file contents; rewound before upload.</param>
/// <param name="bucketName">Destination bucket.</param>
/// <param name="fileKey">Destination object key; its extension determines the content type.</param>
/// <param name="cacheControl">Optional Cache-Control header value; skipped when empty.</param>
/// <param name="acl">Canned ACL to apply to the uploaded object.</param>
/// <param name="isGZipped">When true, adds a "Content-Encoding: gzip" header.</param>
public void UploadFileToS3(MemoryStream inputStream, string bucketName, string fileKey, string cacheControl, Amazon.S3.Model.S3CannedACL acl, bool isGZipped)
{
    Debug.Assert(UploadToS3);

    var request = new Amazon.S3.Model.PutObjectRequest
    {
        InputStream = inputStream,
        BucketName = bucketName,
        // BUGFIX: honor the caller-supplied ACL; it was previously ignored and
        // hard-coded to S3CannedACL.PublicRead.
        CannedACL = acl,
        Key = fileKey,
        ContentType = GetMimeType(System.IO.Path.GetExtension(fileKey)),
    };
    Console.WriteLine(string.Format("Uploading file '{0}' to S3.", fileKey));

    var headers = new NameValueCollection();
    if (!string.IsNullOrEmpty(cacheControl))
        headers.Add("Cache-Control", cacheControl);
    if (isGZipped)
        headers.Add("Content-Encoding", "gzip");

    // (Redundant second ContentType assignment removed; it is set in the initializer.)
    inputStream.Position = 0; // rewind so the full buffer is uploaded
    request.AddHeaders(headers);
    S3Client.PutObject(request);
}
/// <summary>
/// Saves a stream to S3 under domain/path with AES256 server-side encryption,
/// accounting quota usage, invalidating the object in CloudFront, and returning
/// the stored object's URI.
/// </summary>
/// <param name="domain">Storage domain used to build the object key.</param>
/// <param name="path">Relative path used to build the object key.</param>
/// <param name="stream">Object body; buffered before upload, not disposed here.</param>
/// <param name="contentType">MIME type; when empty it is derived from the file name.</param>
/// <param name="contentDisposition">Optional Content-Disposition header value.</param>
/// <param name="acl">Access level; ACL.Auto resolves to the domain's ACL.</param>
/// <param name="contentEncoding">Optional Content-Encoding header value.</param>
/// <param name="cacheDays">Cache lifetime (days) for Cache-Control/Expires headers.</param>
/// <returns>The URI of the stored object.</returns>
/// <exception cref="TenantQuotaException">Tenant has no free space or the file exceeds quota.</exception>
public Uri Save(string domain, string path, Stream stream, string contentType, string contentDisposition, ACL acl, string contentEncoding = null, int cacheDays = 5)
{
    bool postWriteCheck = false;
    if (QuotaController != null)
    {
        try
        {
            QuotaController.QuotaUsedAdd(_modulename, domain, _dataList.GetData(domain), stream.Length);
        }
        catch (TenantQuotaException)
        {
            //this exception occurs only if tenant has no free space
            //or if file size larger than allowed by quota
            //so we can exit this function without stream buffering etc
            throw;
        }
        catch (Exception)
        {
            // stream.Length may be unavailable (e.g. non-seekable stream);
            // re-check quota after buffering below.
            postWriteCheck = true;
        }
    }

    using (AmazonS3 client = GetClient())
    {
        var request = new PutObjectRequest();
        string mime = string.IsNullOrEmpty(contentType)
                          ? MimeMapping.GetMimeMapping(Path.GetFileName(path))
                          : contentType;
        request.BucketName = _bucket;
        request.Key = MakePath(domain, path);
        request.CannedACL = acl == ACL.Auto ? GetDomainACL(domain) : GetS3Acl(acl);
        request.ContentType = mime;
        request.ServerSideEncryptionMethod = ServerSideEncryptionMethod.AES256;

        var requestHeaders = new NameValueCollection();
        // BUGFIX: the Cache-Control directive is "max-age", not "maxage" (RFC 7234);
        // the previous value was silently ignored by caches.
        requestHeaders.Add("Cache-Control", string.Format("public, max-age={0}", (int)TimeSpan.FromDays(cacheDays).TotalSeconds));
        requestHeaders.Add("Etag", (DateTime.UtcNow.Ticks).ToString(CultureInfo.InvariantCulture));
        requestHeaders.Add("Last-Modified", DateTime.UtcNow.ToString("R"));
        requestHeaders.Add("Expires", DateTime.UtcNow.Add(TimeSpan.FromDays(cacheDays)).ToString("R"));
        if (!string.IsNullOrEmpty(contentDisposition))
        {
            requestHeaders.Add("Content-Disposition", contentDisposition);
        }
        else if (mime == "application/octet-stream")
        {
            // Force download for generic binary content.
            requestHeaders.Add("Content-Disposition", "attachment");
        }
        if (!string.IsNullOrEmpty(contentEncoding))
        {
            requestHeaders.Add("Content-Encoding", contentEncoding);
        }
        request.AddHeaders(requestHeaders);

        //Send body
        var buffered = stream.GetBuffered();
        if (postWriteCheck)
        {
            QuotaController.QuotaUsedAdd(_modulename, domain, _dataList.GetData(domain), buffered.Length);
        }
        request.AutoCloseStream = false;
        request.InputStream = buffered;
        // Unused local for the response's encryption status (and its leftover
        // comment residue) removed; the response itself is not needed further.
        client.PutObject(request);

        InvalidateCloudFront(MakePath(domain, path));
        return GetUri(domain, path);
    }
}
/// <summary>
/// Uploads the request's input stream to S3 as a public-read object, using the
/// request URI's absolute path (minus its leading slash) as the object key.
/// </summary>
/// <param name="request">Upload descriptor: target URI, body stream, headers, metadata.</param>
/// <exception cref="StorageException">Wraps any failure during the upload.</exception>
public void UploadObject(UploadRequest request)
{
    CheckUri(request.Uri);
    try
    {
        using (var client = CreateAmazonS3Client())
        {
            // S3 keys do not start with '/', so strip the path's leading slash.
            // (Char literal replaces the roundabout Convert.ToChar("/").)
            var absolutePath = request.Uri.AbsolutePath;
            var key = absolutePath.TrimStart('/');

            var putRequest = new PutObjectRequest();
            putRequest.WithBucketName(bucketName)
                      .WithKey(key)
                      .WithCannedACL(S3CannedACL.PublicRead)
                      .WithInputStream(request.InputStream);

            if (request.Headers != null && request.Headers.Count > 0)
            {
                putRequest.AddHeaders(request.Headers);
            }

            if (request.MetaData != null && request.MetaData.Count > 0)
            {
                putRequest.WithMetaData(request.MetaData);
            }

            // PutObjectResponse is IDisposable; dispose it immediately.
            using (client.PutObject(putRequest)) { }
        }
    }
    catch (Exception e)
    {
        throw new StorageException(string.Format("Failed to upload object with request {0}.", request), e);
    }
}