//--- Methods ---

/// <summary>
/// Unzips the source package and uploads every entry to the destination bucket,
/// then writes a manifest so a later Delete/Update can find the uploaded files.
/// </summary>
/// <param name="properties">Resource properties naming the source package and destination bucket/key.</param>
/// <returns>Response carrying the physical resource id and the destination URL.</returns>
/// <exception cref="FileNotFoundException">Thrown when the source package cannot be downloaded.</exception>
public async Task<Response<S3WriterResourceAttribute>> Create(S3WriterResourceProperties properties) {
    _logger.LogInfo($"copying package s3://{properties.SourceBucketName}/{properties.SourceKey} to S3 bucket {properties.DestinationBucketName}");

    // upload each zip entry and record its hash for the manifest
    var uploadedFiles = new Dictionary<string, string>();
    var downloaded = await ProcessZipFileItemsAsync(properties.SourceBucketName, properties.SourceKey, async entry => {
        await UploadEntry(entry, properties);
        uploadedFiles.Add(entry.FullName, ComputeEntryHash(entry, properties));
    });
    if(!downloaded) {
        throw new FileNotFoundException("Unable to download source package");
    }

    // create package manifest for future deletion
    _logger.LogInfo($"uploaded {uploadedFiles.Count:N0} files");
    await WriteManifest(properties, uploadedFiles);
    return new Response<S3WriterResourceAttribute> {
        PhysicalResourceId = $"s3unzip:{properties.DestinationBucketName}:{properties.DestinationKey}",
        Attributes = new S3WriterResourceAttribute {
            Url = $"s3://{properties.DestinationBucketName}/{properties.DestinationKey}"
        }
    };
}
//--- Methods ---

/// <summary>
/// Create is a no-op for this resource; it only returns an identifier so that
/// the bucket can be emptied later on Delete.
/// </summary>
/// <param name="properties">Resource properties naming the bucket.</param>
/// <returns>Response carrying the physical resource id and the bucket name.</returns>
public async Task<Response<S3WriterResourceAttribute>> Create(S3WriterResourceProperties properties) {

    // nothing to do on create
    var response = new Response<S3WriterResourceAttribute> {
        PhysicalResourceId = $"s3emptybucket:{properties.BucketName}",
        Attributes = new S3WriterResourceAttribute {
            BucketName = properties.BucketName
        }
    };
    return response;
}
/// <summary>
/// Buffers one zip entry into memory, optionally re-compresses it (gzip/brotli)
/// per the configured encoding, and uploads it to the destination bucket with a
/// matching Content-Encoding header and MD5 checksum.
/// </summary>
/// <param name="entry">Zip entry to upload.</param>
/// <param name="properties">Resource properties supplying encoding and destination settings.</param>
private async Task UploadEntry(ZipArchiveEntry entry, S3WriterResourceProperties properties) {

    // unzip entry
    using(var stream = entry.Open()) {
        var memoryStream = new MemoryStream();

        // determine if stream needs to be encoded
        string contentEncodingHeader = null;
        var encoding = DetermineEncodingType(entry.FullName, properties);
        switch(encoding) {
        case "NONE":
            await stream.CopyToAsync(memoryStream);
            break;
        case "GZIP":
            contentEncodingHeader = "gzip";
            using(var gzipStream = new GZipStream(memoryStream, CompressionLevel.Optimal, leaveOpen: true)) {
                await stream.CopyToAsync(gzipStream);
            }
            break;
        case "BROTLI":
            contentEncodingHeader = "br";
            using(var brotliStream = new BrotliStream(memoryStream, CompressionLevel.Optimal, leaveOpen: true)) {
                await stream.CopyToAsync(brotliStream);
            }
            break;
        default:

            // unknown encoding: warn and fall back to uploading the raw bytes
            _logger.LogWarn("unrecognized compression type {0} for {1}", encoding, entry.FullName);
            encoding = "NONE";
            goto case "NONE";
        }
        var base64 = GetMD5AsBase64(memoryStream);

        // FIX: rewind the buffer before handing it to the SDK; after the copy/compress
        // above, the stream position is at the end, which would upload zero bytes if
        // the SDK reads from the current position
        memoryStream.Position = 0;

        // upload the (possibly re-encoded) entry to the destination bucket
        var destination = Path.Combine(properties.DestinationKey, entry.FullName).Replace('\\', '/');
        _logger.LogInfo($"uploading file: {destination} [encoding: {encoding.ToLowerInvariant()}]");

        // NOTE(review): sibling methods use properties.DestinationBucketName; confirm
        // that DestinationBucket here is intentional and refers to the same bucket
        await _transferUtility.UploadAsync(new TransferUtilityUploadRequest {
            Headers = {
                ContentEncoding = contentEncodingHeader,
                ContentMD5 = base64
            },
            InputStream = memoryStream,
            BucketName = properties.DestinationBucket,
            Key = destination
        });
    }
}
/// <summary>
/// Deletes the previously written JSON file; failures are downgraded to warnings
/// so that stack teardown is never blocked.
/// </summary>
/// <param name="properties">Resource properties naming the bucket and key to delete.</param>
/// <returns>An empty response.</returns>
public async Task<Response<S3WriterResourceAttribute>> Delete(S3WriterResourceProperties properties) {
    _logger.LogInfo($"deleting JSON file at s3://{properties.BucketName}/{properties.Key}");
    try {
        var deleteRequest = new DeleteObjectRequest {
            BucketName = properties.BucketName,
            Key = properties.Key
        };
        await _s3Client.DeleteObjectAsync(deleteRequest);
    } catch(Exception e) {

        // best effort: log and continue
        _logger.LogErrorAsWarning(e, "unable to delete JSON file at s3://{0}/{1}", properties.BucketName, properties.Key);
    }
    return new Response<S3WriterResourceAttribute>();
}
/// <summary>
/// Builds an in-memory zip containing a single "manifest.txt" (one tab-separated
/// "name\thash" pair per line) and uploads it to the manifest bucket under
/// "{DestinationBucketName}/{SourceKey}".
/// </summary>
/// <param name="properties">Resource properties naming the destination bucket and source key.</param>
/// <param name="fileEntries">Map from uploaded file name to its hash.</param>
private async Task WriteManifest(S3WriterResourceProperties properties, Dictionary<string, string> fileEntries) {
    var manifestStream = new MemoryStream();
    using(var manifest = new ZipArchive(manifestStream, ZipArchiveMode.Create, leaveOpen: true))
    using(var manifestEntryStream = manifest.CreateEntry("manifest.txt").Open())
    using(var manifestEntryWriter = new StreamWriter(manifestEntryStream)) {
        await manifestEntryWriter.WriteAsync(string.Join("\n", fileEntries.Select(file => $"{file.Key}\t{file.Value}")));
    }

    // FIX: rewind the buffer before uploading; after the ZipArchive is written the
    // stream position is at the end, which would upload zero bytes if the SDK reads
    // from the current position
    manifestStream.Position = 0;
    await _transferUtility.UploadAsync(
        manifestStream,
        _manifestBucket,
        $"{properties.DestinationBucketName}/{properties.SourceKey}"
    );
}
/// <summary>
/// Empties the bucket by paging through all objects and issuing a batch delete
/// per page; pages are deleted concurrently and awaited together at the end.
/// </summary>
/// <param name="properties">Resource properties naming the bucket and the Enabled flag.</param>
/// <returns>An empty response.</returns>
public async Task<Response<S3WriterResourceAttribute>> Delete(S3WriterResourceProperties properties) {
    if(properties.Enabled == false) {

        // don't do anything if disabled
        return new Response<S3WriterResourceAttribute>();
    }
    var bucketName = properties.BucketName;
    _logger.LogInfo($"emptying bucket: {bucketName}");

    // enumerate all S3 objects, one page at a time
    var listRequest = new ListObjectsV2Request {
        BucketName = bucketName
    };
    var deletedCount = 0;
    var pendingDeletions = new List<Task>();
    while(true) {
        var listResponse = await _s3Client.ListObjectsV2Async(listRequest);

        // kick off a batch delete for any objects found on this page
        if(listResponse.S3Objects.Any()) {
            var keys = listResponse.S3Objects.Select(s3Object => new KeyVersion {
                Key = s3Object.Key
            }).ToList();
            pendingDeletions.Add(_s3Client.DeleteObjectsAsync(new DeleteObjectsRequest {
                BucketName = bucketName,
                Objects = keys,
                Quiet = true
            }));
            deletedCount += listResponse.S3Objects.Count;
        }

        // stop once there are no more pages to fetch
        if(listResponse.NextContinuationToken == null) {
            break;
        }
        listRequest.ContinuationToken = listResponse.NextContinuationToken;
    }

    // wait for all deletions to complete
    await Task.WhenAll(pendingDeletions);
    _logger.LogInfo($"deleted {deletedCount:N0} objects");
    return new Response<S3WriterResourceAttribute>();
}
//--- Methods ---

/// <summary>
/// Serializes the configured contents and writes them to S3 as a JSON object.
/// </summary>
/// <param name="properties">Resource properties naming the bucket, key, and contents.</param>
/// <returns>Response carrying the physical resource id and the file's S3 URL.</returns>
public async Task<Response<S3WriterResourceAttribute>> Create(S3WriterResourceProperties properties) {
    _logger.LogInfo($"writing JSON file to s3://{properties.BucketName}/{properties.Key}");
    var contents = Serialize(properties.Contents);
    var putRequest = new PutObjectRequest {
        BucketName = properties.BucketName,
        ContentBody = contents,
        ContentType = "application/json",
        Key = properties.Key
    };
    await _s3Client.PutObjectAsync(putRequest);
    _logger.LogInfo($"JSON file written ({Encoding.UTF8.GetByteCount(contents):N0} bytes)");
    return new Response<S3WriterResourceAttribute> {
        PhysicalResourceId = $"s3writejson:{properties.BucketName}:{properties.Key}",
        Attributes = new S3WriterResourceAttribute {
            Url = $"s3://{properties.BucketName}/{properties.Key}"
        }
    };
}
/// <summary>
/// Deletes all files previously uploaded from the source package, using the
/// stored manifest to know what was uploaded; returns quietly when no manifest
/// can be found.
/// </summary>
/// <param name="properties">Resource properties naming the package and destination bucket/key.</param>
/// <returns>An empty response.</returns>
public async Task<Response<S3WriterResourceAttribute>> Delete(S3WriterResourceProperties properties) {
    _logger.LogInfo($"deleting package {properties.SourceKey} from S3 bucket {properties.DestinationBucketName}");

    // download package manifest
    var fileEntries = await ReadAndDeleteManifest(properties);
    if(fileEntries == null) {
        return new Response<S3WriterResourceAttribute>();
    }

    // delete all files from manifest
    // FIX: normalize path separators to '/' so keys match what Create uploaded
    // (Create applies .Replace('\\', '/') after Path.Combine; on Windows hosts
    // Path.Combine emits '\\' and the keys would never match otherwise)
    await BatchDeleteFiles(
        properties.DestinationBucketName,
        fileEntries.Select(kv => Path.Combine(properties.DestinationKey, kv.Key).Replace('\\', '/')).ToList()
    );
    return new Response<S3WriterResourceAttribute>();
}
/// <summary>
/// Downloads the package manifest from the manifest bucket, parses its
/// tab-separated "name\thash" lines, then deletes the manifest object.
/// </summary>
/// <param name="properties">Resource properties naming the destination bucket and source key.</param>
/// <returns>Map from file name to hash, or null when the manifest could not be downloaded.</returns>
private async Task<Dictionary<string, string>> ReadAndDeleteManifest(S3WriterResourceProperties properties) {

    // download package manifest
    var fileEntries = new Dictionary<string, string>();
    var key = $"{properties.DestinationBucketName}/{properties.SourceKey}";
    var found = await ProcessZipFileItemsAsync(_manifestBucket, key, async entry => {
        using(var stream = entry.Open())
        using(var reader = new StreamReader(stream)) {
            var manifest = await reader.ReadToEndAsync();
            if(!string.IsNullOrWhiteSpace(manifest)) {
                foreach(var line in manifest.Split('\n')) {
                    var columns = line.Split('\t');

                    // FIX: guard against malformed lines, which previously threw IndexOutOfRangeException
                    if(columns.Length >= 2) {
                        fileEntries.Add(columns[0], columns[1]);
                    } else {
                        _logger.LogWarn($"skipping malformed manifest line in s3://{_manifestBucket}/{key}");
                    }
                }
            }
        }
    });
    if(!found) {

        // FIX: corrected typo in log message ('dowload' -> 'download')
        _logger.LogWarn($"unable to download zip file from s3://{_manifestBucket}/{key}");
        return null;
    }

    // delete manifest after reading it; failure to delete is non-fatal
    try {
        await _s3Client.DeleteObjectAsync(new DeleteObjectRequest {
            BucketName = _manifestBucket,
            Key = key
        });
    } catch {
        _logger.LogWarn($"unable to delete manifest file at s3://{_manifestBucket}/{key}");
    }
    return fileEntries;
}
//--- Methods ---

/// <summary>
/// Unzips the source package, uploads every entry to the destination bucket, and
/// writes a manifest (name -> hash) so a later Delete/Update can find the files.
/// </summary>
/// <param name="properties">Resource properties naming the source package and destination bucket/key.</param>
/// <returns>Response carrying the physical resource id and the destination URL.</returns>
/// <exception cref="FileNotFoundException">Thrown when the source package cannot be downloaded.</exception>
public async Task<Response<S3WriterResourceAttribute>> Create(S3WriterResourceProperties properties) {
    _logger.LogInfo($"copying package s3://{properties.SourceBucketName}/{properties.SourceKey} to S3 bucket {properties.DestinationBucketName}");

    // download package and copy all files to destination bucket
    var fileEntries = new Dictionary<string, string>();
    if(!await ProcessZipFileItemsAsync(properties.SourceBucketName, properties.SourceKey, async entry => {
        using(var stream = entry.Open()) {
            var memoryStream = new MemoryStream();
            await stream.CopyToAsync(memoryStream);
            var hash = GetMD5Hash(memoryStream);

            // FIX: rewind the buffer before uploading; after CopyToAsync (and hashing)
            // the position may be at the end, which would upload zero bytes if the SDK
            // reads from the current position — rewinding is harmless otherwise
            memoryStream.Position = 0;
            var destination = Path.Combine(properties.DestinationKey, entry.FullName).Replace('\\', '/');
            _logger.LogInfo($"uploading file: {destination}");
            await _transferUtility.UploadAsync(
                memoryStream,
                properties.DestinationBucketName,
                destination
            );
            fileEntries.Add(entry.FullName, hash);
        }
    })) {
        throw new FileNotFoundException("Unable to download source package");
    }

    // create package manifest for future deletion
    _logger.LogInfo($"uploaded {fileEntries.Count:N0} files");
    await WriteManifest(properties, fileEntries);
    return new Response<S3WriterResourceAttribute> {
        PhysicalResourceId = $"s3unzip:{properties.DestinationBucketName}:{properties.DestinationKey}",
        Attributes = new S3WriterResourceAttribute {
            Url = $"s3://{properties.DestinationBucketName}/{properties.DestinationKey}"
        }
    };
}
/// <summary>
/// Update simply re-runs Create with the new properties; the previous object is
/// not explicitly removed here.
/// </summary>
/// <param name="oldProperties">Previous resource properties (unused).</param>
/// <param name="properties">New resource properties.</param>
/// <returns>The response produced by Create.</returns>
public Task<Response<S3WriterResourceAttribute>> Update(S3WriterResourceProperties oldProperties, S3WriterResourceProperties properties) {
    return Create(properties);
}
/// <summary>
/// Updates the unzipped package in place: if the destination changed, delete and
/// re-create; otherwise diff the new package against the old manifest and upload
/// only new or changed files, deleting files that disappeared.
/// </summary>
/// <param name="oldProperties">Previous resource properties.</param>
/// <param name="properties">New resource properties.</param>
/// <returns>Response carrying the physical resource id and the destination URL.</returns>
/// <exception cref="FileNotFoundException">Thrown when the new source package cannot be downloaded.</exception>
public async Task<Response<S3WriterResourceAttribute>> Update(S3WriterResourceProperties oldProperties, S3WriterResourceProperties properties) {

    // check if the unzip properties have changed
    if(
        (oldProperties.DestinationBucketName != properties.DestinationBucketName)
        || (oldProperties.DestinationKey != properties.DestinationKey)
    ) {
        _logger.LogInfo($"replacing package s3://{properties.SourceBucketName}/{properties.SourceKey} in S3 bucket {properties.DestinationBucketName}");

        // remove old file and upload new ones; don't try to compute a diff
        await Delete(oldProperties);
        return await Create(properties);
    } else {
        _logger.LogInfo($"updating package {properties.SourceKey} in S3 bucket {properties.DestinationBucketName}");

        // download old package manifest
        var oldFileEntries = await ReadAndDeleteManifest(oldProperties);
        if(oldFileEntries == null) {

            // unable to download the old manifest; continue with uploading new files
            return await Create(properties);
        }

        // download new source package
        var newFileEntries = new Dictionary<string, string>();
        var uploadedCount = 0;
        var skippedCount = 0;
        if(!await ProcessZipFileItemsAsync(properties.SourceBucketName, properties.SourceKey, async entry => {
            using(var stream = entry.Open()) {
                var memoryStream = new MemoryStream();
                await stream.CopyToAsync(memoryStream);
                var hash = GetMD5Hash(memoryStream);

                // only upload file if new or the contents have changed
                if(!oldFileEntries.TryGetValue(entry.FullName, out var existingHash) || (existingHash != hash)) {

                    // FIX: rewind the buffer before uploading so the SDK reads from the
                    // start rather than the post-copy/post-hash position
                    memoryStream.Position = 0;
                    var destination = Path.Combine(properties.DestinationKey, entry.FullName).Replace('\\', '/');
                    _logger.LogInfo($"uploading file: {destination}");
                    await _transferUtility.UploadAsync(
                        memoryStream,
                        properties.DestinationBucketName,
                        destination
                    );
                    ++uploadedCount;
                } else {
                    ++skippedCount;
                }
                newFileEntries.Add(entry.FullName, hash);
            }
        })) {
            throw new FileNotFoundException("Unable to download source package");
        }

        // create package manifest for future deletion
        // FIX: rejoined the log-message literal that was split mid-string in the source
        _logger.LogInfo($"uploaded {uploadedCount:N0} files");
        _logger.LogInfo($"skipped {skippedCount:N0} unchanged files");
        await WriteManifest(properties, newFileEntries);

        // delete files that are no longer needed
        // FIX: normalize path separators to '/' so keys match the uploaded ones
        await BatchDeleteFiles(
            properties.DestinationBucketName,
            oldFileEntries
                .Where(kv => !newFileEntries.ContainsKey(kv.Key))
                .Select(kv => Path.Combine(properties.DestinationKey, kv.Key).Replace('\\', '/'))
                .ToList()
        );
        return new Response<S3WriterResourceAttribute> {
            PhysicalResourceId = $"s3unzip:{properties.DestinationBucketName}:{properties.DestinationKey}",
            Attributes = new S3WriterResourceAttribute {
                Url = $"s3://{properties.DestinationBucketName}/{properties.DestinationKey}"
            }
        };
    }
}
/// <summary>
/// Produces the manifest hash for a zip entry: the entry's MD5 (hex) combined
/// with its resolved encoding type.
/// </summary>
/// <param name="entry">Zip entry to hash.</param>
/// <param name="properties">Resource properties used to resolve the encoding type.</param>
/// <returns>A "&lt;md5hex&gt;-&lt;ENCODING&gt;" string.</returns>
private string ComputeEntryHash(ZipArchiveEntry entry, S3WriterResourceProperties properties) {
    var contentHash = GetMD5AsHexString(entry);
    var encodingType = DetermineEncodingType(entry.FullName, properties);
    return $"{contentHash}-{encodingType}";
}
/// <summary>
/// Resolves the configured encoding type as an upper-case token, defaulting to
/// "NONE" when no encoding is configured.
/// </summary>
/// <param name="filename">Entry file name (currently unused; kept for callers and future per-file rules).</param>
/// <param name="properties">Resource properties carrying the optional Encoding setting.</param>
/// <returns>"NONE" or the configured encoding, upper-cased.</returns>
private string DetermineEncodingType(string filename, S3WriterResourceProperties properties) {
    var configuredEncoding = properties.Encoding;
    return (configuredEncoding != null)
        ? configuredEncoding.ToUpperInvariant()
        : "NONE";
}