/// <summary>
/// Decrypts a specified csv file, which was created by XtractUniversal with column encryption enabled.
/// </summary>
/// <param name="args">Command line arguments; parsed by GetInfoFromArgs.</param>
/// <returns>A task representing the asynchronous decryption run.</returns>
private static async Task Main(string[] args)
{
    if (!GetInfoFromArgs(args))
    {
        return;
    }

    // Build the metadata file name from the source file name if none was given.
    metaDataFile = metaDataFile ?? $"{Path.GetFileNameWithoutExtension(sourceFile)}_metadata.json";

    // Rent a reusable 12-byte buffer for the AES-GCM nonce (IV).
    ivArray = ArrayPool<byte>.Shared.Rent(12);

    await using FileStream metaDataStream = File.Open(metaDataFile, FileMode.Open, FileAccess.Read);
    using StreamReader reader = new StreamReader(metaDataStream);
    using CsvProcessor csvProcessor = new CsvProcessor(await reader.ReadToEndAsync());

    // Load the RSA private key and use it to decrypt the AES session key from the metadata.
    string keyXml = await File.ReadAllTextAsync(keyFile);
    using RSACryptoServiceProvider privateKey = new RSACryptoServiceProvider();
    privateKey.FromXmlString(keyXml);
    byte[] sessionKey = privateKey.Decrypt(csvProcessor.EncryptedSessionKey, true);

    await using FileStream target = File.Open(targetFile, FileMode.Create, FileAccess.Write);
    await using FileStream fs = File.Open(sourceFile, FileMode.Open, FileAccess.Read);

    using (aesGcm = new AesGcm(sessionKey))
    {
        await csvProcessor.ProcessDataAsync(DecryptCell, fs.ReadAsync, target.WriteAsync, CancellationToken.None);
        ArrayPool<byte>.Shared.Return(ivArray);
    }
}
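// The console sample above passes a DecryptCell callback into CsvProcessor.ProcessDataAsync,
// but the helper itself is not shown. The following is a minimal, hypothetical sketch of such
// a callback, assuming each encrypted cell is laid out as a 12-byte AES-GCM nonce, the
// ciphertext, and a trailing 16-byte authentication tag; the exact cell layout and the
// delegate signature expected by CsvProcessor are assumptions, not the shipped implementation.
private static byte[] DecryptCell(ReadOnlySpan<byte> encryptedCell)
{
    const int NonceSize = 12; // AES-GCM standard nonce length, matching the rented ivArray
    const int TagSize = 16;   // AES-GCM authentication tag length

    // Split the cell into nonce, ciphertext, and tag (assumed layout).
    encryptedCell.Slice(0, NonceSize).CopyTo(ivArray);
    ReadOnlySpan<byte> cipherText = encryptedCell.Slice(NonceSize, encryptedCell.Length - NonceSize - TagSize);
    ReadOnlySpan<byte> tag = encryptedCell.Slice(encryptedCell.Length - TagSize);

    byte[] plainText = new byte[cipherText.Length];

    // AesGcm.Decrypt verifies the authentication tag and throws a
    // CryptographicException if the ciphertext was tampered with.
    aesGcm.Decrypt(ivArray.AsSpan(0, NonceSize), cipherText, tag, plainText);
    return plainText;
}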
public static async Task Run(
    [BlobTrigger("<yourStorageBlob>/{name}", Connection = "AzureWebJobsStorage")] Stream encryptedFile,
    string name,
    ILogger log)
{
    // Save the start time for diagnostics.
    DateTime startTime = DateTime.Now;
    try
    {
        // The function triggers for every uploaded file.
        // We only exclude the metadata file, to retain maximum flexibility.
        // Adjust the trigger or the file filter so the function matches your purpose.
        if (name.Contains(".json") || name.Contains("metadata"))
        {
            log.LogInformation("Metadata file upload. Exiting function.");
            return;
        }

        // Rent a reusable 12-byte buffer for the AES-GCM nonce (IV).
        ivArray = ArrayPool<byte>.Shared.Rent(12);

        // Prepare decryption.
        log.LogInformation("Loading account and container info...");
        string conString = Environment.GetEnvironmentVariable("AzureWebJobsStorage");
        sourceContainerName = Environment.GetEnvironmentVariable("SourceContainer");
        targetContainerName = Environment.GetEnvironmentVariable("TargetContainer");
        privateKeyFileName = Environment.GetEnvironmentVariable("PrivateKeyFileName");
        metadataFileName = $"{Path.GetFileNameWithoutExtension(name)}_metadata.json";

        // Remove this check if you are not using a connection string.
        if (string.IsNullOrWhiteSpace(conString))
        {
            throw new InvalidOperationException("No connection string was specified.");
        }

        if (string.IsNullOrWhiteSpace(privateKeyFileName))
        {
            throw new InvalidOperationException("No private key file was specified.");
        }

        if (string.IsNullOrWhiteSpace(sourceContainerName))
        {
            throw new InvalidOperationException("No source container was specified.");
        }

        if (string.IsNullOrWhiteSpace(targetContainerName))
        {
            throw new InvalidOperationException("No target container was specified.");
        }

        CloudStorageAccount storageAccount = CloudStorageAccount.Parse(conString);
        CloudBlobClient blobClient = storageAccount.CreateCloudBlobClient();
        sourceContainer = blobClient.GetContainerReference(sourceContainerName);
        targetContainer = blobClient.GetContainerReference(targetContainerName);

        log.LogInformation($"C# Blob trigger function processed blob\n Name: {name}\n Size: {encryptedFile.Length} bytes");

        // The blob may still be empty right after the trigger fires; retry a few times.
        if (encryptedFile.Length == 0)
        {
            for (int retriesRemaining = 3; retriesRemaining > 0; retriesRemaining--)
            {
                if (encryptedFile.Length > 0)
                {
                    break;
                }

                log.LogInformation("No data on the stream yet. Retrying in two seconds.");
                await Task.Delay(2000);
            }

            if (encryptedFile.Length == 0)
            {
                log.LogInformation("No data received.");
                return;
            }
        }

        log.LogInformation("Loading metadata...");
        log.LogInformation($"Expected metadata file name: {metadataFileName}");
        using CsvProcessor csvProcessor = new CsvProcessor(await GetMetaDataAsync());

        log.LogInformation("Decrypting session key...");
        using RSACryptoServiceProvider privateKey = new RSACryptoServiceProvider();
        privateKey.FromXmlString(await GetPrivateKeyAsync());

        // Decrypt the AES session key.
        byte[] sessionKey = privateKey.Decrypt(csvProcessor.EncryptedSessionKey, true);

        log.LogInformation("Opening target stream...");
        CloudBlockBlob plainTextBlob = targetContainer.GetBlockBlobReference(name);
        await using CloudBlobStream uploadStream = await plainTextBlob.OpenWriteAsync();

        // Process and decrypt the data.
        log.LogInformation("Decrypting data...");
        using (aesGcm = new AesGcm(sessionKey))
        {
            await csvProcessor.ProcessDataAsync(DecryptCell, encryptedFile.ReadAsync, uploadStream.WriteAsync, CancellationToken.None);
        }

        log.LogInformation("Wrapping up upload to destination blob.");
        await uploadStream.CommitAsync();
    }
    catch (Exception e)
    {
        log.LogError(e.ToString());
    }
    finally
    {
        // Clean up resources.
        if (ivArray != null)
        {
            ArrayPool<byte>.Shared.Return(ivArray);
        }

        log.LogInformation($"Function started at {startTime} terminated.");
    }
}
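// GetMetaDataAsync and GetPrivateKeyAsync are referenced above but not shown. Assuming both
// the metadata JSON and the private key XML are stored as blobs in the source container,
// minimal sketches could look like the following; the helper bodies are assumptions, only
// the field names and SDK types are taken from the function above.
private static Task<string> GetMetaDataAsync()
{
    // Download the metadata JSON that was uploaded alongside the encrypted data file.
    CloudBlockBlob metadataBlob = sourceContainer.GetBlockBlobReference(metadataFileName);
    return metadataBlob.DownloadTextAsync();
}

private static Task<string> GetPrivateKeyAsync()
{
    // Download the RSA private key XML used to unwrap the AES session key.
    CloudBlockBlob keyBlob = sourceContainer.GetBlockBlobReference(privateKeyFileName);
    return keyBlob.DownloadTextAsync();
}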
private readonly int writeThreshold = 6 * (int)Math.Pow(2, 20); // 6 MB buffer threshold; S3 multipart parts must be at least 5 MB

#endregion

/// <summary>
/// You can modify this code as you wish.
///
/// Make sure not to change the decryption API interface logic, to ensure the successful decryption of your data.
/// </summary>
/// <param name="input">The S3 event that triggered the function.</param>
/// <param name="context">The Lambda execution context.</param>
/// <returns>True if the decryption succeeded; otherwise false.</returns>
/// <exception cref="InvalidOperationException"></exception>
/// <exception cref="FileNotFoundException"></exception>
public async Task<bool> FunctionHandler(S3EventNotification input, ILambdaContext context)
{
    try
    {
        // Rent a reusable 12-byte buffer for the AES-GCM nonce (IV).
        ivArray = ArrayPool<byte>.Shared.Rent(12);
        this.targetFileName = this.sourceFileName = input.Records[0].S3.Object.Key;

        LambdaLogger.Log("Loading and checking source/destination buckets, file name...");
        this.sourceBucketName = Environment.GetEnvironmentVariable("sourcebucket");
        this.targetBucketName = Environment.GetEnvironmentVariable("targetbucket");
        this.keyId = Environment.GetEnvironmentVariable("privatekeyid");

        // Validate the environment variables.
        ValidateEnvironment();
        LambdaLogger.Log(Environment.GetEnvironmentVariable("AWS_REGION"));

        LambdaLogger.Log("Loading ciphertext...");
        GetObjectRequest readRequest = new GetObjectRequest
        {
            BucketName = this.sourceBucketName,
            Key = this.sourceFileName,
        };

        this.client = new AmazonS3Client(RegionEndpoint.USEast1);
        using GetObjectResponse response = await this.client.GetObjectAsync(readRequest);
        if (response.HttpStatusCode != HttpStatusCode.OK)
        {
            throw new FileNotFoundException("Could not retrieve file from source bucket.");
        }

        LambdaLogger.Log("Loading metadata...");
        using CsvProcessor csvProcessor = new CsvProcessor(await GetMetaDataAsync());

        // Decrypt the AES session key.
        byte[] sessionKey = await DecryptSessionKey(csvProcessor.EncryptedSessionKey);

        LambdaLogger.Log($"Preparing multipart upload with a minimal part size of {this.writeThreshold} bytes");
        this.outputStream = new MemoryStream(this.partSize);
        await InitPartUploadAsync();

        LambdaLogger.Log("Decrypting...");
        using (this.aesGcm = new AesGcm(sessionKey))
        {
            await csvProcessor.ProcessDataAsync(DecryptCell, response.ResponseStream.ReadAsync, WritePartAsync, CancellationToken.None);
        }

        LambdaLogger.Log("Completing multipart upload...");
        if (this.outputStream.Length > 0)
        {
            // Flush any remaining buffered data as the final part.
            await WritePartInternalAsync();
        }

        await CompleteMultipartUploadAsync();
        return true;
    }
    catch (Exception ex)
    {
        LambdaLogger.Log($"Exception in PutS3Object: {ex}");
        if (!string.IsNullOrWhiteSpace(this.uploadId))
        {
            // Abort the multipart upload so no orphaned parts remain in the target bucket.
            AbortMultipartUploadRequest abortRequest = new AbortMultipartUploadRequest
            {
                BucketName = this.targetBucketName,
                Key = this.targetFileName,
                UploadId = this.uploadId
            };
            await this.client.AbortMultipartUploadAsync(abortRequest);
        }

        return false;
    }
    finally
    {
        // Clean up resources.
        ArrayPool<byte>.Shared.Return(ivArray);
        this.client?.Dispose();
    }
}
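// The multipart upload helpers (InitPartUploadAsync, WritePartAsync, WritePartInternalAsync,
// CompleteMultipartUploadAsync) are referenced above but not shown. As an illustration of the
// buffering scheme the handler implies, here is a hypothetical sketch of the write side: output
// is buffered in this.outputStream and flushed as an S3 part once it crosses writeThreshold.
// The this.partNumber counter and this.partETags list are hypothetical fields; the AWS SDK
// calls (UploadPartAsync, UploadPartRequest, PartETag) are real.
private async ValueTask WritePartAsync(ReadOnlyMemory<byte> data, CancellationToken token)
{
    await this.outputStream.WriteAsync(data, token);

    // Flush a part once enough data is buffered; S3 rejects non-final parts below 5 MB.
    if (this.outputStream.Length >= this.writeThreshold)
    {
        await WritePartInternalAsync();
    }
}

private async Task WritePartInternalAsync()
{
    this.outputStream.Position = 0;
    UploadPartRequest partRequest = new UploadPartRequest
    {
        BucketName = this.targetBucketName,
        Key = this.targetFileName,
        UploadId = this.uploadId,
        PartNumber = ++this.partNumber, // hypothetical field
        InputStream = this.outputStream,
    };

    UploadPartResponse partResponse = await this.client.UploadPartAsync(partRequest);
    this.partETags.Add(new PartETag(partResponse.PartNumber, partResponse.ETag)); // hypothetical field

    // Reset the buffer for the next part.
    this.outputStream.SetLength(0);
}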