// Verifies that an over-threshold message is NOT offloaded to S3 when
// large-payload support is explicitly disabled, and that the original body
// is forwarded to SQS unchanged (mirrors the synchronous counterpart test).
public async Task Long_Message_Async_It_Is_Not_Stored_In_S3_If_IsLargePayloadSupportEnabled_Is_False()
{
    var extendedClient = new AmazonSQSExtendedClient(
        sqsMock.Object,
        new ExtendedClientConfiguration().WithLargePayloadSupportDisabled());
    var body = GenerateLongString(SQSExtendedClientConstants.DEFAULT_MESSAGE_SIZE_THRESHOLD + 1);
    var messageRequest = new SendMessageRequest(SQS_QUEUE_NAME, body);

    await extendedClient.SendMessageAsync(messageRequest);

    // No S3 upload may ever happen while large-payload support is off.
    s3Mock.Verify(s => s.PutObjectAsync(It.IsAny<PutObjectRequest>(), It.IsAny<CancellationToken>()), Times.Never);
    // Fixed: previously this test never asserted the SQS side; the sync
    // counterpart checks the body reaches SQS untouched, so do the same here.
    sqsMock.Verify(
        s => s.SendMessageAsync(
            It.Is<SendMessageRequest>(r => r.MessageBody.Equals(body)),
            It.IsAny<CancellationToken>()),
        Times.Once);
}
// Ensures that with large-payload support disabled, an over-threshold message
// bypasses S3 entirely and its body reaches SQS unchanged.
public void Long_Message_It_Is_Not_Stored_In_S3_If_IsLargePayloadSupportEnabled_Is_False()
{
    var configuration = new ExtendedClientConfiguration().WithLargePayloadSupportDisabled();
    var extendedClient = new AmazonSQSExtendedClient(sqsMock.Object, configuration);
    var payload = GenerateLongString(SQSExtendedClientConstants.DEFAULT_MESSAGE_SIZE_THRESHOLD + 1);

    extendedClient.SendMessage(new SendMessageRequest(SQS_QUEUE_NAME, payload));

    // Nothing should have been written to S3; SQS receives the raw body.
    s3Mock.Verify(s => s.PutObject(It.IsAny<PutObjectRequest>()), Times.Never);
    sqsMock.Verify(s => s.SendMessage(It.Is<SendMessageRequest>(r => r.MessageBody.Equals(payload))));
}
// Builds the AWS S3 and SQS clients from environment settings and composes
// the extended SQS client with large-payload support backed by the payload bucket.
public QueueClient(Env env)
{
    this.env = env;

    var awsCredentials = new BasicAWSCredentials(env.SEARCH_AWS_ACCESSKEY, env.SEARCH_AWS_SECRETACCESSKEY);
    var awsRegion = RegionEndpoint.GetBySystemName(env.SEARCH_AWS_REGION);

    this.s3 = new AmazonS3Client(awsCredentials, awsRegion);
    this.sqs = new AmazonSQSClient(awsCredentials, awsRegion);
    this.client = new AmazonSQSExtendedClient(
        this.sqs,
        new ExtendedClientConfiguration().WithLargePayloadSupportEnabled(this.s3, env.SQS_PAYLOAD_BUCKET));
}
// Initializes the queue service: JSON serializer settings that omit
// null/default values, the supplied search configuration, and the extended SQS client.
public SearchIndexingQueueService(ISearchConfiguration searchConfiguration)
{
    _jsonSettings = new JsonSerializerSettings
    {
        NullValueHandling = NullValueHandling.Ignore,
        DefaultValueHandling = DefaultValueHandling.Ignore,
    };
    _searchConfiguration = searchConfiguration;
    _sqsExtendedClient = CreateExtendedClient();
}
// Verifies that with AlwaysThroughS3 enabled even a message that does NOT
// exceed the size threshold is still offloaded to S3.
public void Short_Message_It_Is_Stored_In_S3_If_AlwaysThroughS3_Configured()
{
    var extendedClient = new AmazonSQSExtendedClient(
        sqsMock.Object,
        new ExtendedClientConfiguration()
            .WithLargePayloadSupportEnabled(s3Mock.Object, S3_BUCKET_NAME)
            .WithAlwaysThroughS3(true));
    // Fixed: previously generated DEFAULT_MESSAGE_SIZE_THRESHOLD + 1, i.e. a LONG
    // message that would go to S3 anyway, so AlwaysThroughS3 was never actually
    // exercised. Use exactly the threshold (not exceeding it), matching the
    // batch variant of this test.
    var body = GenerateLongString(SQSExtendedClientConstants.DEFAULT_MESSAGE_SIZE_THRESHOLD);
    var messageRequest = new SendMessageRequest(SQS_QUEUE_NAME, body);

    extendedClient.SendMessage(messageRequest);

    s3Mock.Verify(s => s.PutObject(It.IsAny<PutObjectRequest>()), Times.Once);
    sqsMock.Verify(s => s.SendMessage(It.Is<SendMessageRequest>(
        r => MessagePointerIsCorrect(r.MessageBody) && LargePayloadAttributeIsAdded(r.MessageAttributes))));
}
// Verifies that a message just over a custom (lowered) size threshold is
// offloaded to S3 and replaced by a message pointer with the large-payload attribute.
public async Task Short_Message_Async_It_Is_Stored_In_S3_If_Exceeds_Threshold()
{
    const int customThreshold = 100;
    var configuration = new ExtendedClientConfiguration()
        .WithLargePayloadSupportEnabled(s3Mock.Object, S3_BUCKET_NAME)
        .WithMessageSizeThreshold(customThreshold);
    var extendedClient = new AmazonSQSExtendedClient(sqsMock.Object, configuration);
    var payload = GenerateLongString(customThreshold + 1);

    await extendedClient.SendMessageAsync(new SendMessageRequest(SQS_QUEUE_NAME, payload));

    // Exactly one S3 upload, and the SQS message carries the pointer + attribute.
    s3Mock.Verify(s => s.PutObjectAsync(It.IsAny<PutObjectRequest>(), It.IsAny<CancellationToken>()), Times.Once);
    sqsMock.Verify(
        s => s.SendMessageAsync(
            It.Is<SendMessageRequest>(r =>
                MessagePointerIsCorrect(r.MessageBody) &&
                LargePayloadAttributeIsAdded(r.MessageAttributes)),
            default(CancellationToken)),
        Times.Once);
}
// Verifies that when RetainS3Messages is enabled, deleting a large message
// removes it from SQS (with the S3 pointer stripped from the receipt handle)
// but leaves the S3 object in place.
// NOTE(review): the method name says "Is_Deleted_From_s3" but the assertion is
// Times.Never on DeleteObjectAsync - the name looks inverted; confirm and rename
// in a dedicated commit (renaming here would change the test's identifier).
public async Task Long_Message_Async_It_Is_Deleted_From_s3_When_RetainS3Messages_Is_Set()
{
    var configuration = new ExtendedClientConfiguration()
        .WithLargePayloadSupportEnabled(s3Mock.Object, S3_BUCKET_NAME)
        .WithRetainS3Messages(true);
    var extendedClient = new AmazonSQSExtendedClient(sqsMock.Object, configuration);
    var storedKey = Guid.NewGuid().ToString("N");
    var receiptHandle = GenerateReceiptHandle(S3_BUCKET_NAME, storedKey, Constants.HandleTail);

    await extendedClient.DeleteMessageAsync(new DeleteMessageRequest(SQS_QUEUE_NAME, receiptHandle));

    // S3 object retained; SQS delete uses only the original (tail) handle.
    s3Mock.Verify(
        m => m.DeleteObjectAsync(It.IsAny<string>(), It.IsAny<string>(), It.IsAny<CancellationToken>()),
        Times.Never);
    sqsMock.Verify(m => m.DeleteMessageAsync(
        It.Is<DeleteMessageRequest>(r =>
            r.QueueUrl.Equals(SQS_QUEUE_NAME) &&
            r.ReceiptHandle.Equals(Constants.HandleTail)),
        It.IsAny<CancellationToken>()));
}
// Batch variant: with large-payload support disabled, none of the over-threshold
// entries go to S3, every body reaches SQS unchanged, and no reserved
// large-payload attribute is attached.
public void Long_Message_It_Is_Not_Stored_In_S3_If_IsLargePayloadSupportEnabled_Is_False()
{
    var extendedClient = new AmazonSQSExtendedClient(
        sqsMock.Object,
        new ExtendedClientConfiguration().WithLargePayloadSupportDisabled());
    var payload = GenerateLongString(SQSExtendedClientConstants.DEFAULT_MESSAGE_SIZE_THRESHOLD + 1);
    var entries = Enumerable.Range(0, 3)
        .Select(_ => new SendMessageBatchRequestEntry(Guid.NewGuid().ToString("N"), payload))
        .ToList();
    var batchRequest = new SendMessageBatchRequest(SQS_QUEUE_NAME, entries);

    extendedClient.SendMessageBatch(batchRequest);

    s3Mock.Verify(s => s.PutObject(It.IsAny<PutObjectRequest>()), Times.Never);
    sqsMock.Verify(s => s.SendMessageBatch(It.Is<SendMessageBatchRequest>(r =>
        r.Entries.All(e =>
            e.MessageBody.Equals(payload) &&
            !e.MessageAttributes.ContainsKey(SQSExtendedClientConstants.RESERVED_ATTRIBUTE_NAME)))));
}
// Batch delete with RetainS3Messages: every entry is removed from SQS using the
// stripped (tail) receipt handle, while the S3 payload objects are left untouched.
public async Task Long_Messages_Async_They_Are_Deleted_From_SQS_Only_If_RetainS3Messages_Configured()
{
    var configuration = new ExtendedClientConfiguration()
        .WithLargePayloadSupportEnabled(s3Mock.Object, S3_BUCKET_NAME)
        .WithRetainS3Messages(true);
    var extendedClient = new AmazonSQSExtendedClient(sqsMock.Object, configuration);
    var storedKey = Guid.NewGuid().ToString("N");
    var receiptHandle = GenerateReceiptHandle(S3_BUCKET_NAME, storedKey, Constants.HandleTail);

    var batchEntries = new List<DeleteMessageBatchRequestEntry>();
    for (var i = 0; i < 3; i++)
    {
        batchEntries.Add(new DeleteMessageBatchRequestEntry(Guid.NewGuid().ToString("N"), receiptHandle));
    }

    await extendedClient.DeleteMessageBatchAsync(new DeleteMessageBatchRequest(SQS_QUEUE_NAME, batchEntries));

    s3Mock.Verify(
        m => m.DeleteObjectAsync(It.IsAny<string>(), It.IsAny<string>(), It.IsAny<CancellationToken>()),
        Times.Never);
    sqsMock.Verify(
        m => m.DeleteMessageBatchAsync(
            It.Is<DeleteMessageBatchRequest>(r => r.Entries.All(e => e.ReceiptHandle.Equals(Constants.HandleTail))),
            It.IsAny<CancellationToken>()),
        Times.Once);
}
// Example: index a datahub asset carrying 50 large base-64-encoded PDF
// resources, forcing the message through the S3 large-payload path.
static async Task UpsertDatahubAssetExample(AmazonSQSExtendedClient client)
{
    // Read and encode the sample PDF once; every resource reuses the same payload.
    var pdfBytes = File.ReadAllBytes(@"../Datahub.Web/data/OffshoreBrighton_SACO_V1.0.pdf");
    var pdfEncoded = Convert.ToBase64String(pdfBytes);

    var resources = Enumerable.Range(1, 50).Select(i => new
    {
        title = "An example searchable PDF resource #" + i,
        url = "http://example.com/pages/123456789", // the URL of the page, for clicking through
        keywords = new[] { new { vocab = "http://vocab.jncc.gov.uk/jncc-web", value = "Example" } },
        file_base64 = pdfEncoded,    // base-64 encoded file
        file_extension = "pdf",      // when this is a downloadable
        file_bytes = "1048576",      // file such as a PDF, etc.
        published_date = "2019-02-15",
    });

    var bigMessage = new
    {
        verb = "upsert",
        index = "dev",
        document = new
        {
            id = "123456789",
            site = "datahub", // as opposed to website|sac|mhc
            title = "An example searchable document with resources :-)",
            content = "This is a searchable document made purely for example purposes.",
            url = "http://example.com/pages/123456789", // the URL of the page, for clicking through
            keywords = new[] { new { vocab = "http://vocab.jncc.gov.uk/jncc-web", value = "Example" } },
            published_date = "2019-01-14",
        },
        resources = resources.ToArray(),
    };

    var basicResponse = await client.SendMessageAsync(
        Env.Var.SqsEndpoint,
        JsonConvert.SerializeObject(bigMessage, Formatting.None));
    Console.WriteLine(basicResponse.MessageId);
}
// Batch variant: with a lowered threshold, every over-threshold entry is
// uploaded to S3 (one PutObject per entry) and replaced by a pointer message
// carrying the large-payload attribute.
public async Task Short_Message_Async_It_Is_Stored_In_S3_If_Exceeds_Threshold()
{
    const int customThreshold = 100;
    var extendedClient = new AmazonSQSExtendedClient(
        sqsMock.Object,
        new ExtendedClientConfiguration()
            .WithLargePayloadSupportEnabled(s3Mock.Object, S3_BUCKET_NAME)
            .WithMessageSizeThreshold(customThreshold));
    var payload = GenerateLongString(customThreshold + 1);
    var entries = Enumerable.Range(0, 3)
        .Select(_ => new SendMessageBatchRequestEntry(Guid.NewGuid().ToString("N"), payload))
        .ToList();

    await extendedClient.SendMessageBatchAsync(new SendMessageBatchRequest(SQS_QUEUE_NAME, entries));

    // One upload per batch entry.
    s3Mock.Verify(
        s => s.PutObjectAsync(It.IsAny<PutObjectRequest>(), It.IsAny<CancellationToken>()),
        Times.Exactly(3));
    sqsMock.Verify(s => s.SendMessageBatchAsync(
        It.Is<SendMessageBatchRequest>(r => r.Entries.All(e =>
            MessagePointerIsCorrect(e.MessageBody) &&
            LargePayloadAttributeIsAdded(e.MessageAttributes))),
        default(CancellationToken)));
}
// Verifies that a custom IS3KeyProvider is consulted exactly once and that the
// generated key (with its custom prefix) is used for both the S3 object and the
// SQS message pointer.
public async Task Long_Message_Async_S3KeyProvider_Is_Used_If_Configured()
{
    var mockS3Provider = new Mock<IS3KeyProvider>();
    // Consistency fix: use the shared Constants.CustomPrefix (as the batch
    // variant of this test does) instead of a duplicated magic string.
    mockS3Provider.Setup(m => m.GenerateName()).Returns(Constants.CustomPrefix + Guid.NewGuid().ToString("N"));
    var extendedClient = new AmazonSQSExtendedClient(
        sqsMock.Object,
        new ExtendedClientConfiguration()
            .WithLargePayloadSupportEnabled(s3Mock.Object, S3_BUCKET_NAME)
            .WithS3KeyProvider(mockS3Provider.Object));
    var body = GenerateLongString(SQSExtendedClientConstants.DEFAULT_MESSAGE_SIZE_THRESHOLD + 1);
    var messageRequest = new SendMessageRequest(SQS_QUEUE_NAME, body);

    await extendedClient.SendMessageAsync(messageRequest);

    mockS3Provider.Verify(s => s.GenerateName(), Times.Once);
    s3Mock.Verify(
        s => s.PutObjectAsync(
            It.Is<PutObjectRequest>(r => r.Key.StartsWith(Constants.CustomPrefix)),
            It.IsAny<CancellationToken>()),
        Times.Once);
    sqsMock.Verify(
        s => s.SendMessageAsync(
            It.Is<SendMessageRequest>(r =>
                MessagePointerIsCorrect(r.MessageBody, Constants.CustomPrefix) &&
                LargePayloadAttributeIsAdded(r.MessageAttributes)),
            It.IsAny<CancellationToken>()),
        Times.Once);
}
// Batch variant: with AlwaysThroughS3 enabled, entries that are exactly at the
// threshold (i.e. would normally stay in SQS) are still all uploaded to S3.
public void Short_Message_It_Is_Stored_In_S3_If_AlwaysThroughS3_Configured()
{
    var configuration = new ExtendedClientConfiguration()
        .WithLargePayloadSupportEnabled(s3Mock.Object, S3_BUCKET_NAME)
        .WithAlwaysThroughS3(true);
    var extendedClient = new AmazonSQSExtendedClient(sqsMock.Object, configuration);
    var payload = GenerateLongString(SQSExtendedClientConstants.DEFAULT_MESSAGE_SIZE_THRESHOLD);
    var entries = Enumerable.Range(0, 3)
        .Select(_ => new SendMessageBatchRequestEntry(Guid.NewGuid().ToString("N"), payload))
        .ToList();

    extendedClient.SendMessageBatch(new SendMessageBatchRequest(SQS_QUEUE_NAME, entries));

    s3Mock.Verify(s => s.PutObject(It.IsAny<PutObjectRequest>()), Times.Exactly(3));
    sqsMock.Verify(s => s.SendMessageBatch(It.Is<SendMessageBatchRequest>(r =>
        r.Entries.All(e =>
            MessagePointerIsCorrect(e.MessageBody) &&
            LargePayloadAttributeIsAdded(e.MessageAttributes)))));
}
// Example: send a "delete" verb message for a single document to the queue
// and print the resulting SQS message id.
static async Task DeleteExample(AmazonSQSExtendedClient client)
{
    var deleteMessage = new
    {
        verb = "delete",
        index = "test",
        document = new
        {
            id = "123456789",
            site = "website",
        }
    };

    var payload = JsonConvert.SerializeObject(deleteMessage, Formatting.None);
    var deleteResponse = await client.SendMessageAsync(Env.Var.SqsEndpoint, payload);
    Console.WriteLine(deleteResponse.MessageId);
}
// Batch variant: the custom IS3KeyProvider must supply the key for every
// over-threshold entry; all S3 objects and message pointers carry its prefix.
public async Task Long_Message_Async_S3KeyProvider_Is_Used_If_Configured()
{
    var keyProviderMock = new Mock<IS3KeyProvider>();
    keyProviderMock.Setup(m => m.GenerateName()).Returns(Constants.CustomPrefix + Guid.NewGuid().ToString("N"));
    var configuration = new ExtendedClientConfiguration()
        .WithLargePayloadSupportEnabled(s3Mock.Object, S3_BUCKET_NAME)
        .WithS3KeyProvider(keyProviderMock.Object);
    var extendedClient = new AmazonSQSExtendedClient(sqsMock.Object, configuration);

    var entries = Enumerable.Range(0, 3)
        .Select(_ => new SendMessageBatchRequestEntry(
            Guid.NewGuid().ToString("N"),
            GenerateLongString(SQSExtendedClientConstants.DEFAULT_MESSAGE_SIZE_THRESHOLD + 1)))
        .ToList();
    var batchRequest = new SendMessageBatchRequest(SQS_QUEUE_NAME, entries);

    await extendedClient.SendMessageBatchAsync(batchRequest);

    s3Mock.Verify(
        s => s.PutObjectAsync(
            It.Is<PutObjectRequest>(r => r.Key.StartsWith(Constants.CustomPrefix)),
            It.IsAny<CancellationToken>()),
        Times.Exactly(3));
    sqsMock.Verify(s => s.SendMessageBatchAsync(
        It.Is<SendMessageBatchRequest>(r => r.Entries.All(e =>
            MessagePointerIsCorrect(e.MessageBody, Constants.CustomPrefix) &&
            LargePayloadAttributeIsAdded(e.MessageAttributes))),
        default(CancellationToken)));
}
// Entry point: wires up the extended SQS client from environment variables and
// dispatches to the example selected by the first command-line argument.
static void Main(string[] args)
{
    DotEnv.Config();
    Console.WriteLine("Hello World!");

    var credentials = new BasicAWSCredentials(Env.Var.AwsAccessKey, Env.Var.AwsSecretAccessKey);
    var region = RegionEndpoint.GetBySystemName(Env.Var.AwsRegion);
    var s3 = new AmazonS3Client(credentials, region);
    var sqs = new AmazonSQSClient(credentials, region);
    var sqsExtendedClient = new AmazonSQSExtendedClient(
        sqs,
        new ExtendedClientConfiguration().WithLargePayloadSupportEnabled(s3, Env.Var.SqsPayloadBucket));

    // Dispatch table expressed as a switch; null (no args) falls through to default.
    switch (args.FirstOrDefault())
    {
        case "upsert-simple":
            UpsertSimpleExample(sqsExtendedClient).GetAwaiter().GetResult();
            break;
        case "delete-simple":
            DeleteExample(sqsExtendedClient).GetAwaiter().GetResult();
            break;
        case "upsert-pdf":
            UpsertPdfExample(sqsExtendedClient).GetAwaiter().GetResult();
            break;
        case "upsert-datahub-asset":
            UpsertDatahubAssetExample(sqsExtendedClient).GetAwaiter().GetResult();
            break;
        default:
            throw new Exception("Please specify command (see code / readme).");
    }
}
// Thin passthrough that forwards a message body to the given queue endpoint
// via the extended client and returns the SQS response.
// NOTE(review): by convention this async method should carry the Async suffix;
// name kept unchanged to avoid breaking callers.
public static async Task<SendMessageResponse> SendMessage(AmazonSQSExtendedClient client, string endpoint, string message)
{
    var response = await client.SendMessageAsync(endpoint, message);
    return response;
}
// Test fixture setup: fresh S3/SQS mocks and an extended client with
// large-payload support enabled against the test bucket.
public void SetUp()
{
    s3Mock = new Mock<IAmazonS3>();
    sqsMock = new Mock<IAmazonSQS>();

    var configuration = new ExtendedClientConfiguration()
        .WithLargePayloadSupportEnabled(s3Mock.Object, S3_BUCKET_NAME);
    client = new AmazonSQSExtendedClient(sqsMock.Object, configuration);
}
// Walks every configured asset list, downloads each asset's file, and queues an
// "upsert" indexing message per asset on SQS (large payloads offloaded to S3).
// Prints a per-item progress log and a final error summary.
static void ProcessAssetLists()
{
    Console.WriteLine("sqs endpoint: {0}", Env.Var.SqsEndpoint);
    // Fixed: the format string had no {0} placeholder, so the bucket name
    // argument was silently dropped from the log output.
    Console.WriteLine("sqs s3 bucket: {0}", Env.Var.SqsPayloadBucket);

    int errors = 0;

    using (var s3 = GetS3Client())
    using (var sqs = GetSQSClient())
    using (var sqsExtendedClient = new AmazonSQSExtendedClient(
        sqs,
        new ExtendedClientConfiguration().WithLargePayloadSupportEnabled(s3, Env.Var.SqsPayloadBucket)))
    {
        foreach (var assetList in Env.Var.AssetLists)
        {
            Console.WriteLine("Processing {0}", assetList.Url);

            List<Asset> assets;
            try
            {
                assets = GetAssetList(assetList.Url);
            }
            catch (Exception e)
            {
                // Best-effort: log, count the failure, and carry on with the next list.
                Console.WriteLine("Unable to get asset list from {0}. Error: {1}", assetList.Url, e.Message);
                errors++;
                continue;
            }

            for (int i = 0; i < assets.Count; i++)
            {
                var asset = assets[i];
                var assetId = String.Format("{0}-{1}", assetList.IdPrefix, asset.Id);
                Console.WriteLine("Processing Asset {0}, {1} {2}", i + 1, assetId, asset.Title);

                // NOTE(review): errors is passed by value here, so IsValid cannot
                // increment the outer counter - confirm that is intended.
                if (AssetValidator.IsValid(asset, errors))
                {
                    var assetFileUrl = GetFileUrl(assetList.Url, asset.FileName);

                    AssetFile file;
                    try
                    {
                        file = GetAssetFile(assetFileUrl, asset);
                    }
                    catch (Exception e)
                    {
                        Console.WriteLine("Unable to get asset file from {0}. Error: {1}", assetFileUrl, e.Message);
                        errors++;
                        continue;
                    }

                    // Publication dates arrive in en-GB format; normalize to ISO yyyy-MM-dd.
                    var culture = CultureInfo.CreateSpecificCulture("en-GB");
                    var style = DateTimeStyles.None;
                    var message = new
                    {
                        verb = "upsert",
                        index = Env.Var.EsIndex,
                        document = new
                        {
                            id = assetId,
                            site = Env.Var.EsSite,
                            title = asset.Title,
                            url = file.Url,
                            published_date = DateTime.Parse(asset.PublicationDate, culture, style).ToString("yyyy-MM-dd"),
                            file_base64 = file.EncodedFile,     // base-64 encoded file
                            file_extension = file.Extension,    // when this is a downloadable
                            file_bytes = file.Bytes.ToString(), // file such as a PDF, etc.
                        }
                    };

                    // Console app with no sync context: blocking on the async send is acceptable.
                    var response = sqsExtendedClient.SendMessageAsync(
                        Env.Var.SqsEndpoint,
                        JsonConvert.SerializeObject(message, Formatting.None)).GetAwaiter().GetResult();
                    Console.WriteLine("Created Message Id {0}", response.MessageId);

                    // Optional throttle between assets to avoid hammering the source server.
                    if (Env.Var.AssetQueryDelay > 0)
                    {
                        Console.WriteLine("Waiting {0}ms before getting next asset", Env.Var.AssetQueryDelay);
                        Thread.Sleep(Env.Var.AssetQueryDelay);
                    }
                }
            }
        }
    }

    if (errors > 0)
    {
        // Fixed spelling in the summary message: "occured" -> "occurred".
        Console.WriteLine("{0} errors occurred during processing", errors);
    }
    else
    {
        Console.WriteLine("Indexing Complete, no errors");
    }
}