// Registers a delay test proving that a DELETE signed URL deletes the object
// before its expiry and is rejected with 400 afterwards.
private void DeleteTest_InitDelayTest()
{
    var bucket = _fixture.SingleVersionBucket;
    var objectName = IdGenerator.FromGuid();
    string signedUrl = null;
    _fixture.RegisterDelayTest(
        _duration,
        beforeDelay: async duration =>
        {
            signedUrl = _fixture.UrlSigner.Sign(bucket, objectName, duration, HttpMethod.Delete);

            // Upload an object which can be deleted with the URL.
            await _fixture.Client.UploadObjectAsync(bucket, objectName, "", new MemoryStream(_fixture.SmallContent));

            // Verify that the URL works initially: the delete succeeds and the object is gone.
            var response = await _fixture.HttpClient.DeleteAsync(signedUrl);
            await VerifyResponseAsync(response);
            var remaining = await _fixture.Client.ListObjectsAsync(bucket, objectName).FirstOrDefault(o => o.Name == objectName);
            Assert.Null(remaining);

            // Restore the object so the post-delay check can show it survives.
            await _fixture.Client.UploadObjectAsync(bucket, objectName, "", new MemoryStream(_fixture.SmallContent));
        },
        afterDelay: async () =>
        {
            // Verify that the URL no longer works and the object is untouched.
            var response = await _fixture.HttpClient.DeleteAsync(signedUrl);
            Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode);
            var survivor = await _fixture.Client.ListObjectsAsync(bucket, objectName).FirstOrDefault(o => o.Name == objectName);
            Assert.NotNull(survivor);

            // Cleanup.
            await _fixture.Client.DeleteObjectAsync(bucket, objectName);
        });
}
/// <summary>
/// Verifies that a data blob referenced by a short id can be downloaded through the
/// "download/getFile" endpoint and that the returned bytes match the stored blob.
/// Fix: the HttpClientHandler and HttpClient were never disposed; both are now
/// created with using declarations so the underlying sockets are released.
/// </summary>
public async Task CanDownloadDataBlobWithShortId()
{
    var dataBlob = new DataBlob(IdGenerator.FromGuid(), new byte[] { 0x01, 0x02, 0x03 }, "testFile.bin");
    var shortId = new ShortId("unittest_datablob", nameof(DataBlob), dataBlob.Id);
    // Insert the blob and its short id only if a previous run didn't leave them behind.
    if (!await analystDataApiClient.ExistsAsync<DataBlob>(dataBlob.Id))
    {
        await analystDataApiClient.InsertAsync(dataBlob, dataBlob.Id);
    }
    if (!await analystDataApiClient.ExistsAsync<ShortId>(shortId.Id))
    {
        await analystDataApiClient.InsertAsync(shortId, shortId.Id);
    }
    try
    {
        using var httpHandler = new HttpClientHandler();
        using var httpClient = new HttpClient(httpHandler);
        var accessToken = analystAuthenticationResult.AccessToken;
        httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", accessToken);
        var uri = RequestUriBuilder.Build(ApiSetup.ApiConfiguration, "download/getFile");
        uri += $"?shortId={Uri.EscapeDataString(shortId.Id)}";
        var response = await httpClient.GetAsync(uri);
        Assert.That(response.StatusCode, Is.EqualTo(HttpStatusCode.OK));
        Assert.That(response.Content.Headers.ContentDisposition, Is.Not.Null);
        var content = await response.Content.ReadAsByteArrayAsync();
        CollectionAssert.AreEqual(dataBlob.Data, content);
    }
    finally
    {
        // Best-effort cleanup of the test documents.
        await analystDataApiClient.DeleteAsync<ShortId>(shortId.Id);
        await TryDeleteDataBlob(dataBlob.Id);
    }
}
// Delay test: a signed GET URL for an object whose name contains spaces works
// before expiry and is rejected afterwards.
private static void GetObjectWithSpacesTest_Common(StorageFixture fixture, UrlSigner signer, [CallerMemberName] string caller = null)
{
    var bucket = fixture.SingleVersionBucket;
    var objectName = IdGenerator.FromGuid() + " with spaces";
    var expectedContent = fixture.SmallContent;
    string signedUrl = null;
    fixture.RegisterDelayTest(
        s_duration,
        beforeDelay: async duration =>
        {
            // Upload first, then sign a URL valid for the given duration.
            fixture.Client.UploadObject(bucket, objectName, null, new MemoryStream(expectedContent));
            signedUrl = signer.Sign(bucket, objectName, duration);

            // Verify that the URL works initially.
            var response = await fixture.HttpClient.GetAsync(signedUrl);
            await VerifyResponseAsync(response);
            var downloaded = await response.Content.ReadAsByteArrayAsync();
            AssertContentEqual(expectedContent, downloaded);
        },
        afterDelay: async () =>
        {
            // Verify that the URL no longer works.
            var response = await fixture.HttpClient.GetAsync(signedUrl);
            Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode);
        },
        caller);
}
// Inserts a row containing a nested record field and reads it back through a
// parameterized query keyed on the generated guid.
public void InsertRow_RecordField()
{
    var client = BigQueryClient.Create(_fixture.ProjectId);
    var table = client.GetDataset(_fixture.DatasetId).GetTable(_fixture.ComplexTypesTableId);
    var guid = IdGenerator.FromGuid();
    var position = new BigQueryInsertRow { ["x"] = 10L, ["y"] = 20L };
    var row = new BigQueryInsertRow { ["guid"] = guid, ["position"] = position };
    _fixture.InsertAndWait(table, () => table.InsertRow(row), 1);

    string sql = $"SELECT guid, position.x, position.y FROM {table} WHERE guid=@guid";
    var parameters = new[] { new BigQueryParameter("guid", BigQueryDbType.String, guid) };
    var resultRows = client.ExecuteQuery(sql, parameters)
        .Select(r => new { Guid = (string)r["guid"], X = (long)r["x"], Y = (long)r["y"] })
        .ToList();
    var expectedResults = new[] { new { Guid = guid, X = 10L, Y = 20L } };
    Assert.Equal(expectedResults, resultRows);
}
// Inserts a row whose record field itself contains a repeated field, then
// fetches the record and checks both the scalar and the repeated values.
public void InsertRow_RecordRepeatedField()
{
    var client = BigQueryClient.Create(_fixture.ProjectId);
    var table = client.GetDataset(_fixture.DatasetId).GetTable(_fixture.ComplexTypesTableId);
    var guid = IdGenerator.FromGuid();
    var jobRecord = new BigQueryInsertRow
    {
        ["company"] = "Pet Store",
        ["roles"] = new[] { "cashier", "manager" }
    };
    var row = new BigQueryInsertRow { ["guid"] = guid, ["job"] = jobRecord };
    _fixture.InsertAndWait(table, () => table.InsertRow(row), 1);

    string sql = $"SELECT job FROM {table} WHERE guid=@guid";
    var parameters = new[] { new BigQueryParameter("guid", BigQueryDbType.String, guid) };
    var fetchedRow = client.ExecuteQuery(sql, parameters).Single();

    // Records come back as dictionaries keyed by field name.
    var job = (Dictionary<string, object>)fetchedRow["job"];
    Assert.Equal("Pet Store", (string)job["company"]);
    Assert.Equal(new[] { "cashier", "manager" }, (string[])job["roles"]);
}
// Snippet test: demonstrates UpdateBucket by fetching a bucket, changing its
// website configuration, and pushing the full resource back.
public void UpdateBucket()
{
    // Arrange: create a fresh bucket to mutate. GCS bucket names must be globally unique.
    var projectId = _fixture.ProjectId;
    var setupClient = StorageClient.Create();
    var bucketName = IdGenerator.FromGuid();
    setupClient.CreateBucket(projectId, bucketName);
    StorageSnippetFixture.SleepAfterBucketCreateDelete();
    _fixture.RegisterBucketToDelete(bucketName);

    // Snippet: UpdateBucket
    var client = StorageClient.Create();
    var bucket = client.GetBucket(bucketName);
    bucket.Website = new Bucket.WebsiteData
    {
        MainPageSuffix = "index.html",
        NotFoundPage = "404.html"
    };
    client.UpdateBucket(bucket);
    // End snippet

    // Fetch the bucket again to check that the change "stuck".
    var fetchedBucket = client.GetBucket(bucketName);
    Assert.Equal(bucketName, fetchedBucket.Name);
    Assert.Equal(bucket.Website.MainPageSuffix, fetchedBucket.Website.MainPageSuffix);
}
// Uploads an object, replaces its content (and content type) via the
// Object-based upload overload, and checks generation behaviour plus
// per-generation reads on a versioned bucket.
public void ReplaceObject()
{
    var client = _fixture.Client;
    var bucket = _fixture.MultiVersionBucket;
    var objectName = IdGenerator.FromGuid();
    var contentType = "application/octet-stream";

    var originalData = GenerateData(100);
    var firstVersion = client.UploadObject(bucket, objectName, contentType, originalData);
    ValidateData(_fixture.MultiVersionBucket, objectName, originalData);

    var replacementData = GenerateData(50);
    firstVersion.ContentType = "application/x-replaced";
    // Clear hash and cache information, as we're changing the data.
    firstVersion.Crc32c = null;
    firstVersion.ETag = null;
    firstVersion.Md5Hash = null;
    var secondVersion = client.UploadObject(firstVersion, replacementData);
    ValidateData(_fixture.MultiVersionBucket, objectName, replacementData);

    Assert.NotEqual(firstVersion.Generation, secondVersion.Generation);
    // The modified content type should stick.
    Assert.Equal(firstVersion.ContentType, secondVersion.ContentType);

    // When we ask for the first generation, we get the original data back.
    var firstGenerationData = new MemoryStream();
    client.DownloadObject(firstVersion, firstGenerationData, new DownloadObjectOptions { Generation = firstVersion.Generation }, null);
    Assert.Equal(originalData.ToArray(), firstGenerationData.ToArray());
}
// A log written inside a format-parameter scope must carry both the parameter
// value and the original format string in its parent_scopes payload.
public async Task Logging_ScopeFormatParameter()
{
    string testId = IdGenerator.FromGuid();
    using (var server = GetTestServer<NoBufferWarningLoggerTestApplication>())
    using (var client = server.CreateClient())
    {
        await client.GetAsync($"/Main/ScopeFormatParameters/{testId}");
    }
    _fixture.AddValidator(testId, results =>
    {
        var expectedMessage = EntryData.GetMessage(nameof(MainController.ScopeFormatParameters), testId);
        var payloadFields = results.Single().JsonPayload.Fields;
        Assert.Equal(expectedMessage, payloadFields["message"].StringValue);

        var parentScopes = payloadFields["parent_scopes"]?.ListValue?.Values;
        Assert.NotNull(parentScopes);

        // The scope struct should contain the parameter and the {OriginalFormat} entry.
        var expectedScope = Value.ForStruct(new Struct
        {
            Fields =
            {
                { "id", Value.ForString(testId) },
                { "{OriginalFormat}", Value.ForString(nameof(MainController.ScopeFormatParameters) + " - {id}") }
            }
        });
        Assert.Single(parentScopes, expectedScope);
    });
}
// A request carrying an explicit trace header must have its log entry attached
// to that trace, with the request span parented to the header's span.
public async Task Logging_Trace_FromHeader_Implicit()
{
    string traceId = s_traceIdFactory.NextId();
    ulong spanId = s_spanIdFactory.NextId();
    string testId = IdGenerator.FromGuid();
    using (var server = GetTestServer<NoBufferWarningLoggerTestApplication>())
    using (var client = server.CreateClient())
    {
        client.DefaultRequestHeaders.Add(
            TraceHeaderContext.TraceHeader,
            TraceHeaderContext.Create(traceId, spanId, true).ToString());
        await client.GetAsync($"/Main/Critical/{testId}");
    }
    _fixture.AddValidator(testId, results =>
    {
        // We only have one log entry.
        LogEntry entry = Assert.Single(results);
        // Its trace resource name points to the trace we specified on the header.
        Assert.Contains(TestEnvironment.GetTestProjectId(), entry.Trace);
        Assert.Contains(traceId, entry.Trace);

        // Fetch that trace.
        var trace = s_tracePolling.GetTrace(traceId);
        Assert.NotNull(trace);

        // The span associated to our entry needs to be part of that trace.
        // (We created this span on the middleware to encompass the whole request.)
        var entrySpan = Assert.Single(trace.Spans, s => EntryData.SpanIdToHex(s.SpanId) == entry.SpanId);
        // And its parent needs to be the span specified in the header.
        Assert.Equal(spanId, entrySpan.ParentSpanId);
    });
}
// Overriding the adapter's SelectCommand must not break updates: load one row
// via a custom SELECT, update it, and verify the change round-trips.
public void AdapterOverrideSelect()
{
    RetryHelpers.RetryOnce(() =>
    {
        using (var connection = _fixture.GetConnection())
        {
            var adapter = new SpannerDataAdapter(connection, _fixture.TableName, "Key")
            {
                SelectCommand = connection.CreateSelectCommand(
                    $"SELECT * FROM {_fixture.TableName} WHERE Key='k2'")
            };

            // Load: the overridden SELECT should return exactly one row.
            var dataSet = new DataSet();
            adapter.Fill(dataSet);
            Assert.Equal(1, dataSet.Tables[0].Rows.Count);

            // Update and reload (updates still work even with an overridden SelectCommand).
            string updatedValue = IdGenerator.FromGuid();
            dataSet.Tables[0].Rows[0]["StringValue"] = updatedValue;
            adapter.Update(dataSet);
            dataSet.Clear();
            adapter.Fill(dataSet);
            Assert.Equal(updatedValue, dataSet.Tables[0].Rows[0]["StringValue"]);
        }
    });
}
/// <summary>
/// Loads the table through a SpannerDataAdapter, updates one row's StringValue,
/// reloads, and verifies the updated value is visible on the row with the same key.
/// Fix: removed a dead index-search loop whose loop variable was never used —
/// the LINQ FirstOrDefault below performs the same lookup.
/// </summary>
public void AdapterUpdate()
{
    RetryHelpers.RetryOnce(() =>
    {
        using (var connection = _fixture.GetConnection())
        {
            var adapter = new SpannerDataAdapter(connection, _fixture.TableName, "Key");

            // Load.
            var testDataSet = new DataSet();
            adapter.Fill(testDataSet);

            // Update the second row, then reload. Row order after reload is not
            // guaranteed, so we find the row again by its key.
            var newValue = IdGenerator.FromGuid();
            var oldKey = testDataSet.Tables[0].Rows[1]["Key"];
            testDataSet.Tables[0].Rows[1]["StringValue"] = newValue;
            adapter.Update(testDataSet);
            testDataSet.Clear();
            adapter.Fill(testDataSet);

            var row = testDataSet.Tables[0].Rows.Cast<DataRow>()
                .FirstOrDefault(r => r["Key"].Equals(oldKey));
            Assert.NotNull(row);
            Assert.Equal(newValue, row["StringValue"]);
        }
    });
}
/// <summary>
/// Inserts one row per key in <paramref name="rowKeys"/> into <paramref name="tableName"/>,
/// writing an incrementing counter (appended to <paramref name="valuePrefix"/>) into the
/// given column, and asserts that every mutation succeeded.
/// Fix: removed an unused local row key (IdGenerator.FromGuid()) that was never referenced.
/// </summary>
public async Task InsertRowsAsync(
    TableName tableName,
    IEnumerable<BigtableByteString> rowKeys,
    string familyName = null,
    BigtableByteString? qualifierName = null,
    BigtableByteString? valuePrefix = null,
    BigtableVersion? version = null)
{
    // Fall back to the fixture defaults for anything the caller didn't specify.
    familyName = familyName ?? DefaultColumnFamily;
    qualifierName = qualifierName ?? "row_index";
    valuePrefix = valuePrefix ?? "";
    int counter = 0;
    var response = await TableClient.MutateRowsAsync(
        tableName,
        rowKeys.Select(k =>
            Mutations.CreateEntry(
                k.Value,
                Mutations.SetCell(
                    familyName,
                    qualifierName.Value,
                    // Each row's cell value is the prefix plus its ordinal index.
                    valuePrefix.Value.Value.Concat(
                        new BigtableByteString(counter++).Value),
                    version))).ToArray());
    var entries = response.Entries.OrderBy(e => e.Index);
    Assert.All(entries, e => Assert.Equal((int)Code.Ok, e.Status.Code));
}
/// <summary>
/// Inserts a single cell into a freshly generated row key, verifies the cell
/// round-trips, and returns the generated row key.
/// </summary>
public async Task<BigtableByteString> InsertRowAsync(
    TableName tableName,
    string familyName = null,
    BigtableByteString? qualifierName = null,
    BigtableByteString? value = null,
    BigtableVersion? version = null)
{
    BigtableByteString rowKey = IdGenerator.FromGuid();
    // Fall back to the fixture defaults for anything the caller didn't specify.
    familyName = familyName ?? DefaultColumnFamily;
    qualifierName = qualifierName ?? DefaultColumnQualifier;
    value = value ?? DefaultValue;

    await TableClient.MutateRowAsync(
        tableName,
        rowKey,
        Mutations.SetCell(familyName, qualifierName.Value, value.Value, version));

    await BigtableAssert.HasSingleValueAsync(
        TableClient,
        tableName,
        rowKey,
        familyName,
        qualifierName.Value,
        value.Value,
        version);
    return rowKey;
}
// Delay test: a resumable-upload signed URL can initiate and resume a session
// before expiry; after expiry, initiating a session fails and nothing is uploaded.
private void ResumableUploadResumeTest_InitDelayTest()
{
    var bucket = _fixture.SingleVersionBucket;
    var objectName = IdGenerator.FromGuid();
    var content = _fixture.SmallContent;
    string signedUrl = null;
    _fixture.RegisterDelayTest(
        _duration,
        beforeDelay: async duration =>
        {
            signedUrl = _fixture.UrlSigner.Sign(bucket, objectName, duration, UrlSigner.ResumableHttpMethod);
            var sessionUri = await SignedUrlResumableUpload.InitiateSessionAsync(signedUrl);

            // Verify that the URL works initially.
            var uploader = ResumableUpload.CreateFromUploadUri(sessionUri, new MemoryStream(content));
            var progress = await uploader.ResumeAsync(sessionUri);
            Assert.Null(progress.Exception);
            Assert.Equal(UploadStatus.Completed, progress.Status);
            var downloaded = new MemoryStream();
            await _fixture.Client.DownloadObjectAsync(bucket, objectName, downloaded);
            AssertContentEqual(content, downloaded.ToArray());

            // Reset the state for the post-delay check.
            await _fixture.Client.DeleteObjectAsync(bucket, objectName);
        },
        afterDelay: async () =>
        {
            // Verify that the URL no longer works, and that the object was not recreated.
            await Assert.ThrowsAsync<GoogleApiException>(() => SignedUrlResumableUpload.InitiateSessionAsync(signedUrl));
            var uploaded = await _fixture.Client.ListObjectsAsync(bucket, objectName).FirstOrDefault(o => o.Name == objectName);
            Assert.Null(uploaded);
        });
}
// Splitting a container's binary payload out and reassembling it must reproduce
// the original container and data blob exactly.
public void RoundtripPreservesData()
{
    var splitter = new BinaryDataObjectSplitter(nameof(DataBlob.Data));
    var id = IdGenerator.FromGuid();
    var dataBlob = new DataBlob(id, new byte[] { 0x42, 0x43, 0x44 }, "myFile.bin");
    var payload = DataEncoder.Encode(JsonConvert.SerializeObject(dataBlob));
    var utcNow = DateTime.UtcNow;
    var container = new GenericDataContainer(id, "jdoe", utcNow, "jdoe", utcNow, "1.5.9", payload);

    // Splitting must succeed and extract the binary payload.
    BinaryDataObjectSplitterResult result = null;
    Assert.That(() => result = splitter.Split(container), Throws.Nothing);
    Assert.That(result, Is.Not.Null);
    Assert.That(result.BinaryData, Is.EqualTo(dataBlob.Data));

    // The stripped container keeps all metadata but blanks the binary field.
    var strippedContainer = result.ContainerWithoutBinaryData;
    Assert.That(strippedContainer.Id, Is.EqualTo(container.Id));
    Assert.That(strippedContainer.Submitter, Is.EqualTo(container.Submitter));
    Assert.That(strippedContainer.SubmissionTimeUtc, Is.EqualTo(container.SubmissionTimeUtc));
    Assert.That(strippedContainer.ApiVersion, Is.EqualTo(container.ApiVersion));
    Assert.That(strippedContainer.Data, Is.Not.Null);
    Assert.That(strippedContainer.Data.GetValue(nameof(DataBlob.Data)), Is.EqualTo(BsonString.Empty));

    // Reassembly restores metadata and the original bytes.
    var reassembledContainer = splitter.Reassemble(strippedContainer, result.BinaryData);
    Assert.That(reassembledContainer.Id, Is.EqualTo(container.Id));
    Assert.That(reassembledContainer.Submitter, Is.EqualTo(container.Submitter));
    Assert.That(reassembledContainer.SubmissionTimeUtc, Is.EqualTo(container.SubmissionTimeUtc));
    Assert.That(reassembledContainer.ApiVersion, Is.EqualTo(container.ApiVersion));
    Assert.That(reassembledContainer.Data, Is.Not.Null);
    var reassembledDataBlob = JsonConvert.DeserializeObject<DataBlob>(reassembledContainer.Data.ToJson());
    Assert.That(reassembledDataBlob.Id, Is.EqualTo(dataBlob.Id));
    Assert.That(reassembledDataBlob.Filename, Is.EqualTo(dataBlob.Filename));
    Assert.That(reassembledDataBlob.Data, Is.EqualTo(dataBlob.Data));
}
// A request that logs inside one span must produce a single entry attached to
// the trace created during the call, parented to the middleware request span.
public async Task Logging_Trace()
{
    Timestamp startTime = Timestamp.FromDateTime(DateTime.UtcNow);
    string testId = IdGenerator.FromGuid();
    string spanPrefix;
    using (var server = GetTestServer<NoBufferWarningLoggerTracesAllTestApplication>())
    using (var client = server.CreateClient())
    {
        spanPrefix = await client.GetStringAsync($"/Main/{nameof(MainController.LogsInOneSpan)}/{testId}");
    }
    _fixture.AddValidator(testId, results =>
    {
        // Fetch the trace created during the request.
        var trace = s_tracePolling.GetTrace(spanPrefix, startTime);
        Assert.NotNull(trace);

        // We only have one log entry...
        LogEntry entry = Assert.Single(results);
        // ...and its trace resource name points at the trace created during the call.
        Assert.Contains(TestEnvironment.GetTestProjectId(), entry.Trace);
        Assert.Contains(trace.TraceId, entry.Trace);

        // The span associated to our entry needs to be part of that trace.
        // (We created this span on the action call.)
        var entrySpan = Assert.Single(trace.Spans, s => EntryData.SpanIdToHex(s.SpanId) == entry.SpanId);
        // And its parent is a span we create on the middleware to encompass the whole request.
        Assert.NotEqual((ulong)0, entrySpan.ParentSpanId);
    });
}
// Delay test: a signed resumable-upload URL that declares customer-supplied
// encryption (CSEK) headers must allow an encrypted upload before the signed
// duration elapses and fail afterwards. The URL is signed inside beforeDelay,
// so the expiry clock starts there.
private static void ResumableUploadWithCustomerSuppliedEncryptionKeysTest_Common(StorageFixture fixture, SigningVersion signingVersion, [CallerMemberName] string caller = null)
{
    var bucket = fixture.SingleVersionBucket;
    var name = IdGenerator.FromGuid();
    // The signed request must include the encryption-algorithm header so the
    // upload session can carry a customer-supplied key.
    var requestTemplate = RequestTemplate
        .FromBucket(bucket)
        .WithObjectName(name)
        .WithHttpMethod(ResumableHttpMethod)
        .WithRequestHeaders(new Dictionary<string, IEnumerable<string>> { { "x-goog-encryption-algorithm", new[] { "AES256" } } });
    var content = fixture.SmallContent;
    string url = null;
    EncryptionKey key = EncryptionKey.Generate();
    fixture.RegisterDelayTest(
        s_duration,
        beforeDelay: async duration =>
        {
            url = fixture.UrlSigner.Sign(requestTemplate, Options.FromDuration(duration).WithSigningVersion(signingVersion));

            // Verify that the URL works initially.
            var uploader = SignedUrlResumableUpload.Create(
                url,
                new MemoryStream(content),
                new ResumableUploadOptions { ModifySessionInitiationRequest = key.ModifyRequest });
            var progress = await uploader.UploadAsync();
            Assert.Null(progress.Exception);
            Assert.Equal(UploadStatus.Completed, progress.Status);

            // Make sure the encryption succeeded: a download without the key must
            // fail, and a download with the key must return the original content.
            var downloadedData = new MemoryStream();
            await Assert.ThrowsAsync<GoogleApiException>(
                () => fixture.Client.DownloadObjectAsync(bucket, name, downloadedData));
            await fixture.Client.DownloadObjectAsync(bucket, name, downloadedData, new DownloadObjectOptions { EncryptionKey = key });
            AssertContentEqual(content, downloadedData.ToArray());
        },
        afterDelay: async () =>
        {
            var uploader = SignedUrlResumableUpload.Create(
                url,
                new MemoryStream(content),
                new ResumableUploadOptions { ModifySessionInitiationRequest = key.ModifyRequest });

            // Verify that the URL no longer works.
            var progress = await uploader.UploadAsync();
            Assert.Equal(UploadStatus.Failed, progress.Status);
            Assert.IsType<GoogleApiException>(progress.Exception);
        },
        caller);
}
// A request that logs inside three different spans must produce three entries,
// each attached to the correct span, with the expected parent/child structure.
public async Task Logging_Trace_MultipleSpans()
{
    Timestamp startTime = Timestamp.FromDateTime(DateTime.UtcNow);
    string testId = IdGenerator.FromGuid();
    string spanPrefix;
    using (var server = GetTestServer<NoBufferWarningLoggerTracesAllTestApplication>())
    using (var client = server.CreateClient())
    {
        spanPrefix = await client.GetStringAsync($"/Main/{nameof(MainController.LogsInDifferentSpans)}/{testId}");
    }
    _fixture.AddValidator(testId, results =>
    {
        // The action produces this structure:
        // Span: span-1
        //   Log: span-1
        //   Span: span-1-2
        //     Log: span-1-2
        // Span: span-2
        //   Log: span-2
        string projectId = TestEnvironment.GetTestProjectId();

        // Let's get our trace.
        var trace = s_tracePolling.GetTrace(spanPrefix, startTime);
        Assert.NotNull(trace);

        // We have 3 logs.
        Assert.Equal(3, results.Count);
        // And the resource name of the trace associated to all of them points to the trace
        // created during the call.
        Assert.All(results, entry =>
        {
            Assert.Contains(projectId, entry.Trace);
            Assert.Contains(trace.TraceId, entry.Trace);
        });

        // Let's check that all the entries are associated to the correct spans.
        var logEntry1 = Assert.Single(results, e => e.JsonPayload.Fields["message"].StringValue.EndsWith("log-1"));
        var logEntry12 = Assert.Single(results, e => e.JsonPayload.Fields["message"].StringValue.EndsWith("log-1-2"));
        var logEntry2 = Assert.Single(results, e => e.JsonPayload.Fields["message"].StringValue.EndsWith("log-2"));
        var span1 = Assert.Single(trace.Spans, s => EntryData.SpanIdToHex(s.SpanId) == logEntry1.SpanId);
        Assert.EndsWith("span-1", span1.Name);
        var span12 = Assert.Single(trace.Spans, s => EntryData.SpanIdToHex(s.SpanId) == logEntry12.SpanId);
        Assert.EndsWith("span-1-2", span12.Name);
        var span2 = Assert.Single(trace.Spans, s => EntryData.SpanIdToHex(s.SpanId) == logEntry2.SpanId);
        Assert.EndsWith("span-2", span2.Name);

        // Let's check that the spans are correctly created.
        // span-1-2 is a child of span-1
        Assert.Equal(span12.ParentSpanId, span1.SpanId);
        // span-1 and span-2 have the same parent
        Assert.Equal(span1.ParentSpanId, span2.ParentSpanId);
        // The parent of span-1 and span-2 exists, it's the span we created on the middleware
        // to encompass the whole request.
        Assert.NotEqual((ulong)0, span1.ParentSpanId);
    });
}
// Snippet test: demonstrates PatchBucket by sending only the fields to change,
// without fetching the bucket resource first.
public void PatchBucket()
{
    // Arrange: create a fresh bucket to patch. GCS bucket names must be globally unique.
    var projectId = _fixture.ProjectId;
    var setupClient = StorageClient.Create();
    var bucketName = IdGenerator.FromGuid();
    setupClient.CreateBucket(projectId, bucketName);
    StorageSnippetFixture.SleepAfterBucketCreateDelete();
    _fixture.RegisterBucketToDelete(bucketName);

    // Snippet: PatchBucket
    var client = StorageClient.Create();
    // Note: no fetching of the bucket beforehand. We only specify the values we want
    // to change.
    var bucket = new Bucket
    {
        Name = bucketName,
        Website = new Bucket.WebsiteData
        {
            MainPageSuffix = "index.html",
            NotFoundPage = "404.html"
        }
    };
    client.PatchBucket(bucket);
    // End snippet

    // Fetch the bucket to check that the change "stuck".
    var fetchedBucket = client.GetBucket(bucketName);
    Assert.Equal(bucketName, fetchedBucket.Name);
    Assert.Equal(bucket.Website.MainPageSuffix, fetchedBucket.Website.MainPageSuffix);
}
/// <summary>
/// When the server supplies an external trace context, the single log entry
/// written by the request must be associated with the external trace and span.
/// Fix: removed an unused startTime local that was computed but never read.
/// </summary>
public async Task Logging_Trace_External_OneEntry(TestServer server)
{
    string testId = IdGenerator.FromGuid();
    string url = $"/Main/Critical/{testId}";
    using (var client = server.CreateClient())
    {
        await client.GetAsync(url);
    }
    // The server is owned by this test (passed in for configuration), so dispose it here.
    server.Dispose();
    _fixture.AddValidator(testId, results =>
    {
        // We only have one log entry.
        LogEntry entry = Assert.Single(results);
        // And the resource name of the trace associated to it contains the external trace id.
        Assert.Contains(TestEnvironment.GetTestProjectId(), entry.Trace);
        Assert.Contains("external_trace_id", entry.Trace);
        // The span associated to our entry is the external span.
        Assert.Equal("external_span_number1", entry.SpanId);
    });
}
/// <summary>
/// Inserts three rows in parallel inside a single TransactionScope and then
/// verifies all three keys were committed.
/// Fixes: (1) the insert lambda declared its command with 'using var' and
/// returned the pending Task, so the command was disposed before the insert
/// completed — the lambda is now async and awaits inside the using scope;
/// (2) the TransactionScope is created with TransactionScopeAsyncFlowOption.Enabled
/// so the ambient transaction flows across awaits.
/// </summary>
public async Task ParallelWriteAsync()
{
    string[] keys = new string[]
    {
        IdGenerator.FromGuid(),
        IdGenerator.FromGuid(),
        IdGenerator.FromGuid()
    };
    await RetryHelpers.ExecuteWithRetryAsync(async () =>
    {
        using var scope = new TransactionScope(TransactionScopeAsyncFlowOption.Enabled);
        using var connection = _fixture.GetConnection();
        await connection.OpenAsync();
        await Task.WhenAll(keys.Select(async key =>
        {
            using var cmd = connection.CreateInsertCommand(_fixture.TableName);
            cmd.Parameters.Add("K", SpannerDbType.String).Value = key;
            cmd.Parameters.Add("StringValue", SpannerDbType.String).Value = "text";
            // Await here so the command isn't disposed while the insert is in flight.
            await cmd.ExecuteNonQueryAsync();
        }));
        scope.Complete();
    });

    // Read the inserted values.
    using var connection = _fixture.GetConnection();
    using var command = connection.CreateSelectCommand($"SELECT COUNT(*) AS C FROM {_fixture.TableName} WHERE K IN UNNEST(@Keys)");
    command.Parameters.Add("Keys", SpannerDbType.ArrayOf(SpannerDbType.String)).Value = keys;
    using var reader = command.ExecuteReader();
    Assert.True(reader.Read());
    Assert.Equal(keys.Length, reader.GetInt32(0));
}
/// <summary>
/// When the server supplies an external trace context, all three log entries
/// written by the request must be associated with the external trace, each with
/// the matching numbered external span.
/// Fix: removed an unused startTime local that was computed but never read.
/// </summary>
public async Task Logging_Trace_External_MultipleEntries(TestServer server)
{
    string testId = IdGenerator.FromGuid();
    string url = $"/Main/LogsThreeEntries/{testId}";
    using (var client = server.CreateClient())
    {
        await client.GetAsync(url);
    }
    // The server is owned by this test (passed in for configuration), so dispose it here.
    server.Dispose();
    _fixture.AddValidator(testId, results =>
    {
        string projectId = TestEnvironment.GetTestProjectId();
        // We have three log entries.
        Assert.Equal(3, results.Count);
        // And the resource name of the trace associated to them contains the external trace id.
        Assert.All(results, entry =>
        {
            Assert.Contains(projectId, entry.Trace);
            Assert.Contains("external_trace_id", entry.Trace);
        });
        // The span associated to each entry is the external span with the same
        // number as the log entry.
        Assert.All(results, entry =>
            Assert.Equal($"external_span_number{entry.JsonPayload.Fields["message"].StringValue.Last()}", entry.SpanId));
    });
}
// Inserts a row with a repeated (array) field and reads it back flattened via UNNEST.
public void InsertRow_RepeatedField()
{
    var client = BigQueryClient.Create(_fixture.ProjectId);
    var table = client.GetDataset(_fixture.DatasetId).GetTable(_fixture.ComplexTypesTableId);
    var guid = IdGenerator.FromGuid();
    var row = new BigQueryInsertRow
    {
        ["guid"] = guid,
        ["tags"] = new[] { "a", "b" }
    };
    _fixture.InsertAndWait(table, () => table.InsertRow(row), 1);

    string sql = $"SELECT guid, tag FROM {table}, UNNEST(tags) AS tag WHERE guid=@guid ORDER BY tag";
    var parameters = new[] { new BigQueryParameter("guid", BigQueryDbType.String, guid) };
    var resultRows = client.ExecuteQuery(sql, parameters)
        .Select(r => new { Guid = (string)r["guid"], Tag = (string)r["tag"] })
        .ToList();
    var expectedResults = new[]
    {
        new { Guid = guid, Tag = "a" },
        new { Guid = guid, Tag = "b" }
    };
    Assert.Equal(expectedResults, resultRows);
}
// The warning-level logger must drop Debug/Info requests and record only
// Warning, Error and Critical entries.
public async Task Logging_WarningPlus()
{
    string testId = IdGenerator.FromGuid();
    using (TestServer server = GetTestServer<NoBufferWarningLoggerTestApplication>())
    using (var client = server.CreateClient())
    {
        await client.GetAsync($"/Main/Debug/{testId}");
        await client.GetAsync($"/Main/Info/{testId}");
        await client.GetAsync($"/Main/Warning/{testId}");
        await client.GetAsync($"/Main/Error/{testId}");
        await client.GetAsync($"/Main/Critical/{testId}");
    }
    // NoBufferLoggerTestApplication does not support debug or info logs, so only
    // the last three requests produce entries.
    _fixture.AddValidator(testId, results =>
    {
        Assert.Equal(3, results.Count());
        Assert.NotNull(results.FirstOrDefault(entry => entry.Severity == LogSeverity.Warning));
        Assert.NotNull(results.FirstOrDefault(entry => entry.Severity == LogSeverity.Error));
        Assert.NotNull(results.FirstOrDefault(entry => entry.Severity == LogSeverity.Critical));
    });
}
// Verifies that mutations against two different tables issued inside one
// transaction are committed atomically, then reads both rows back.
public void MultiTableWrite()
{
    // For simplicity, use a new key so that this test is entirely self-contained.
    string key = IdGenerator.FromGuid();
    RetryHelpers.ExecuteWithRetry(() =>
    {
        using (var connection = _fixture.GetConnection())
        {
            connection.Open();
            using (var transaction = connection.BeginTransaction())
            {
                // First table: string value.
                using (var cmd1 = connection.CreateInsertCommand(_fixture.TableName))
                {
                    cmd1.Transaction = transaction;
                    cmd1.Parameters.Add("K", SpannerDbType.String).Value = key;
                    cmd1.Parameters.Add("StringValue", SpannerDbType.String).Value = "text";
                    cmd1.ExecuteNonQuery();
                }
                // Second table: integer value, same key.
                using (var cmd2 = connection.CreateInsertCommand(_fixture.TableName2))
                {
                    cmd2.Transaction = transaction;
                    cmd2.Parameters.Add("K", SpannerDbType.String).Value = key;
                    cmd2.Parameters.Add("Int64Value", SpannerDbType.Int64).Value = 50;
                    cmd2.ExecuteNonQuery();
                }
                // Commit mutations from both commands, atomically.
                transaction.Commit();
            }
        }
    });

    // Read the values from both tables
    using (var connection = _fixture.GetConnection())
    {
        using (var command = connection.CreateSelectCommand($"SELECT * FROM {_fixture.TableName} WHERE K=@Key"))
        {
            command.Parameters.Add("Key", SpannerDbType.String).Value = key;
            using (var reader = command.ExecuteReader())
            {
                // Exactly one row with the inserted string value.
                Assert.True(reader.Read());
                Assert.Equal("text", reader["StringValue"]);
                Assert.False(reader.Read());
            }
        }
        using (var command = connection.CreateSelectCommand($"SELECT * FROM {_fixture.TableName2} WHERE K=@Key"))
        {
            command.Parameters.Add("Key", SpannerDbType.String).Value = key;
            using (var reader = command.ExecuteReader())
            {
                // Exactly one row with the inserted integer value.
                Assert.True(reader.Read());
                Assert.Equal(50L, reader["Int64Value"]);
                Assert.False(reader.Read());
            }
        }
    }
}
// Exactly one diagnostics line should be written regardless of how many log
// entries the requests produce.
public async Task Logging_DiagnosticsOutput()
{
    StringWriter writer;
    using (var server = GetTestServer<DiagnosticsOutputLoggerTestApplication>())
    using (var client = server.CreateClient())
    {
        writer = GetServices(server).GetRequiredService<TextWriter>() as StringWriter;
        Assert.NotNull(writer);
        await client.GetAsync($"/Main/Warning/{IdGenerator.FromGuid()}");
        await client.GetAsync($"/Main/Warning/{IdGenerator.FromGuid()}");
        await client.GetAsync($"/Main/Warning/{IdGenerator.FromGuid()}");
    }

    // Despite multiple log entries (several per call), we should only have one diagnostic log.
    var lines = writer.ToString().Split('\n');
    Assert.Equal(2, lines.Length);
    Assert.Contains("GoogleLogger will write logs", lines[0]);
    Assert.Empty(lines[1]);
}
/// <summary>
/// Writes a single "canary" log entry and waits for it to be visible. This is written
/// after all the entries from tests, so by the time this is visible, it's "reasonably likely"
/// that all the test log messages are also visible.
/// </summary>
private void LogCanaryMessageAndWait()
{
    DateTime startTime = DateTime.UtcNow;
    // The canary is identified by a unique message payload.
    string id = IdGenerator.FromGuid();
    LogEntry entry = new LogEntry
    {
        Resource = MonitoredResourceBuilder.FromPlatform(),
        LogName = $"projects/{_projectId}/logs/aspnetcore",
        Severity = Logging.Type.LogSeverity.Info,
        Timestamp = Timestamp.FromDateTime(DateTime.UtcNow),
        JsonPayload = new Struct { Fields = { ["message"] = Value.ForString(id) } }
    };
    _client.WriteLogEntries((LogName)null, null, null, new[] { entry });

    // Narrow the listing request to entries carrying the canary id.
    var request = BuildRequest(startTime);
    request.Filter += $" AND jsonPayload.message:\"{id}\"";

    // Wait for the canary log entry to be visible.
    var endTime = startTime + s_canaryMessageTimeout;
    while (DateTime.UtcNow < endTime)
    {
        FileLogger.Log("Listing log entries to find the canary");
        if (_client.ListLogEntries(request).Any())
        {
            return;
        }
        Thread.Sleep(s_delayBetweenCanaryAttempts);
    }
    throw new Exception("Canary message never seen.");
}
// Issues 250 iterations of six log-producing requests and validates both the
// severity filtering (no Debug/Info) and the per-severity counts.
public async Task Logging_ManyEntries()
{
    string testId = IdGenerator.FromGuid();
    using (TestServer server = GetTestServer<NoBufferWarningLoggerTestApplication>())
    using (var client = server.CreateClient())
    {
        for (int i = 0; i < 250; i++)
        {
            await client.GetAsync($"/Main/Debug/{testId}");
            await client.GetAsync($"/Main/Info/{testId}");
            await client.GetAsync($"/Main/Warning/{testId}");
            await client.GetAsync($"/Main/Error/{testId}");
            await client.GetAsync($"/Main/Critical/{testId}");
            await client.GetAsync($"/Main/Exception/{testId}");
        }
    }
    _fixture.AddValidator(testId, results =>
    {
        // Debug and Info are filtered out, leaving 4 entries per iteration.
        Assert.Equal(1000, results.Count);
        Assert.All(results, entry =>
        {
            Assert.NotEqual(LogSeverity.Debug, entry.Severity);
            Assert.NotEqual(LogSeverity.Info, entry.Severity);
        });
        Assert.Equal(250, results.Count(entry => entry.Severity == LogSeverity.Warning));
        Assert.Equal(250, results.Count(entry => entry.Severity == LogSeverity.Error));
        // Exception and Critical both log at Critical severity.
        Assert.Equal(500, results.Count(entry => entry.Severity == LogSeverity.Critical));
    });
}
// Copies each generation of a two-generation source object into its own
// destination object and validates the copied contents.
public void CopySpecificGeneration()
{
    var client = _fixture.Client;
    var sourceBucket = _fixture.ReadBucket;
    var sourceName = _fixture.SmallThenLargeObject;
    var destBucket = _fixture.SingleVersionBucket;
    var firstGenName = IdGenerator.FromGuid();
    var secondGenName = IdGenerator.FromGuid();

    // The fixture object is expected to have exactly two generations.
    var generations = client
        .ListObjects(sourceBucket, sourceName, new ListObjectsOptions { Versions = true })
        .Select(o => (long)o.Generation)
        .OrderBy(o => o)
        .ToList();
    Assert.Equal(2, generations.Count);

    client.CopyObject(sourceBucket, sourceName, destBucket, firstGenName, new CopyObjectOptions { SourceGeneration = generations[0] });
    client.CopyObject(sourceBucket, sourceName, destBucket, secondGenName, new CopyObjectOptions { SourceGeneration = generations[1] });

    ValidateData(destBucket, firstGenName, _fixture.SmallContent);
    ValidateData(destBucket, secondGenName, _fixture.LargeContent);
}
// End-to-end create/duplicate/overwrite/delete cycle against the SQL-backed
// RD data storage.
public async Task CanCreateAndDeleteData()
{
    var dataType = "SqlUnitTestObject";
    var utcNow = DateTime.UtcNow;
    var container = new GenericDataContainer(
        IdGenerator.FromGuid(),
        "jdoe",
        utcNow,
        "jdoe",
        utcNow,
        ApiVersion.Current,
        BsonDocument.Parse("{ Name : 'Jan', Address : { Street : 'Teglholm Tværvej', Number : 27 }}"));
    var storage = CreateMssqlRdDataStorage();

    // The initial store must create the document.
    StoreResult storeResult = null;
    Assert.That(async () => storeResult = await storage.StoreAsync(dataType, container, false), Throws.Nothing);
    Assert.That(storeResult.ModificationType, Is.EqualTo(DataModificationType.Created));

    // The stored container round-trips (timestamps within SQL precision).
    var retrievedContainer = await storage.GetFromIdAsync(dataType, storeResult.Id);
    Assert.That(retrievedContainer.Submitter, Is.EqualTo(container.Submitter));
    Assert.That(retrievedContainer.SubmissionTimeUtc, Is.EqualTo(container.SubmissionTimeUtc).Within(TimeSpan.FromSeconds(1)));
    Assert.That(retrievedContainer.Data.GetValue("Name").AsString, Is.EqualTo(container.Data.GetValue("Name").AsString));

    // Cannot add document a second time without overwrite...
    Assert.That(async () => storeResult = await storage.StoreAsync(dataType, container, false), Throws.TypeOf<DocumentAlreadyExistsException>());
    // ...but can overwrite the document explicitly.
    Assert.That(async () => storeResult = await storage.StoreAsync(dataType, container, true), Throws.Nothing);
    Assert.That(storeResult.ModificationType, Is.EqualTo(DataModificationType.Replaced));

    var isDeleted = await storage.DeleteDataContainerAsync(dataType, storeResult.Id);
    Assert.That(isDeleted, Is.True);
}