public void Dispose()
{
    var robot = new RetryRobot()
    {
        MaxTryCount = 10,
        ShouldRetry = (e) => true,
    };
    // Retry the bucket deletion on any exception, up to 10 attempts.
    robot.Eventually(() => { _storage.DeleteBucket(BucketName); });
}
public void Dispose()
{
    var robot = new RetryRobot()
    {
        MaxTryCount = 10,
        ShouldRetry = (e) => true,
    };
    // Delete every object the tests recorded in the garbage list, retrying
    // each delete on any exception, up to 10 attempts.
    foreach (var bucket in _garbage)
    {
        foreach (var objectName in bucket.Value)
        {
            robot.Eventually(() => { _storage.DeleteObject(bucket.Key, objectName); });
        }
    }
    _garbage.Clear();
}
public void Dispose()
{
    RetryRobot robot = new RetryRobot()
    {
        MaxTryCount = 10,
        ShouldRetry = (e) => true,
    };
    foreach (KeyValuePair<string, SortedSet<string>> bucket in _garbage)
    {
        foreach (string objectName in bucket.Value)
        {
            robot.Eventually(() => { _storage.DeleteObject(bucket.Key, objectName); });
        }
    }
    _garbage.Clear();
}
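RetryRobot is a small test utility from the samples' own test code, not a published library class. The sketch below is a minimal reconstruction inferred only from how it is used in these methods (MaxTryCount, ShouldRetry, FirstRetryDelayMs, Eventually); the doubling delay between attempts is an assumption, not the helper's documented behavior.

using System;
using System.Threading;

// Minimal sketch of the RetryRobot test helper, inferred from its usage above.
// The real helper in the samples repo may differ; the exponential backoff
// between attempts is an assumption.
public class RetryRobot
{
    public int FirstRetryDelayMs { get; set; } = 1000;
    public int MaxTryCount { get; set; } = 7;
    public Func<Exception, bool> ShouldRetry { get; set; } = (e) => true;

    public void Eventually(Action action)
    {
        int delayMs = FirstRetryDelayMs;
        for (int tryCount = 1; ; tryCount++)
        {
            try
            {
                action();
                return;  // Success: stop retrying.
            }
            catch (Exception e) when (tryCount < MaxTryCount && ShouldRetry(e))
            {
                // Wait, then retry with a doubled delay (assumed backoff policy).
                Thread.Sleep(delayMs);
                delayMs *= 2;
            }
        }
    }
}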
public void TestImportDataFromStream()
{
    string datasetId = "datasetForTestImportDataFromStream";
    string newTableId = "tableForTestImportDataFromStream" + RandomSuffix();
    string gcsUploadTestWord = "exampleJsonFromStream";
    string valueToTest = "";
    _tablesToDelete.Add(new Tuple<string, string>(datasetId, newTableId));
    _datasetsToDelete.Add(datasetId);
    CreateDataset(datasetId, _client);
    CreateTable(datasetId, newTableId, _client);
    // Import data.
    UploadJsonStreaming(datasetId, newTableId, _client);
    // Query table to get first row and confirm it contains the expected value.
    var newTable = _client.GetTable(datasetId, newTableId);
    string query = $"SELECT title, unique_words FROM {newTable} ORDER BY title";
    try
    {
        var retryRobot = new RetryRobot();
        retryRobot.ShouldRetry = (ex) => ex is InvalidOperationException;
        retryRobot.FirstRetryDelayMs = 100;
        retryRobot.Eventually(() =>
        {
            BigQueryResults results = AsyncQuery(_projectId, datasetId, newTableId, query, _client);
            var row = results.First();
            valueToTest = row["title"].ToString();
        });
    }
    catch (Exception)
    {
        // All of the retries failed.
    }
    Assert.Equal(gcsUploadTestWord, valueToTest);
}
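The test above leans on several fixture helpers (CreateDataset, CreateTable, UploadJsonStreaming, AsyncQuery, RandomSuffix) that are not shown here. As a rough illustration of the import step only, the following is a hypothetical UploadJsonStreaming that loads one newline-delimited JSON row from an in-memory stream via Google.Cloud.BigQuery.V2's BigQueryClient.UploadJson; the two-column schema and the single hard-coded row are assumptions chosen to match the query and assertion in the test.

using System.IO;
using System.Text;
using Google.Cloud.BigQuery.V2;

// Hypothetical sketch of the UploadJsonStreaming helper used by the test above.
// It loads one newline-delimited JSON row from an in-memory stream; the schema
// (title, unique_words) is an assumption based on the test's query.
public void UploadJsonStreaming(string datasetId, string tableId, BigQueryClient client)
{
    var schema = new TableSchemaBuilder
    {
        { "title", BigQueryDbType.String },
        { "unique_words", BigQueryDbType.Int64 }
    }.Build();

    // One row whose title is the value the test asserts on.
    string json = "{\"title\": \"exampleJsonFromStream\", \"unique_words\": 1}";
    using (var stream = new MemoryStream(Encoding.UTF8.GetBytes(json)))
    {
        // UploadJson starts a load job from the stream; wait for it to finish
        // and surface any load errors.
        BigQueryJob job = client.UploadJson(datasetId, tableId, schema, stream);
        job.PollUntilCompleted().ThrowOnAnyError();
    }
}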