/// <summary>
/// Verifies that <see cref="ETag.Parse(string)"/> yields a tag whose default
/// and "G"-formatted string representations both equal the expected value.
/// </summary>
public void ParsesEtag(string value, string expectedValue)
{
    ETag tag = ETag.Parse(value);

    Assert.AreEqual(expectedValue, tag.ToString());
    Assert.AreEqual(expectedValue, tag.ToString("G"));
}
/// <summary>
/// Verifies that the exact string instance passed to the <see cref="ETag"/>
/// constructor is returned (reference equality) by ToString() and ToString("G").
/// </summary>
public void StringRoundtrips(string value)
{
    var eTag = new ETag(value);

    // AreSame asserts reference identity, not just equal contents.
    Assert.AreSame(value, eTag.ToString());
    Assert.AreSame(value, eTag.ToString("G"));
}
/// <summary>
/// Verifies that a literal tag string round-trips through <see cref="ETag"/>
/// as the same string instance (no defensive copy).
/// </summary>
public void StringRoundtrips()
{
    var original = "tag";
    var eTag = new ETag(original);

    Assert.AreSame(original, eTag.ToString());
}
/// <summary>
/// Deletes the setting with the given key (and optional label) from the service.
/// </summary>
/// <param name="key">The setting key; must be non-null and non-empty.</param>
/// <param name="label">Optional label narrowing which setting to delete.</param>
/// <param name="etag">When supplied, the delete is conditional on the current entity
/// matching this ETag (sent as an If-Match header).</param>
/// <param name="cancellation">Token used to cancel the request.</param>
/// <returns>The raw service <see cref="Response"/> when the status is 200 or 204.</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="key"/> is null or empty.</exception>
/// <exception cref="RequestFailedException">Thrown for any other status code.</exception>
public async Task<Response> DeleteAsync(string key, string label = default, ETag etag = default, CancellationToken cancellation = default)
{
    if (string.IsNullOrEmpty(key))
    {
        throw new ArgumentNullException(nameof(key));
    }

    using (var request = _pipeline.CreateRequest())
    {
        request.Method = HttpPipelineMethod.Delete;
        BuildUriForKvRoute(request.UriBuilder, key, label);

        if (etag != default)
        {
            // HTTP requires the If-Match ETag value to be quoted; interpolation
            // already calls ToString(), so no explicit call is needed.
            request.AddHeader(IfMatchName, $"\"{etag}\"");
        }

        var response = await _pipeline.SendRequestAsync(request, cancellation).ConfigureAwait(false);

        if (response.Status == 200 || response.Status == 204)
        {
            return response;
        }

        throw new RequestFailedException(response);
    }
}
/// <summary>
/// Wraps an ETag value with quotes if it is not already quoted and is not a
/// weak ETag (which starts with "W").
/// This is a workaround for https://github.com/Azure/azure-sdk-for-net/issues/22877
/// </summary>
/// <param name="value">ETag to wrap with quotes if needed.</param>
/// <returns>ETag string wrapped with quotes; a null/empty string is returned unchanged.</returns>
public static string GetHttpSafeETag(this ETag value)
{
    //TODO Remove when https://github.com/Azure/azure-sdk-for-net/issues/22877 is fixed
    var eTag = value.ToString();
    if (!string.IsNullOrEmpty(eTag))
    {
        // ETags are HTTP protocol tokens, so compare ordinally instead of with
        // the current culture (fixes CA1310: culture-sensitive StartsWith).
        if (!(eTag.StartsWith("\"", StringComparison.Ordinal) || eTag.StartsWith("W", StringComparison.Ordinal)))
        {
            eTag = $"\"{eTag}\"";
        }
    }

    return eTag;
}
/// <summary>
/// Queues a delete-entity operation on the current batch changeset.
/// </summary>
/// <param name="partitionKey">Partition key of the entity to delete.</param>
/// <param name="rowKey">Row key of the entity to delete.</param>
/// <param name="ifMatch">Optional ETag making the delete conditional.</param>
public virtual void DeleteEntity(string partitionKey, string rowKey, ETag ifMatch = default)
{
    var options = new QueryOptions() { Format = _format };

    _batchOperations.AddDeleteEntityRequest(
        _changeset,
        _table,
        partitionKey,
        rowKey,
        ifMatch.ToString(),
        null,
        null,
        queryOptions: options);
}
/// <summary>
/// Queues an update-entity operation on the current batch changeset.
/// </summary>
/// <param name="entity">The entity to update; its partition and row keys identify the target.</param>
/// <param name="ifMatch">ETag making the update conditional on the stored entity.</param>
/// <param name="mode">Whether to merge into or replace the stored entity.</param>
public virtual void UpdateEntity<T>(T entity, ETag ifMatch, TableUpdateMode mode = TableUpdateMode.Merge) where T : class, ITableEntity, new()
{
    var options = new QueryOptions() { Format = _format };
    var properties = entity.ToOdataAnnotatedDictionary();

    _batchOperations.AddUpdateEntityRequest(
        _changeset,
        _table,
        entity.PartitionKey,
        entity.RowKey,
        null,
        null,
        ifMatch.ToString(),
        tableEntityProperties: properties,
        queryOptions: options);
}
/// <summary>
/// Builds a DELETE request for the key-value route, adding a quoted If-Match
/// header when an ETag is supplied.
/// </summary>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="key"/> is null or empty.</exception>
private Request CreateDeleteRequest(string key, string label, ETag etag)
{
    if (string.IsNullOrEmpty(key))
    {
        throw new ArgumentNullException(nameof(key));
    }

    Request request = _pipeline.CreateRequest();
    request.Method = RequestMethod.Delete;
    BuildUriForKvRoute(request.UriBuilder, key, label);

    if (etag != default)
    {
        // If-Match ETag values must be quoted per the HTTP spec.
        request.Headers.Add(IfMatchName, $"\"{etag}\"");
    }

    return request;
}
// NOTE(review): this method is truncated in this view — the body continues past
// the visible text (no closing braces follow AddAuthenticationHeaders), so it is
// left byte-identical. Visible behavior: validates the key, builds the KV-route
// URI, issues a DELETE with a quoted If-Match header when an ETag is supplied,
// and adds client-request-id and authentication headers before sending.
public async Task <Response> DeleteAsync(string key, string label = default, ETag etag = default, CancellationToken cancellation = default) { if (string.IsNullOrEmpty(key)) { throw new ArgumentNullException(nameof(key)); } Uri uri = BuildUriForKvRoute(key, label); using (var request = _pipeline.CreateRequest()) { request.SetRequestLine(HttpVerb.Delete, uri); if (etag != default) { request.AddHeader(IfMatchName, $"\"{etag.ToString()}\""); } AddClientRequestID(request); AddAuthenticationHeaders(request, uri, HttpVerb.Delete, content: default, _secret, _credential);
/// <summary>
/// Deletes a skillset, optionally only when it is unchanged (conditional on ETag),
/// wrapping the call in a diagnostic scope.
/// </summary>
private Response DeleteSkillset(
    string skillsetName,
    ETag? etag,
    bool onlyIfUnchanged,
    CancellationToken cancellationToken)
{
    using DiagnosticScope scope = _clientDiagnostics.CreateScope($"{nameof(SearchIndexerClient)}.{nameof(DeleteSkillset)}");
    scope.Start();
    try
    {
        // Only send an If-Match condition when the caller asked for it.
        string ifMatch = onlyIfUnchanged ? etag?.ToString() : null;
        return SkillsetsClient.Delete(
            skillsetName,
            ifMatch,
            null,
            cancellationToken);
    }
    catch (Exception ex)
    {
        scope.Failed(ex);
        throw;
    }
}
/// <summary>
/// Deletes a data source connection, optionally only when it is unchanged
/// (conditional on ETag), wrapping the call in a diagnostic scope.
/// </summary>
private async Task<Response> DeleteDataSourceConnectionAsync(
    string dataSourceConnectionName,
    ETag? etag,
    bool onlyIfUnchanged,
    CancellationToken cancellationToken)
{
    using DiagnosticScope scope = _clientDiagnostics.CreateScope($"{nameof(SearchIndexerClient)}.{nameof(DeleteDataSourceConnection)}");
    scope.Start();
    try
    {
        // Only send an If-Match condition when the caller asked for it.
        string ifMatch = onlyIfUnchanged ? etag?.ToString() : null;
        Response response = await DataSourcesClient.DeleteAsync(
            dataSourceConnectionName,
            ifMatch,
            null,
            cancellationToken).ConfigureAwait(false);
        return response;
    }
    catch (Exception ex)
    {
        scope.Failed(ex);
        throw;
    }
}
// Test: Chunk.Next() materializes a BlobChangeFeedEvent from an Avro record.
// Arrange builds a fully-populated change-feed record dictionary and strict mocks
// for the container/blob clients, the lazy-loading data/head streams, and the
// AvroReader (HasNext/Initalize/Next/BlockOffset/ObjectIndex). Act reads one
// event via ChunkFactory.BuildChunk + chunk.Next. Assert checks every mapped
// event/EventData property (topic, etag, URIs, offsets, ...) and verifies each
// mock interaction, including the stream window sizes passed to the stream
// factory. Left byte-identical: the setup order and sequenced mock returns are
// order-sensitive, so only this header comment is added.
public async Task Next() { // Arrange string chunkPath = "chunkPath"; long blockOffset = 5; long eventIndex = 10; string topic = "topic"; string subject = "subject"; string eventType = "BlobCreated"; DateTimeOffset eventTime = new DateTimeOffset(2020, 4, 30, 8, 26, 30, TimeSpan.Zero); Guid eventId = Guid.NewGuid(); long dataVersion = 1; string metadataVersion = "1"; string api = "PutBlob"; string clientRequestId = $"Azure-Storage-Powershell-{Guid.NewGuid()}"; Guid requestId = Guid.NewGuid(); ETag etag = new ETag("0x8D75EF45A3B8617"); string contentType = "contentType"; long contentLength = Constants.KB; string blobType = "BlockBlob"; long contentOffset = 5; Uri destinationUri = new Uri("https://www.destination.com"); Uri sourceUri = new Uri("https://www.source.com"); Uri uri = new Uri("https://www.uri.com"); bool recursive = true; string sequencer = "sequencer"; Dictionary <string, object> record = new Dictionary <string, object> { { Constants.ChangeFeed.Event.Topic, topic }, { Constants.ChangeFeed.Event.Subject, subject }, { Constants.ChangeFeed.Event.EventType, eventType }, { Constants.ChangeFeed.Event.EventTime, eventTime.ToString() }, { Constants.ChangeFeed.Event.EventId, eventId.ToString() }, { Constants.ChangeFeed.Event.SchemaVersion, dataVersion }, { Constants.ChangeFeed.Event.MetadataVersion, metadataVersion }, { Constants.ChangeFeed.Event.Data, new Dictionary <string, object> { { Constants.ChangeFeed.EventData.Api, api }, { Constants.ChangeFeed.EventData.ClientRequestId, clientRequestId.ToString() }, { Constants.ChangeFeed.EventData.RequestId, requestId.ToString() }, { Constants.ChangeFeed.EventData.Etag, etag.ToString() }, { Constants.ChangeFeed.EventData.ContentType, contentType }, { Constants.ChangeFeed.EventData.ContentLength, contentLength }, { Constants.ChangeFeed.EventData.BlobType, blobType }, { Constants.ChangeFeed.EventData.ContentOffset, contentOffset }, { Constants.ChangeFeed.EventData.DestinationUrl, destinationUri.ToString() }, { 
Constants.ChangeFeed.EventData.SourceUrl, sourceUri.ToString() }, { Constants.ChangeFeed.EventData.Url, uri.ToString() }, { Constants.ChangeFeed.EventData.Recursive, recursive }, { Constants.ChangeFeed.EventData.Sequencer, sequencer } } } }; Mock <BlobContainerClient> containerClient = new Mock <BlobContainerClient>(MockBehavior.Strict); Mock <BlobClient> blobClient = new Mock <BlobClient>(MockBehavior.Strict); Mock <AvroReaderFactory> avroReaderFactory = new Mock <AvroReaderFactory>(MockBehavior.Strict); Mock <AvroReader> avroReader = new Mock <AvroReader>(MockBehavior.Strict); Mock <LazyLoadingBlobStreamFactory> lazyLoadingBlobStreamFactory = new Mock <LazyLoadingBlobStreamFactory>(MockBehavior.Strict); Mock <LazyLoadingBlobStream> dataStream = new Mock <LazyLoadingBlobStream>(MockBehavior.Strict); Mock <LazyLoadingBlobStream> headStream = new Mock <LazyLoadingBlobStream>(MockBehavior.Strict); containerClient.Setup(r => r.GetBlobClient(It.IsAny <string>())).Returns(blobClient.Object); lazyLoadingBlobStreamFactory.SetupSequence(r => r.BuildLazyLoadingBlobStream( It.IsAny <BlobClient>(), It.IsAny <long>(), It.IsAny <long>())) .Returns(dataStream.Object) .Returns(headStream.Object); avroReaderFactory.Setup(r => r.BuildAvroReader( It.IsAny <Stream>(), It.IsAny <Stream>(), It.IsAny <long>(), It.IsAny <long>())).Returns(avroReader.Object); avroReader.Setup(r => r.HasNext()).Returns(true); avroReader.Setup(r => r.Initalize(It.IsAny <bool>(), It.IsAny <CancellationToken>())).Returns(Task.CompletedTask); avroReader.Setup(r => r.Next( It.IsAny <bool>(), It.IsAny <CancellationToken>())) .ReturnsAsync(record); avroReader.Setup(r => r.BlockOffset).Returns(blockOffset); avroReader.Setup(r => r.ObjectIndex).Returns(eventIndex); ChunkFactory chunkFactory = new ChunkFactory( containerClient.Object, lazyLoadingBlobStreamFactory.Object, avroReaderFactory.Object); Chunk chunk = await chunkFactory.BuildChunk( IsAsync, chunkPath, blockOffset, eventIndex); // Act BlobChangeFeedEvent 
changeFeedEvent = await chunk.Next(IsAsync); // Assert Assert.AreEqual(topic, changeFeedEvent.Topic); Assert.AreEqual(subject, changeFeedEvent.Subject); Assert.AreEqual(BlobChangeFeedEventType.BlobCreated, changeFeedEvent.EventType); Assert.AreEqual(eventTime, changeFeedEvent.EventTime); Assert.AreEqual(eventId, changeFeedEvent.Id); Assert.AreEqual(dataVersion, changeFeedEvent.SchemaVersion); Assert.AreEqual(metadataVersion, changeFeedEvent.MetadataVersion); Assert.AreEqual(BlobOperationName.PutBlob, changeFeedEvent.EventData.BlobOperationName); Assert.AreEqual(clientRequestId, changeFeedEvent.EventData.ClientRequestId); Assert.AreEqual(requestId, changeFeedEvent.EventData.RequestId); Assert.AreEqual(etag, changeFeedEvent.EventData.ETag); Assert.AreEqual(contentType, changeFeedEvent.EventData.ContentType); Assert.AreEqual(contentLength, changeFeedEvent.EventData.ContentLength); Assert.AreEqual(BlobType.Block, changeFeedEvent.EventData.BlobType); Assert.AreEqual(contentOffset, changeFeedEvent.EventData.ContentOffset); Assert.AreEqual(destinationUri, changeFeedEvent.EventData.DestinationUri); Assert.AreEqual(sourceUri, changeFeedEvent.EventData.SourceUri); Assert.AreEqual(uri, changeFeedEvent.EventData.Uri); Assert.AreEqual(recursive, changeFeedEvent.EventData.Recursive); Assert.AreEqual(sequencer, changeFeedEvent.EventData.Sequencer); containerClient.Verify(r => r.GetBlobClient(chunkPath)); lazyLoadingBlobStreamFactory.Verify(r => r.BuildLazyLoadingBlobStream( blobClient.Object, blockOffset, Constants.ChangeFeed.ChunkBlockDownloadSize)); lazyLoadingBlobStreamFactory.Verify(r => r.BuildLazyLoadingBlobStream( blobClient.Object, 0, 3 * Constants.KB)); avroReaderFactory.Verify(r => r.BuildAvroReader( dataStream.Object, headStream.Object, blockOffset, eventIndex)); avroReader.Verify(r => r.HasNext()); avroReader.Verify(r => r.Next( IsAsync, default)); avroReader.Verify(r => r.BlockOffset); avroReader.Verify(r => r.ObjectIndex); }
/// <summary>
/// Builds the attribute name/value map for this membership record.
/// String attributes are omitted when blank; numeric attributes use the "N" slot.
/// </summary>
/// <param name="includeKeys">When true, the table key attributes
/// (deployment id and silo identity) are included as well.</param>
/// <returns>The populated attribute dictionary.</returns>
public Dictionary<string, AttributeValue> GetFields(bool includeKeys = false)
{
    var fields = new Dictionary<string, AttributeValue>();

    // Adds a string attribute only when it carries a non-blank value.
    void AddString(string name, string value)
    {
        if (!string.IsNullOrWhiteSpace(value))
        {
            fields.Add(name, new AttributeValue(value));
        }
    }

    // Adds a numeric ("N") attribute unconditionally.
    void AddNumber(string name, string value) => fields.Add(name, new AttributeValue { N = value });

    if (includeKeys)
    {
        fields.Add(DEPLOYMENT_ID_PROPERTY_NAME, new AttributeValue(DeploymentId));
        fields.Add(SILO_IDENTITY_PROPERTY_NAME, new AttributeValue(SiloIdentity));
    }

    AddString(ADDRESS_PROPERTY_NAME, Address);
    AddNumber(PORT_PROPERTY_NAME, Port.ToString());
    AddNumber(GENERATION_PROPERTY_NAME, Generation.ToString());
    AddString(HOSTNAME_PROPERTY_NAME, HostName);
    AddNumber(STATUS_PROPERTY_NAME, Status.ToString());
    AddNumber(PROXY_PORT_PROPERTY_NAME, ProxyPort.ToString());
    AddString(SILO_NAME_PROPERTY_NAME, SiloName);
    AddString(SUSPECTING_SILOS_PROPERTY_NAME, SuspectingSilos);
    AddString(SUSPECTING_TIMES_PROPERTY_NAME, SuspectingTimes);
    AddString(START_TIME_PROPERTY_NAME, StartTime);
    AddString(I_AM_ALIVE_TIME_PROPERTY_NAME, IAmAliveTime);
    AddNumber(MEMBERSHIP_VERSION_PROPERTY_NAME, MembershipVersion.ToString());
    AddNumber(ETAG_PROPERTY_NAME, ETag.ToString());

    return fields;
}
/// <summary>
/// Verifies that ToString with an unrecognized format specifier throws
/// <see cref="ArgumentException"/>.
/// </summary>
public void InvalidFormatThrows(string format)
{
    var tag = new ETag("foo");

    Assert.Throws<ArgumentException>(() => tag.ToString(format));
}
/// <summary>
/// Verifies that an <see cref="ETag"/> built from a null/absent value yields
/// an empty string representation.
/// </summary>
public void NullValueHasNoStringValue(string value)
{
    var tag = new ETag(value);

    Assert.That(tag.ToString(), Is.Empty);
}