/// <summary>
/// Polls the service once for the latest Spark batch job status and caches the terminal
/// result. Shared implementation for both the sync and async public update paths: when
/// <paramref name="async"/> is false, the synchronous client call is used.
/// </summary>
/// <param name="async">Whether to call the service asynchronously.</param>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> controlling the request lifetime.</param>
/// <returns>The raw HTTP response from the most recent status request.</returns>
private async ValueTask<Response> UpdateStatusAsync(bool async, CancellationToken cancellationToken)
{
    // Once the operation reaches a terminal state its cached values are final;
    // skip further service calls.
    if (!_hasCompleted)
    {
        // NOTE(review): the scope is named after SparkSessionOperation, but the calls
        // below fetch a Spark *batch* job (_client.GetSparkBatchJob) — confirm the
        // scope name is intentional and not copied from a sibling operation type.
        using DiagnosticScope? scope = _diagnostics?.CreateScope($"{nameof(SparkSessionOperation)}.{nameof(UpdateStatus)}");
        scope?.Start();
        try
        {
            // Get the latest status
            Response<SparkBatchJob> update = async
                ? await _client.GetSparkBatchJobAsync(_batchId, true, cancellationToken).ConfigureAwait(false)
                : _client.GetSparkBatchJob(_batchId, true, cancellationToken);

            // Check if the operation is no longer running.
            // A missing Result is treated as "Uncertain" for the completion check.
            _hasCompleted = IsJobComplete(update.Value.Result ?? SparkBatchJobResultType.Uncertain, update.Value.State.Value, _completionType);
            if (_hasCompleted)
            {
                // Cache the final value so Value/HasValue observe the terminal state.
                _hasValue = true;
                _value = update.Value;
            }

            // Update raw response
            _rawResponse = update.GetRawResponse();
        }
        catch (Exception e)
        {
            scope?.Failed(e);
            throw;
        }
    }

    return (GetRawResponse());
}
/// <summary>
/// Recognizes values from one or more business cards.
/// <para>See <a href="https://aka.ms/formrecognizer/businesscardfields"/> for a list of available fields on a business card.</para>
/// </summary>
/// <param name="businessCard">The stream containing the one or more business cards to recognize values from.</param>
/// <param name="recognizeBusinessCardsOptions">A set of options available for configuring the recognize request.</param>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> controlling the request lifetime.</param>
/// <returns>A <see cref="RecognizeBusinessCardsOperation"/> to wait on this long-running operation. Its <see cref="RecognizeBusinessCardsOperation.Value"/> upon successful
/// completion will contain the extracted business cards.</returns>
public virtual RecognizeBusinessCardsOperation StartRecognizeBusinessCards(Stream businessCard, RecognizeBusinessCardsOptions recognizeBusinessCardsOptions = default, CancellationToken cancellationToken = default)
{
    Argument.AssertNotNull(businessCard, nameof(businessCard));

    recognizeBusinessCardsOptions ??= new RecognizeBusinessCardsOptions();

    using DiagnosticScope scope = Diagnostics.CreateScope($"{nameof(FormRecognizerClient)}.{nameof(StartRecognizeBusinessCards)}");
    scope.Start();

    try
    {
        // Fall back to sniffing the stream when the caller did not specify a content type.
        FormContentType contentType = recognizeBusinessCardsOptions.ContentType ?? DetectContentType(businessCard, nameof(businessCard));

        Response response = ServiceClient.AnalyzeBusinessCardAsync(contentType, businessCard, recognizeBusinessCardsOptions.IncludeFieldElements, recognizeBusinessCardsOptions.Locale, cancellationToken);

        // The Operation-Location header points at the long-running operation to poll.
        string operationLocation = ClientCommon.GetResponseHeader(response.Headers, Constants.OperationLocationHeader);
        return new RecognizeBusinessCardsOperation(ServiceClient, Diagnostics, operationLocation);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}
/// <summary>
/// Gets a list of tables from the storage account.
/// </summary>
/// <param name="select">Returns the desired properties of an entity from the set.</param>
/// <param name="filter">Returns only tables or entities that satisfy the specified filter.</param>
/// <param name="top">Returns only the top n tables or entities from the set.</param>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> controlling the request lifetime.</param>
/// <returns>An <see cref="AsyncPageable{T}"/> of <see cref="TableItem"/> describing the tables in the account.</returns>
public virtual AsyncPageable<TableItem> GetTablesAsync(string select = null, string filter = null, int? top = null, CancellationToken cancellationToken = default)
{
    // The pageable is lazy: no request is made until it is enumerated. The diagnostic
    // scope is therefore created inside each page callback (matching the pattern used
    // by other pageable methods in this source) rather than around the return, where a
    // 'using' scope would be disposed before any request runs and enumeration-time
    // failures would never be reported to the scope.
    return PageableHelpers.CreateAsyncEnumerable(async _ =>
    {
        using DiagnosticScope scope = _diagnostics.CreateScope($"{nameof(TableServiceClient)}.{nameof(GetTables)}");
        scope.Start();
        try
        {
            var response = await _tableOperations.QueryAsync(
                null,
                null,
                new QueryOptions() { Filter = filter, Select = select, Top = top, Format = _format },
                cancellationToken).ConfigureAwait(false);
            return Page.FromValues(response.Value.Value, response.Headers.XMsContinuationNextTableName, response.GetRawResponse());
        }
        catch (Exception ex)
        {
            scope.Failed(ex);
            throw;
        }
    }, async (nextLink, _) =>
    {
        using DiagnosticScope scope = _diagnostics.CreateScope($"{nameof(TableServiceClient)}.{nameof(GetTables)}");
        scope.Start();
        try
        {
            // Continuation request: nextLink carries the continuation table name.
            var response = await _tableOperations.QueryAsync(
                null,
                nextTableName: nextLink,
                new QueryOptions() { Filter = filter, Select = select, Top = top, Format = _format },
                cancellationToken).ConfigureAwait(false);
            return Page.FromValues(response.Value.Value, response.Headers.XMsContinuationNextTableName, response.GetRawResponse());
        }
        catch (Exception ex)
        {
            scope.Failed(ex);
            throw;
        }
    });
}
/// <summary>
/// Creates or updates a role definition.
/// </summary>
/// <param name="options">An instance of <see cref="CreateOrUpdateRoleDefinitionOptions"/> that define parameters for creating a role definition.</param>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> controlling the request lifetime.</param>
/// <returns>A <see cref="Task{TResult}"/> representing the result of the asynchronous operation.</returns>
/// <exception cref="ArgumentNullException"><paramref name="options"/> is null.</exception>
public virtual async Task<Response<KeyVaultRoleDefinition>> CreateOrUpdateRoleDefinitionAsync(CreateOrUpdateRoleDefinitionOptions options, CancellationToken cancellationToken = default)
{
    Argument.AssertNotNull(options, nameof(options));

    using DiagnosticScope scope = _diagnostics.CreateScope($"{nameof(KeyVaultAccessControlClient)}.{nameof(CreateOrUpdateRoleDefinition)}");
    scope.Start();
    try
    {
        RoleDefinitionCreateParameters parameters = options.ToParameters(KeyVaultRoleType.CustomRole);

        // Default to the global scope when the caller did not specify one.
        string roleScope = options.RoleScope != default ? options.RoleScope.ToString() : KeyVaultRoleScope.Global.ToString();

        return await _definitionsRestClient.CreateOrUpdateAsync(
            vaultBaseUrl: VaultUri.AbsoluteUri,
            scope: roleScope,
            roleDefinitionName: options.RoleDefinitionName.ToString(),
            parameters: parameters,
            cancellationToken: cancellationToken).ConfigureAwait(false);
    }
    catch (Exception ex)
    {
        scope.Failed(ex);
        throw;
    }
}
/// <summary>
/// Encrypts the specified plaintext using the service-side key.
/// </summary>
/// <param name="algorithm">The <see cref="EncryptionAlgorithm"/> to use.</param>
/// <param name="plaintext">The data to encrypt.</param>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> controlling the request lifetime.</param>
/// <returns>The result of the encrypt operation.</returns>
public virtual Response<EncryptResult> Encrypt(EncryptionAlgorithm algorithm, byte[] plaintext, CancellationToken cancellationToken = default)
{
    var encryptParameters = new KeyEncryptParameters()
    {
        Algorithm = algorithm.ToString(),
        Value = plaintext,
    };

    using DiagnosticScope scope = Pipeline.CreateScope($"{nameof(RemoteCryptographyClient)}.{nameof(Encrypt)}");
    scope.AddAttribute("key", _keyId);
    scope.Start();

    try
    {
        // POST to the key's /encrypt endpoint, binding the algorithm into the result.
        return Pipeline.SendRequest(RequestMethod.Post, encryptParameters, () => new EncryptResult { Algorithm = algorithm }, cancellationToken, "/encrypt");
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}
/// <summary>
/// Gets a list of all alias definitions available for a search service.
/// </summary>
/// <param name="cancellationToken">Optional <see cref="CancellationToken"/> to propagate notifications that the operation should be canceled.</param>
/// <returns>The <see cref="Pageable{T}"/> from the server containing a list of <see cref="SearchAlias"/> objects.</returns>
/// <exception cref="RequestFailedException">Thrown when a failure is returned by the Search service.</exception>
public virtual AsyncPageable<SearchAlias> GetAliasesAsync(CancellationToken cancellationToken = default)
{
    // The scope is created inside the page callback so that it wraps the actual
    // service call made during enumeration, not the (lazy) pageable construction.
    return PageResponseEnumerator.CreateAsyncEnumerable(async (continuationToken) =>
    {
        using DiagnosticScope scope = _clientDiagnostics.CreateScope($"{nameof(SearchIndexClient)}.{nameof(GetAliases)}");
        scope.Start();
        try
        {
            // The service returns all aliases in a single page.
            if (continuationToken != null)
            {
                throw new NotSupportedException("A continuation token is unsupported.");
            }

            Response<ListAliasesResult> listResponse = await AliasesClient.ListAsync(cancellationToken).ConfigureAwait(false);
            return Page<SearchAlias>.FromValues(listResponse.Value.Aliases, null, listResponse.GetRawResponse());
        }
        catch (Exception ex)
        {
            scope.Failed(ex);
            throw;
        }
    });
}
/// <summary>
/// Recognizes layout elements from one or more passed-in forms.
/// </summary>
/// <param name="form">The stream containing one or more forms to recognize elements from.</param>
/// <param name="recognizeOptions">A set of options available for configuring the recognize request.</param>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> controlling the request lifetime.</param>
/// <returns>A <see cref="RecognizeContentOperation"/> to wait on this long-running operation. Its <see cref="RecognizeContentOperation.Value"/> upon successful
/// completion will contain layout elements extracted from the form.</returns>
public virtual RecognizeContentOperation StartRecognizeContent(Stream form, RecognizeOptions recognizeOptions = default, CancellationToken cancellationToken = default)
{
    Argument.AssertNotNull(form, nameof(form));

    recognizeOptions ??= new RecognizeOptions();

    using DiagnosticScope scope = Diagnostics.CreateScope($"{nameof(FormRecognizerClient)}.{nameof(StartRecognizeContent)}");
    scope.Start();

    try
    {
        // Fall back to sniffing the stream when the caller did not specify a content type.
        FormContentType contentType = recognizeOptions.ContentType ?? DetectContentType(form, nameof(form));

        Response response = ServiceClient.AnalyzeLayoutAsync(contentType, form, cancellationToken);

        // The Operation-Location header points at the long-running operation to poll.
        string operationLocation = ClientCommon.GetResponseHeader(response.Headers, Constants.OperationLocationHeader);
        return new RecognizeContentOperation(ServiceClient, Diagnostics, operationLocation);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}
/// <summary>
/// Encrypts the specified plaintext using the service-side key.
/// </summary>
/// <param name="algorithm">The <see cref="EncryptionAlgorithm"/> to use.</param>
/// <param name="plaintext">The data to encrypt.</param>
/// <param name="iv">Optional initialization vector for algorithms that require one.</param>
/// <param name="authenticationData">Optional additional authenticated data for AEAD algorithms.</param>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> controlling the request lifetime.</param>
/// <returns>The result of the encrypt operation.</returns>
public virtual async Task<Response<EncryptResult>> EncryptAsync(EncryptionAlgorithm algorithm, byte[] plaintext, byte[] iv = default, byte[] authenticationData = default, CancellationToken cancellationToken = default)
{
    var parameters = new KeyEncryptParameters()
    {
        Algorithm = algorithm.ToString(),
        Value = plaintext,
        Iv = iv,
        AuthenticationData = authenticationData
    };

    // Use the same nameof-based scope name as the synchronous Encrypt overload so the
    // sync and async paths report an identical diagnostic scope; the previous
    // hard-coded namespace-qualified string diverged from it.
    using DiagnosticScope scope = Pipeline.CreateScope($"{nameof(RemoteCryptographyClient)}.{nameof(Encrypt)}");
    scope.AddAttribute("key", _keyId);
    scope.Start();

    try
    {
        return await Pipeline.SendRequestAsync(RequestMethod.Post, parameters, () => new EncryptResult { Algorithm = algorithm }, cancellationToken, "/encrypt").ConfigureAwait(false);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}
/// <summary>
/// Decrypts the specified ciphertext using the service-side key.
/// </summary>
/// <param name="algorithm">The <see cref="EncryptionAlgorithm"/> to use.</param>
/// <param name="ciphertext">The data to decrypt.</param>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> controlling the request lifetime.</param>
/// <returns>The result of the decrypt operation.</returns>
public virtual Response<DecryptResult> Decrypt(EncryptionAlgorithm algorithm, byte[] ciphertext, CancellationToken cancellationToken = default)
{
    var parameters = new KeyEncryptParameters()
    {
        Algorithm = algorithm.ToString(),
        Value = ciphertext,
    };

    // nameof-based scope name for consistency with the other crypto operations in
    // this client (e.g. Encrypt/WrapKey); replaces the hard-coded
    // namespace-qualified string.
    using DiagnosticScope scope = Pipeline.CreateScope($"{nameof(RemoteCryptographyClient)}.{nameof(Decrypt)}");
    scope.AddAttribute("key", _keyId);
    scope.Start();

    try
    {
        return Pipeline.SendRequest(RequestMethod.Post, parameters, () => new DecryptResult { Algorithm = algorithm }, cancellationToken, "/decrypt");
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}
/// <summary>
/// Trains a model from a collection of custom forms in a blob storage container.
/// </summary>
/// <param name="trainingFilesUri">An externally accessible Azure storage blob container Uri.
/// For more information see <a href="https://docs.microsoft.com/azure/cognitive-services/form-recognizer/build-training-data-set#upload-your-training-data"/>.</param>
/// <param name="useTrainingLabels">If <c>true</c>, use a label file created in the <link-to-label-tool-doc> to provide training-time labels for training a model. If <c>false</c>, the model will be trained from forms only.</param>
/// <param name="trainingOptions">A set of options available for configuring the training request.</param>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> controlling the request lifetime.</param>
/// <returns>
/// <para>A <see cref="TrainingOperation"/> to wait on this long-running operation. Its <see cref="TrainingOperation.Value"/> upon successful
/// completion will contain meta-data about the trained model.</para>
/// <para>Even if training fails, a model is created in the Form Recognizer account with an "invalid" status.
/// A <see cref="RequestFailedException"/> will be raised containing the modelId to access this invalid model.</para>
/// </returns>
public virtual TrainingOperation StartTraining(Uri trainingFilesUri, bool useTrainingLabels, TrainingOptions trainingOptions = default, CancellationToken cancellationToken = default)
{
    Argument.AssertNotNull(trainingFilesUri, nameof(trainingFilesUri));

    trainingOptions ??= new TrainingOptions();

    using DiagnosticScope scope = Diagnostics.CreateScope($"{nameof(FormTrainingClient)}.{nameof(StartTraining)}");
    scope.Start();

    try
    {
        var trainRequest = new TrainRequest(trainingFilesUri.AbsoluteUri)
        {
            SourceFilter = trainingOptions.TrainingFileFilter,
            UseLabelFile = useTrainingLabels
        };

        // Pass the caller's cancellation token through to the service call; previously
        // it was accepted by this method but never used.
        ResponseWithHeaders<ServiceTrainCustomModelAsyncHeaders> response = ServiceClient.TrainCustomModelAsync(trainRequest, cancellationToken);
        return new TrainingOperation(response.Headers.Location, ServiceClient, Diagnostics);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}
/// <summary> Play audio in the call. </summary>
/// <param name="options"> Play audio request. </param>
/// <param name="cancellationToken"> The cancellation token. </param>
/// <exception cref="RequestFailedException">The server returned an error. See <see cref="Exception.Message"/> for details returned from the server.</exception>
/// <exception cref="ArgumentNullException"> <paramref name="options"/> is null. </exception>
public virtual async Task<Response<PlayAudioResult>> PlayAudioAsync(PlayAudioOptions options, CancellationToken cancellationToken = default)
{
    // The scope intentionally uses the sync method name so sync/async share one scope.
    using DiagnosticScope scope = _clientDiagnostics.CreateScope($"{nameof(CallConnection)}.{nameof(PlayAudio)}");
    scope.Start();
    try
    {
        // Validation happens inside the try so failures are recorded on the scope.
        Argument.AssertNotNull(options, nameof(options));

        return await RestClient.PlayAudioAsync(
            callConnectionId: CallConnectionId,
            audioFileUri: options.AudioFileUri?.AbsoluteUri,
            loop: options.Loop,
            audioFileId: options.AudioFileId,
            callbackUri: options.CallbackUri?.AbsoluteUri,
            operationContext: options.OperationContext,
            cancellationToken: cancellationToken).ConfigureAwait(false);
    }
    catch (Exception exception)
    {
        scope.Failed(exception);
        throw;
    }
}
/// <summary>
/// Retrieves a <see cref="CryptographyClient"/> capable of performing cryptographic operations with the key represented by the specified <paramref name="keyId"/>.
/// </summary>
/// <param name="keyId">The key identifier of the key used by the created <see cref="CryptographyClient"/>.</param>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> controlling the request lifetime.</param>
/// <returns>A new <see cref="CryptographyClient"/> capable of performing cryptographic operations with the key represented by the specified <paramref name="keyId"/>.</returns>
/// <exception cref="ArgumentNullException"><paramref name="keyId"/> is null.</exception>
public virtual CryptographyClient Resolve(Uri keyId, CancellationToken cancellationToken = default)
{
    Argument.AssertNotNull(keyId, nameof(keyId));

    // nameof-based scope name, matching ResolveAsync; replaces the hard-coded
    // namespace-qualified string so the sync and async paths report the same scope.
    using DiagnosticScope scope = _clientDiagnostics.CreateScope($"{nameof(KeyResolver)}.{nameof(Resolve)}");
    scope.AddAttribute("key", keyId);
    scope.Start();
    try
    {
        // (Removed a duplicated Argument.AssertNotNull(keyId, ...) — already validated above.)
        KeyVaultKey key = GetKey(keyId, cancellationToken);
        KeyVaultPipeline pipeline = new KeyVaultPipeline(keyId, _apiVersion, _pipeline, _clientDiagnostics);

        // Prefer local operations when the full key material is available; otherwise
        // fall back to a service-backed client keyed by identifier only.
        return (key != null) ? new CryptographyClient(key, pipeline) : new CryptographyClient(keyId, pipeline);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}
/// <summary>
/// Retrieves a <see cref="CryptographyClient"/> capable of performing cryptographic operations with the key represented by the specified <paramref name="keyId"/>.
/// </summary>
/// <param name="keyId">The key identifier of the key used by the created <see cref="CryptographyClient"/>.</param>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> controlling the request lifetime.</param>
/// <returns>A new <see cref="CryptographyClient"/> capable of performing cryptographic operations with the key represented by the specified <paramref name="keyId"/>.</returns>
/// <exception cref="ArgumentNullException"><paramref name="keyId"/> is null.</exception>
public virtual async Task<CryptographyClient> ResolveAsync(Uri keyId, CancellationToken cancellationToken = default)
{
    Argument.AssertNotNull(keyId, nameof(keyId));

    using DiagnosticScope scope = _clientDiagnostics.CreateScope($"{nameof(KeyResolver)}.{nameof(Resolve)}");
    scope.AddAttribute("key", keyId);
    scope.Start();
    try
    {
        // (Removed a duplicated Argument.AssertNotNull(keyId, ...) — already validated above.)
        KeyVaultKey key = await GetKeyAsync(keyId, cancellationToken).ConfigureAwait(false);
        KeyVaultPipeline pipeline = new KeyVaultPipeline(keyId, _apiVersion, _pipeline, _clientDiagnostics);

        // Prefer local operations when the full key material is available; otherwise
        // fall back to a service-backed client keyed by identifier only.
        return (key != null) ? new CryptographyClient(key, pipeline) : new CryptographyClient(keyId, pipeline);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}
/// <summary>
/// Creates a composed model from a collection of existing models.
/// A composed model allows multiple models to be called with a single model ID. When a document is
/// submitted to be analyzed with a composed model ID, a classification step is first performed to
/// route it to the correct custom model.
/// </summary>
/// <param name="modelIds">List of model ids to use in the composed model.</param>
/// <param name="modelId">A unique ID for your composed model. If not specified, a model ID will be created for you.</param>
/// <param name="modelDescription">An optional description to add to the model.</param>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> controlling the request lifetime.</param>
/// <returns>
/// <para>A <see cref="BuildModelOperation"/> to wait on this long-running operation. Its Value upon successful
/// completion will contain meta-data about the composed model.</para>
/// </returns>
public virtual BuildModelOperation StartCreateComposedModel(IEnumerable<string> modelIds, string modelId = default, string modelDescription = default, CancellationToken cancellationToken = default)
{
    Argument.AssertNotNull(modelIds, nameof(modelIds));

    using DiagnosticScope scope = Diagnostics.CreateScope($"{nameof(DocumentModelAdministrationClient)}.{nameof(StartCreateComposedModel)}");
    scope.Start();

    try
    {
        // Generate an ID on the caller's behalf when none was supplied.
        modelId ??= Guid.NewGuid().ToString();

        var request = new ComposeDocumentModelRequest(modelId, ConvertToComponentModelInfo(modelIds))
        {
            Description = modelDescription
        };

        var serviceResponse = ServiceClient.ComposeDocumentModel(request, cancellationToken);
        return new BuildModelOperation(serviceResponse.Headers.OperationLocation, serviceResponse.GetRawResponse(), ServiceClient, Diagnostics);
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}
// Verifies nested client-scope suppression: depending on the activity kind, a scope
// nested inside another client scope is either suppressed (disabled, outer activity
// stays current) or allowed (enabled, nested activity becomes current). After the
// nested scope is disposed, the outer activity must be current again.
public void NestedClientActivitiesSuppressed(int kind, bool expectSuppression)
{
    using var listener = new TestDiagnosticListener("Azure.Clients");
    DiagnosticScopeFactory factory = new DiagnosticScopeFactory("Azure.Clients", "Microsoft.Azure.Core.Cool.Tests", true, true);

    using DiagnosticScope outerScope = factory.CreateScope("ClientName.ActivityName", (DiagnosticScope.ActivityKind)kind);
    outerScope.Start();

    // Deliberately not a 'using' declaration: the nested scope must be disposed
    // before the final assertion below runs.
    DiagnosticScope innerScope = factory.CreateScope("ClientName.NestedActivityName", (DiagnosticScope.ActivityKind)kind);
    innerScope.Start();

    if (expectSuppression)
    {
        Assert.IsFalse(innerScope.IsEnabled);
        Assert.AreEqual("ClientName.ActivityName", Activity.Current.OperationName);
    }
    else
    {
        Assert.IsTrue(innerScope.IsEnabled);
        Assert.AreEqual("ClientName.NestedActivityName", Activity.Current.OperationName);
    }

    innerScope.Dispose();

    Assert.AreEqual("ClientName.ActivityName", Activity.Current.OperationName);
}
/// <summary>
/// Signs the specified digest using the service-side key.
/// </summary>
/// <param name="algorithm">The <see cref="SignatureAlgorithm"/> to use.</param>
/// <param name="digest">The pre-hashed digest to sign.</param>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> controlling the request lifetime.</param>
/// <returns>The result of the sign operation.</returns>
public virtual Response<SignResult> Sign(SignatureAlgorithm algorithm, byte[] digest, CancellationToken cancellationToken = default)
{
    var parameters = new KeySignParameters
    {
        Algorithm = algorithm.ToString(),
        Digest = digest
    };

    // nameof-based scope name for consistency with the other crypto operations in
    // this client; replaces the hard-coded namespace-qualified string.
    using DiagnosticScope scope = Pipeline.CreateScope($"{nameof(RemoteCryptographyClient)}.{nameof(Sign)}");
    scope.AddAttribute("key", _keyId);
    scope.Start();

    try
    {
        return Pipeline.SendRequest(RequestMethod.Post, parameters, () => new SignResult { Algorithm = algorithm }, cancellationToken, "/sign");
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}
/// <summary> Play audio in the call. </summary>
/// <param name="callLegId"> The call leg id. </param>
/// <param name="options"> Play audio request. </param>
/// <param name="cancellationToken"> The cancellation token. </param>
/// <exception cref="RequestFailedException">The server returned an error. See <see cref="Exception.Message"/> for details returned from the server.</exception>
/// <exception cref="ArgumentNullException"> <paramref name="options"/> is null. </exception>
public virtual Response<PlayAudioResponse> PlayAudio(string callLegId, PlayAudioOptions options, CancellationToken cancellationToken = default)
{
    using DiagnosticScope scope = _clientDiagnostics.CreateScope($"{nameof(CallClient)}.{nameof(PlayAudio)}");
    scope.Start();
    try
    {
        // Validation happens inside the try so failures are recorded on the scope.
        Argument.AssertNotNull(options, nameof(options));

        return RestClient.PlayAudio(
            callId: callLegId,
            audioFileUri: options.AudioFileUri?.AbsoluteUri,
            loop: options.Loop,
            audioFileId: options.AudioFileId,
            callbackUri: options.CallbackUri?.AbsoluteUri,
            operationContext: options.OperationContext,
            cancellationToken: cancellationToken);
    }
    catch (Exception exception)
    {
        scope.Failed(exception);
        throw;
    }
}
// Verifies that passing a null resource name to DiagnosticScopeFactory still produces
// a well-formed W3C activity with matching Start/Stop events and no tags.
public void ResourceNameIsOptional()
{
    using var listener = new TestDiagnosticListener("Azure.Clients");
    DiagnosticScopeFactory factory = new DiagnosticScopeFactory("Azure.Clients", null, true);

    DiagnosticScope scope = factory.CreateScope("ActivityName");
    scope.Start();
    (string Key, object Value, DiagnosticListener) startEvent = listener.Events.Dequeue();

    // Capture the activity while it is still current; Dispose stops it.
    Activity activity = Activity.Current;
    scope.Dispose();
    (string Key, object Value, DiagnosticListener) stopEvent = listener.Events.Dequeue();

    Assert.Null(Activity.Current);
    Assert.AreEqual("ActivityName.Start", startEvent.Key);
    Assert.AreEqual("ActivityName.Stop", stopEvent.Key);
    Assert.AreEqual(ActivityIdFormat.W3C, activity.IdFormat);
    CollectionAssert.IsEmpty(activity.Tags);
}
/// <summary>
/// Wraps (encrypts) the specified key material using the service-side key.
/// </summary>
/// <param name="algorithm">The <see cref="KeyWrapAlgorithm"/> to use.</param>
/// <param name="key">The key material to wrap.</param>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> controlling the request lifetime.</param>
/// <returns>The result of the wrap operation.</returns>
public virtual Response<WrapResult> WrapKey(KeyWrapAlgorithm algorithm, byte[] key, CancellationToken cancellationToken = default)
{
    var wrapParameters = new KeyWrapParameters()
    {
        Algorithm = algorithm.ToString(),
        Key = key
    };

    using DiagnosticScope scope = Pipeline.CreateScope($"{nameof(RemoteCryptographyClient)}.{nameof(WrapKey)}");
    scope.AddAttribute("key", _keyId);
    scope.Start();

    try
    {
        // POST to the key's /wrapKey endpoint, binding the algorithm into the result.
        return Pipeline.SendRequest(RequestMethod.Post, wrapParameters, () => new WrapResult { Algorithm = algorithm }, cancellationToken, "/wrapKey");
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}
/// <summary>
/// Polls the underlying operation for its latest state inside a diagnostic scope,
/// attaching any configured scope attributes before the request is issued.
/// </summary>
/// <param name="async">Whether to perform the update asynchronously.</param>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> controlling the request lifetime.</param>
/// <returns>The raw response from the state update.</returns>
private async ValueTask<Response> UpdateStatusAsync(bool async, CancellationToken cancellationToken)
{
    using DiagnosticScope scope = _diagnostics.CreateScope(_updateStatusScopeName);

    // Copy any caller-configured attributes onto the scope before starting it.
    if (_scopeAttributes != null)
    {
        foreach (KeyValuePair<string, string> pair in _scopeAttributes)
        {
            scope.AddAttribute(pair.Key, pair.Value);
        }
    }

    scope.Start();
    try
    {
        return await UpdateStateAsync(async, cancellationToken).ConfigureAwait(false);
    }
    catch (Exception exception)
    {
        scope.Failed(exception);
        throw;
    }
}
/// <summary>
/// Unwraps (decrypts) the specified wrapped key material using the service-side key.
/// </summary>
/// <param name="algorithm">The <see cref="KeyWrapAlgorithm"/> to use.</param>
/// <param name="encryptedKey">The wrapped key material to unwrap.</param>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> controlling the request lifetime.</param>
/// <returns>The result of the unwrap operation.</returns>
public virtual Response<UnwrapResult> UnwrapKey(KeyWrapAlgorithm algorithm, byte[] encryptedKey, CancellationToken cancellationToken = default)
{
    var parameters = new KeyWrapParameters()
    {
        Algorithm = algorithm.ToString(),
        Key = encryptedKey
    };

    // nameof-based scope name for consistency with WrapKey and the other crypto
    // operations in this client; replaces the hard-coded namespace-qualified string.
    using DiagnosticScope scope = Pipeline.CreateScope($"{nameof(RemoteCryptographyClient)}.{nameof(UnwrapKey)}");
    scope.AddAttribute("key", _keyId);
    scope.Start();

    try
    {
        return Pipeline.SendRequest(RequestMethod.Post, parameters, () => new UnwrapResult { Algorithm = algorithm }, cancellationToken, "/unwrapKey");
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}
/// <summary>
/// Generate authorization for copying a custom model into the target Form Recognizer resource.
/// </summary>
/// <param name="modelId">A unique ID for your copied model. If not specified, a model ID will be created for you.</param>
/// <param name="modelDescription">An optional description to add to the model.</param>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> controlling the request lifetime.</param>
/// <returns>A <see cref="Response{T}"/> representing the result of the operation. It can be cast to <see cref="CopyAuthorization"/> containing
/// the authorization information necessary to copy a custom model into a target Form Recognizer resource.</returns>
public virtual async Task<Response<CopyAuthorization>> GetCopyAuthorizationAsync(string modelId = default, string modelDescription = default, CancellationToken cancellationToken = default)
{
    // Generate an ID on the caller's behalf when none was supplied.
    modelId ??= Guid.NewGuid().ToString();

    var authorizeRequest = new AuthorizeCopyRequest(modelId)
    {
        Description = modelDescription
    };

    using DiagnosticScope scope = Diagnostics.CreateScope($"{nameof(DocumentModelAdministrationClient)}.{nameof(GetCopyAuthorization)}");
    scope.Start();

    try
    {
        var result = await ServiceClient.AuthorizeCopyDocumentModelAsync(authorizeRequest, cancellationToken).ConfigureAwait(false);
        return Response.FromValue(result.Value, result.GetRawResponse());
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}
/// <summary>
/// Attempts to fetch a model definition, trying each candidate content path produced by
/// <c>PrepareWork</c> in order and returning the first successful fetch. If every
/// candidate fails, throws a <see cref="RequestFailedException"/> describing the last
/// failure encountered.
/// </summary>
/// <param name="dtmi">The DTMI of the model to fetch.</param>
/// <param name="repositoryUri">The base URI of the models repository.</param>
/// <param name="dependencyResolution">Controls whether the expanded (pre-bundled) form is tried first.</param>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> controlling the request lifetime.</param>
public async Task<FetchResult> FetchAsync(
    string dtmi,
    Uri repositoryUri,
    ModelDependencyResolution dependencyResolution,
    CancellationToken cancellationToken = default)
{
    using DiagnosticScope scope = _clientDiagnostics.CreateScope($"{nameof(HttpModelFetcher)}.{nameof(Fetch)}");
    scope.Start();
    try
    {
        Queue<string> work = PrepareWork(dtmi, repositoryUri, dependencyResolution == ModelDependencyResolution.TryFromExpanded);

        string remoteFetchError = string.Empty;
        RequestFailedException requestFailedExceptionThrown = null;
        Exception genericExceptionThrown = null;

        while (work.Count != 0)
        {
            cancellationToken.ThrowIfCancellationRequested();
            string tryContentPath = work.Dequeue();
            ModelsRepositoryEventSource.Instance.FetchingModelContent(tryContentPath);

            try
            {
                // First candidate that resolves wins; remaining candidates are skipped.
                string content = await EvaluatePathAsync(tryContentPath, cancellationToken).ConfigureAwait(false);
                return (new FetchResult()
                {
                    Definition = content,
                    Path = tryContentPath
                });
            }
            catch (RequestFailedException ex)
            {
                requestFailedExceptionThrown = ex;
            }
            catch (Exception ex)
            {
                genericExceptionThrown = ex;
            }

            // NOTE(review): the error message is rebuilt on every failed iteration, so
            // only the *last* failing path is reported — presumably intentional, but
            // earlier failures are not surfaced; confirm this is the desired behavior.
            if (genericExceptionThrown != null || requestFailedExceptionThrown != null)
            {
                remoteFetchError =
                    $"{string.Format(CultureInfo.InvariantCulture, StandardStrings.GenericGetModelsError, dtmi)} " +
                    string.Format(CultureInfo.InvariantCulture, StandardStrings.ErrorFetchingModelContent, tryContentPath);
            }
        }

        // All candidates failed: prefer rethrowing as a RequestFailedException that
        // preserves the original status/error code when one was observed.
        if (requestFailedExceptionThrown != null)
        {
            throw new RequestFailedException(
                requestFailedExceptionThrown.Status,
                remoteFetchError,
                requestFailedExceptionThrown.ErrorCode,
                requestFailedExceptionThrown);
        }
        else
        {
            throw new RequestFailedException(
                (int)HttpStatusCode.BadRequest,
                remoteFetchError,
                null,
                genericExceptionThrown);
        }
    }
    catch (Exception ex)
    {
        scope.Failed(ex);
        throw;
    }
}
/// <summary>Starts the wrapped diagnostic scope.</summary>
public void Start() => _scope.Start();
/// <summary>
/// Submit a <see cref="BlobBatch"/> of sub-operations.
/// </summary>
/// <param name="batch">
/// A <see cref="BlobBatch"/> of sub-operations.
/// </param>
/// <param name="throwOnAnyFailure">
/// A value indicating whether or not to throw exceptions for
/// sub-operation failures.
/// </param>
/// <param name="async">
/// Whether to invoke the operation asynchronously.
/// </param>
/// <param name="cancellationToken">
/// Optional <see cref="CancellationToken"/> to propagate notifications
/// that the operation should be cancelled.
/// </param>
/// <returns>
/// A <see cref="Response"/> on successfully submitting.
/// </returns>
/// <remarks>
/// A <see cref="RequestFailedException"/> will be thrown if
/// a failure to submit the batch occurs. Individual sub-operation
/// failures will only throw if <paramref name="throwOnAnyFailure"/> is
/// true and be wrapped in an <see cref="AggregateException"/>.
/// </remarks>
private async Task<Response> SubmitBatchInternal(
    BlobBatch batch,
    bool throwOnAnyFailure,
    bool async,
    CancellationToken cancellationToken)
{
    // Scope is manually disposed in the finally block (not a 'using' declaration).
    DiagnosticScope scope = ClientDiagnostics.CreateScope($"{nameof(BlobBatchClient)}.{nameof(SubmitBatch)}");
    try
    {
        scope.Start();

        // Validate the batch: it must be non-null, not previously submitted, and
        // created by this client (its sub-requests target this client's endpoint).
        batch = batch ?? throw new ArgumentNullException(nameof(batch));
        if (batch.Submitted)
        {
            throw BatchErrors.CannotResubmitBatch(nameof(batch));
        }
        else if (!batch.IsAssociatedClient(this))
        {
            throw BatchErrors.BatchClientDoesNotMatch(nameof(batch));
        }

        // Get the sub-operation messages to submit
        IList<HttpMessage> messages = batch.GetMessagesToSubmit();
        if (messages.Count == 0)
        {
            throw BatchErrors.CannotSubmitEmptyBatch(nameof(batch));
        }
        // TODO: Consider validating the upper limit of 256 messages

        // Merge the sub-operations into a single multipart/mixed Stream
        (Stream content, string contentType) = await MergeOperationRequests(
            messages,
            async,
            cancellationToken)
            .ConfigureAwait(false);

        // Container-scoped and service-scoped batches go through different REST
        // clients; both paths then fan the multipart response back out onto the
        // individual sub-operation messages.
        if (IsContainerScoped)
        {
            ResponseWithHeaders<Stream, ContainerSubmitBatchHeaders> response;
            if (async)
            {
                response = await _containerRestClient.SubmitBatchAsync(
                    containerName: ContainerName,
                    contentLength: content.Length,
                    multipartContentType: contentType,
                    body: content,
                    cancellationToken: cancellationToken)
                    .ConfigureAwait(false);
            }
            else
            {
                response = _containerRestClient.SubmitBatch(
                    containerName: ContainerName,
                    contentLength: content.Length,
                    multipartContentType: contentType,
                    body: content,
                    cancellationToken: cancellationToken);
            }

            await UpdateOperationResponses(
                messages,
                response.GetRawResponse(),
                response.Value,
                response.Headers.ContentType,
                throwOnAnyFailure,
                async,
                cancellationToken)
                .ConfigureAwait(false);

            return (response.GetRawResponse());
        }
        else
        {
            ResponseWithHeaders<Stream, ServiceSubmitBatchHeaders> response;
            if (async)
            {
                response = await _serviceRestClient.SubmitBatchAsync(
                    contentLength: content.Length,
                    multipartContentType: contentType,
                    body: content,
                    cancellationToken: cancellationToken)
                    .ConfigureAwait(false);
            }
            else
            {
                response = _serviceRestClient.SubmitBatch(
                    contentLength: content.Length,
                    multipartContentType: contentType,
                    body: content,
                    cancellationToken: cancellationToken);
            }

            await UpdateOperationResponses(
                messages,
                response.GetRawResponse(),
                response.Value,
                response.Headers.ContentType,
                throwOnAnyFailure,
                async,
                cancellationToken)
                .ConfigureAwait(false);

            return (response.GetRawResponse());
        }
    }
    catch (Exception ex)
    {
        scope.Failed(ex);
        throw;
    }
    finally
    {
        scope.Dispose();
    }
}
/// <summary> Publishes a batch of EventGridEvents to an Azure Event Grid topic. </summary>
/// <param name="events"> An array of events to be published to Event Grid. </param>
/// <param name="async">Whether to invoke the operation asynchronously.</param>
/// <param name="cancellationToken"> The cancellation token to use. </param>
/// <returns>The raw <see cref="Response"/> from the publish call.</returns>
private async Task<Response> SendEventsInternal(IEnumerable<EventGridEvent> events, bool async, CancellationToken cancellationToken = default)
{
    using DiagnosticScope scope = _clientDiagnostics.CreateScope($"{nameof(EventGridPublisherClient)}.{nameof(SendEvents)}");
    scope.Start();

    try
    {
        // List of events cannot be null
        Argument.AssertNotNull(events, nameof(events));

        List<EventGridEventInternal> eventsWithSerializedPayloads = new List<EventGridEventInternal>();
        foreach (EventGridEvent egEvent in events)
        {
            // Individual events cannot be null
            Argument.AssertNotNull(egEvent, nameof(egEvent));

            JsonDocument data;
            if (egEvent.Data is BinaryData binaryEventData)
            {
                try
                {
                    // BinaryData payloads are assumed to already contain JSON...
                    data = JsonDocument.Parse(binaryEventData);
                }
                catch (JsonException)
                {
                    // ...and if they don't parse, fall back to serializing the raw
                    // bytes' string form as a JSON string payload.
                    data = SerializeObjectToJsonDocument(binaryEventData.ToString(), typeof(string), cancellationToken);
                }
            }
            else
            {
                // NOTE(review): egEvent.Data.GetType() throws NullReferenceException if
                // Data is null here — presumably EventGridEvent guarantees non-null
                // Data; confirm against its constructor/validation.
                data = SerializeObjectToJsonDocument(egEvent.Data, egEvent.Data.GetType(), cancellationToken);
            }

            EventGridEventInternal newEGEvent = new EventGridEventInternal(
                egEvent.Id,
                egEvent.Subject,
                data.RootElement,
                egEvent.EventType,
                egEvent.EventTime,
                egEvent.DataVersion)
            {
                Topic = egEvent.Topic
            };

            eventsWithSerializedPayloads.Add(newEGEvent);
        }

        if (async)
        {
            // Publish asynchronously if called via an async path
            return (await _serviceRestClient.PublishEventsAsync(
                _hostName,
                eventsWithSerializedPayloads,
                cancellationToken).ConfigureAwait(false));
        }
        else
        {
            return (_serviceRestClient.PublishEvents(
                _hostName,
                eventsWithSerializedPayloads,
                cancellationToken));
        }
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}
/// <summary>
/// Returns a single segment of queues starting from the specified marker.
///
/// For more information, see
/// <see href="https://docs.microsoft.com/rest/api/storageservices/list-queues1">
/// List Queues</see>.
/// </summary>
/// <param name="marker">
/// Marker from the previous request.
/// </param>
/// <param name="traits">
/// Optional trait options for shaping the queues.
/// </param>
/// <param name="prefix">
/// Optional string that filters the results to return only queues
/// whose name begins with the specified <paramref name="prefix"/>.
/// </param>
/// <param name="pageSizeHint">
/// Optional hint to specify the desired size of the page returned.
/// </param>
/// <param name="async">
/// Whether to invoke the operation asynchronously.
/// </param>
/// <param name="cancellationToken">
/// <see cref="CancellationToken"/>
/// </param>
/// <returns>
/// A single segment of queues starting from the specified marker, including the next marker if appropriate.
/// </returns>
/// <remarks>
/// Use an empty marker to start enumeration from the beginning. Queue names are returned in lexicographic order.
/// After getting a segment, process it, and then call <c>GetQueuesInternal</c> again (passing in the next marker) to get the next segment.
/// </remarks>
internal async Task<Response<ListQueuesSegmentResponse>> GetQueuesInternal(
    string marker,
    QueueTraits traits,
    string prefix,
    int? pageSizeHint,
    bool async,
    CancellationToken cancellationToken)
{
    using (ClientConfiguration.Pipeline.BeginLoggingScope(nameof(QueueServiceClient)))
    {
        // Log the request parameters on entry; the matching LogMethodExit
        // runs in the finally block below.
        ClientConfiguration.Pipeline.LogMethodEnter(
            nameof(QueueServiceClient),
            message:
            $"{nameof(Uri)}: {Uri}\n" +
            $"{nameof(marker)}: {marker}\n" +
            $"{nameof(traits)}: {traits}\n" +
            $"{nameof(prefix)}: {prefix}");
        // Scope is named after the public GetQueues API that callers invoke,
        // and is disposed manually in the finally block.
        DiagnosticScope scope = ClientConfiguration.ClientDiagnostics.CreateScope($"{nameof(QueueServiceClient)}.{nameof(GetQueues)}");
        try
        {
            ResponseWithHeaders<ListQueuesSegmentResponse, ServiceListQueuesSegmentHeaders> response;
            scope.Start();
            // Translate the requested traits into the service's "include"
            // query values; an empty set is sent as null (omit the parameter).
            IEnumerable<string> includeTypes = traits.AsIncludeTypes();
            if (async)
            {
                response = await _serviceRestClient.ListQueuesSegmentAsync(
                    prefix: prefix,
                    marker: marker,
                    maxresults: pageSizeHint,
                    include: includeTypes.Any() ? includeTypes : null,
                    cancellationToken: cancellationToken)
                    .ConfigureAwait(false);
            }
            else
            {
                response = _serviceRestClient.ListQueuesSegment(
                    prefix: prefix,
                    marker: marker,
                    maxresults: pageSizeHint,
                    include: includeTypes.Any() ? includeTypes : null,
                    cancellationToken: cancellationToken);
            }
            // If the caller did not ask for the Metadata trait, strip any
            // metadata from the returned items so the trait contract holds.
            // NOTE(review): presumably the service can still populate metadata
            // in some responses — confirm against the REST behavior.
            if ((traits & QueueTraits.Metadata) != QueueTraits.Metadata)
            {
                IEnumerable<QueueItem> queueItems = response.Value.QueueItems;
                foreach (QueueItem queueItem in queueItems)
                {
                    queueItem.Metadata = null;
                }
            }
            return (response);
        }
        catch (Exception ex)
        {
            ClientConfiguration.Pipeline.LogException(ex);
            scope.Failed(ex);
            throw;
        }
        finally
        {
            ClientConfiguration.Pipeline.LogMethodExit(nameof(QueueServiceClient));
            scope.Dispose();
        }
    }
}
/// <summary>
/// Uploads <paramref name="content"/> by appending blocks concurrently
/// (bounded by <c>_maxWorkerCount</c>) and then flushing once at the end.
/// </summary>
/// <param name="content">The stream to upload.</param>
/// <param name="blockSize">Size of each appended block, in bytes.</param>
/// <param name="httpHeaders">Headers applied at the final flush.</param>
/// <param name="conditions">Request conditions; only the lease id is used for appends.</param>
/// <param name="progressHandler">Optional upload progress receiver.</param>
/// <param name="cancellationToken">Token to cancel the operation.</param>
/// <returns>The <see cref="PathInfo"/> from the final flush.</returns>
private async Task<Response<PathInfo>> UploadInParallelAsync(
    Stream content,
    int blockSize,
    PathHttpHeaders httpHeaders,
    DataLakeRequestConditions conditions,
    IProgress<long> progressHandler,
    CancellationToken cancellationToken)
{
    // Wrap the staging and commit calls in an Upload span for
    // distributed tracing
    DiagnosticScope scope = _client.ClientDiagnostics.CreateScope(
        _operationName ?? $"{nameof(Azure)}.{nameof(Storage)}.{nameof(Files)}.{nameof(DataLake)}.{nameof(DataLakeFileClient)}.{nameof(DataLakeFileClient.Upload)}");
    try
    {
        scope.Start();

        // Wrap progressHandler in a AggregatingProgressIncrementer to prevent
        // progress from being reset with each stage blob operation.
        if (progressHandler != null)
        {
            progressHandler = new AggregatingProgressIncrementer(progressHandler);
        }

        // A list of tasks that are currently executing which will
        // always be smaller than _maxWorkerCount
        List<Task> runningTasks = new List<Task>();

        // We need to keep track of how much data we have appended to
        // calculate offsets for the next appends, and the final
        // position to flush
        long appendedBytes = 0;

        // Partition the stream into individual blocks
        // NOTE(review): blocks are not disposed here — presumably
        // AppendBlockAsync owns returning the block's memory to the pool;
        // confirm.
        await foreach (ChunkedStream block in PartitionedUploadExtensions.GetBlocksAsync(
            content, blockSize, async: true, _arrayPool, cancellationToken).ConfigureAwait(false))
        {
            // Start appending the next block (but don't await the Task!)
            Task task = AppendBlockAsync(
                block,
                appendedBytes,
                conditions?.LeaseId,
                progressHandler,
                cancellationToken);

            // Add the block to our task and commit lists
            runningTasks.Add(task);
            appendedBytes += block.Length;

            // If we run out of workers
            if (runningTasks.Count >= _maxWorkerCount)
            {
                // Wait for at least one of them to finish
                await Task.WhenAny(runningTasks).ConfigureAwait(false);

                // Clear any completed blocks from the task list
                for (int i = 0; i < runningTasks.Count; i++)
                {
                    Task runningTask = runningTasks[i];
                    if (!runningTask.IsCompleted)
                    {
                        continue;
                    }

                    // Awaiting an already-completed task observes (rethrows)
                    // any exception it faulted with.
                    await runningTask.ConfigureAwait(false);
                    // Removal shifts later elements left, so step the index
                    // back to avoid skipping the next task.
                    runningTasks.RemoveAt(i);
                    i--;
                }
            }
        }

        // Wait for all the remaining blocks to finish staging and then
        // commit the block list to complete the upload
        await Task.WhenAll(runningTasks).ConfigureAwait(false);
        return (await _client.FlushAsync(
            position: appendedBytes,
            httpHeaders: httpHeaders,
            conditions: conditions,
            cancellationToken: cancellationToken)
            .ConfigureAwait(false));
    }
    catch (Exception ex)
    {
        scope.Failed(ex);
        throw;
    }
    finally
    {
        scope.Dispose();
    }
}
/// <summary>
/// Uploads <paramref name="content"/> synchronously, appending one block at
/// a time and flushing once after all blocks have been appended.
/// </summary>
/// <param name="content">The stream to upload.</param>
/// <param name="blockSize">Size of each appended block, in bytes.</param>
/// <param name="httpHeaders">Headers applied at the final flush.</param>
/// <param name="conditions">Request conditions; only the lease id is used for appends.</param>
/// <param name="progressHandler">Optional upload progress receiver.</param>
/// <param name="cancellationToken">Token to cancel the operation.</param>
/// <returns>The <see cref="PathInfo"/> from the final flush.</returns>
private Response<PathInfo> UploadInSequence(
    Stream content,
    int blockSize,
    PathHttpHeaders httpHeaders,
    DataLakeRequestConditions conditions,
    IProgress<long> progressHandler,
    CancellationToken cancellationToken)
{
    // A single Upload span covers every Append plus the final Flush so
    // distributed traces show the whole upload as one operation.
    DiagnosticScope scope = _client.ClientDiagnostics.CreateScope(
        _operationName ?? $"{nameof(Azure)}.{nameof(Storage)}.{nameof(Files)}.{nameof(DataLake)}.{nameof(DataLakeFileClient)}.{nameof(DataLakeFileClient.Upload)}");
    try
    {
        scope.Start();

        // Aggregate progress across the individual append calls so the
        // caller's handler is not reset by each operation.
        progressHandler = progressHandler == null
            ? null
            : new AggregatingProgressIncrementer(progressHandler);

        // Running total of bytes appended; doubles as the offset for the
        // next append and the final flush position.
        long position = 0;

        foreach (ChunkedStream chunk in PartitionedUploadExtensions.GetBlocksAsync(
            content, blockSize, async: false, _arrayPool, cancellationToken).EnsureSyncEnumerable())
        {
            // Dispose each chunk after appending it so its buffer is
            // returned to the ArrayPool.
            using (chunk)
            {
                _client.Append(
                    new MemoryStream(chunk.Bytes, 0, chunk.Length, writable: false),
                    offset: position,
                    leaseId: conditions?.LeaseId,
                    progressHandler: progressHandler,
                    cancellationToken: cancellationToken);
                position += chunk.Length;
            }
        }

        // Everything is staged; flush once to complete the upload.
        return _client.Flush(
            position: position,
            httpHeaders: httpHeaders,
            conditions: conditions,
            cancellationToken: cancellationToken);
    }
    catch (Exception ex)
    {
        scope.Failed(ex);
        throw;
    }
    finally
    {
        scope.Dispose();
    }
}
/// <summary> Publishes a batch of CloudEvents to an Azure Event Grid topic. </summary>
/// <param name="events"> An array of events to be published to Event Grid. </param>
/// <param name="async">Whether to invoke the operation asynchronously.</param>
/// <param name="cancellationToken"> The cancellation token to use. </param>
/// <returns>The raw service <see cref="Response"/> from the publish call.</returns>
private async Task<Response> SendCloudEventsInternal(IEnumerable<CloudEvent> events, bool async, CancellationToken cancellationToken = default)
{
    // Scope is named after the public SendEvents overloads that callers invoke.
    using DiagnosticScope scope = _clientDiagnostics.CreateScope($"{nameof(EventGridPublisherClient)}.{nameof(SendEvents)}");
    scope.Start();
    try
    {
        // List of events cannot be null
        Argument.AssertNotNull(events, nameof(events));

        List<CloudEventInternal> eventsWithSerializedPayloads = new List<CloudEventInternal>();
        foreach (CloudEvent cloudEvent in events)
        {
            // Individual events cannot be null
            Argument.AssertNotNull(cloudEvent, nameof(cloudEvent));

            // "1.0" is the fourth constructor argument — presumably the
            // CloudEvents specversion; confirm against CloudEventInternal.
            CloudEventInternal newCloudEvent = new CloudEventInternal(
                cloudEvent.Id,
                cloudEvent.Source,
                cloudEvent.Type,
                "1.0")
            {
                Time = cloudEvent.Time,
                DataBase64 = cloudEvent.DataBase64,
                Datacontenttype = cloudEvent.DataContentType,
                Dataschema = cloudEvent.DataSchema,
                Subject = cloudEvent.Subject
            };

            // Copy extension attributes onto the wire model; each value is
            // wrapped in a CustomModelSerializer so it is serialized with the
            // configured _dataSerializer.
            foreach (KeyValuePair<string, object> kvp in cloudEvent.ExtensionAttributes)
            {
                newCloudEvent.Add(kvp.Key, new CustomModelSerializer(kvp.Value, _dataSerializer, cancellationToken));
            }

            // The 'Data' property is optional for CloudEvents
            // Additionally, if the type of data is binary, 'Data' will not be populated (data will be stored in 'DataBase64' instead)
            if (cloudEvent.Data != null)
            {
                JsonDocument data = SerializeObjectToJsonDocument(cloudEvent.Data, cloudEvent.Data.GetType(), cancellationToken);
                newCloudEvent.Data = data.RootElement;
            }

            eventsWithSerializedPayloads.Add(newCloudEvent);
        }

        if (async)
        {
            // Publish asynchronously if called via an async path
            return (await _serviceRestClient.PublishCloudEventEventsAsync(
                _hostName,
                eventsWithSerializedPayloads,
                cancellationToken).ConfigureAwait(false));
        }
        else
        {
            return (_serviceRestClient.PublishCloudEventEvents(
                _hostName,
                eventsWithSerializedPayloads,
                cancellationToken));
        }
    }
    catch (Exception e)
    {
        scope.Failed(e);
        throw;
    }
}