// Async Speak output of a text
// Synthesizes 'text' into an audio stream, wires it into the AudioGraph and starts playback.
// Playback completion is signalled via MediaSourceInputNode_MediaSourceCompleted (which is
// expected to call EndOfSpeak); this method returns as soon as the graph is started.
private async Task SpeakAsyncLow(string text)
{
    _speaking = true; // locks additional calls for Speak until finished talking this bit

    // Speech - all graph pieces must exist before we can speak
    if (!_canSpeak || _synthesizer == null || _audioGraph == null || _deviceOutputNode == null) {
        LOG.LogError($"SpeakAsyncLow: Some item do not exist: cannot speak..\n[{_synthesizer}] [{_audioGraph}] [{_deviceOutputNode}]");
        EndOfSpeak( );
        return;
    }

    // Generate a new, independent audio stream from plain text.
    _stream?.Dispose( ); // FIX: dispose the previous synthesis stream before overwriting it (was leaked; matches the _mediaSource cleanup below)
    _stream = await _synthesizer.SynthesizeTextToStreamAsync(text);

    // Must create a MediaSource obj to derive a Stream Consumer InputNode
    _mediaSource?.Dispose( ); // clean old
    _mediaSource = MediaSource.CreateFromStream(_stream, _stream.ContentType);
    if (_mediaSourceInputNode != null) {
        // clean old nodes
        _mediaSourceInputNode.MediaSourceCompleted -= MediaSourceInputNode_MediaSourceCompleted; // detach handler
        _mediaSourceInputNode.Dispose( );
    }

    // create the InputNode
    var resultMS = await _audioGraph.CreateMediaSourceAudioInputNodeAsync(_mediaSource);
    if (resultMS.Status != MediaSourceAudioInputNodeCreationStatus.Success) {
        // Cannot create input node
        LOG.LogError($"SpeakAsyncLow: MediaSourceAudioInputNode creation: {resultMS.Status}\nExtError: {resultMS.ExtendedError}");
        EndOfSpeak( );
        return; // cannot speak
    }
    _mediaSourceInputNode = resultMS.Node;
    // add a handler to stop and signal when finished
    _mediaSourceInputNode.MediaSourceCompleted += MediaSourceInputNode_MediaSourceCompleted;
    _mediaSourceInputNode.AddOutgoingConnection(_deviceOutputNode); // connect the graph

    // Speak it
    _audioGraph.Start( );
    // Speaks in the current Thread - cannot be waited for in the same thread
}
// </SnippetCreateFileOutputNode>
// <SnippetCreateMediaSourceInputNode>
// Builds a MediaSourceAudioInputNode for the graph from a streaming (adaptive) URI.
// On any failure the method logs a diagnostic and leaves mediaSourceInputNode untouched.
private async Task CreateMediaSourceInputNode(System.Uri contentUri)
{
    // Nothing to attach the node to without a graph.
    if (audioGraph == null)
    {
        return;
    }

    // Resolve the URI into an adaptive (e.g. HLS/DASH) media source first.
    var adaptiveResult = await AdaptiveMediaSource.CreateFromUriAsync(contentUri);
    if (adaptiveResult.Status != AdaptiveMediaSourceCreationStatus.Success)
    {
        Debug.WriteLine("Failed to create AdaptiveMediaSource");
        return;
    }

    // Wrap it in a MediaSource, then ask the graph for an input node over it.
    var mediaSource = MediaSource.CreateFromAdaptiveMediaSource(adaptiveResult.MediaSource);
    var nodeResult = await audioGraph.CreateMediaSourceAudioInputNodeAsync(mediaSource);

    if (nodeResult.Status != MediaSourceAudioInputNodeCreationStatus.Success)
    {
        // Map the failure code to a human-readable diagnostic.
        if (nodeResult.Status == MediaSourceAudioInputNodeCreationStatus.FormatNotSupported)
        {
            Debug.WriteLine("The MediaSource uses an unsupported format");
        }
        else if (nodeResult.Status == MediaSourceAudioInputNodeCreationStatus.NetworkError)
        {
            Debug.WriteLine("The MediaSource requires a network connection and a network-related error occurred");
        }
        else
        {
            // Covers UnknownFailure and any future status values.
            Debug.WriteLine("An unknown error occurred while opening the MediaSource");
        }
        return;
    }

    // Success: keep the node for later wiring/playback.
    mediaSourceInputNode = nodeResult.Node;
}