/// <summary>
/// Uploads the cognitive-services output text files for the recording that just
/// finished to the "cognitiveserviceoutput" Azure Blob container, then lists the
/// container and records the matching blobs' public download URLs on
/// <paramref name="meetingMinutesUiPathArguments"/> for the UiPath job to consume.
/// </summary>
/// <param name="meetingMinutesUiPathArguments">Argument bag whose DownloadUrls list is populated.</param>
private static async Task UploadToAzureBlob(MeetingMinutesUiPathArguments meetingMinutesUiPathArguments)
{
    var storageConnectionString = Settings.AzureStorageConnectionString;
    if (!CloudStorageAccount.TryParse(storageConnectionString, out var storageAccount))
    {
        WriteLine("A connection string has not been defined in the system environment variable.");
        WriteLine("Press any key to exit."); // fixed typo: was "exist"
        ReadLine();
        return;
    }

    const string containerName = "cognitiveserviceoutput";

    WriteLine("== Creating Cloud Blob Client ==");
    var cloudBlobClient = storageAccount.CreateCloudBlobClient();
    var cloudBlobContainer = cloudBlobClient.GetContainerReference(containerName);
    await cloudBlobContainer.CreateIfNotExistsAsync();

    // Blob-level public read access so the generated URLs are downloadable without a SAS.
    var permissions = new BlobContainerPermissions { PublicAccess = BlobContainerPublicAccessType.Blob };
    await cloudBlobContainer.SetPermissionsAsync(permissions);

    // Only files belonging to the current recording are uploaded; their names
    // contain the wav file count. Hoisted out of the lambda so it is computed once.
    var wavFileCountText = _audioWriter.WavFileCount.ToString();

    var outputFilesPath = Directory.GetFiles(OutputFolder, "*.txt", SearchOption.TopDirectoryOnly);
    foreach (var outputFilePath in outputFilesPath.Where(x => x.Contains(wavFileCountText)))
    {
        var fileName = GetFileName(outputFilePath);
        // Message now names the actual container ("cognitiveserviceoutput");
        // it previously said "cognitiveservicesoutput".
        WriteLine($"== Uploading {fileName} to Azure Blob ({containerName})");
        var cloudBlockBlob = cloudBlobContainer.GetBlockBlobReference(fileName);
        await cloudBlockBlob.UploadFromFileAsync(outputFilePath);
    }

    WriteLine("== Done uploading files to Azure Blob ==");
    WriteLine("List blobs in container.");

    BlobContinuationToken blobContinuationToken = null;
    do
    {
        var results = await cloudBlobContainer.ListBlobsSegmentedAsync(null, blobContinuationToken);
        // Get the value of the continuation token returned by the listing call.
        blobContinuationToken = results.ContinuationToken;
        var cloudBlockBlobs = results.Results.OfType<CloudBlockBlob>().ToList();
        meetingMinutesUiPathArguments.DownloadUrls.AddRange(
            cloudBlockBlobs.Select(x => x.Uri.AbsoluteUri)
                           .Where(x => x.Contains(wavFileCountText)));
    } while (blobContinuationToken != null);

    // Print each collected URL exactly once, after listing completes. Previously the
    // whole accumulated list was re-printed inside the segment loop, so earlier URLs
    // were duplicated once per additional listing segment.
    foreach (var item in meetingMinutesUiPathArguments.DownloadUrls)
    {
        WriteLine(item);
    }
}
/// <summary>
/// Runs the cognitive-services pipeline on the recording that just finished:
/// speaker identification, per-speaker transcription, full-audio transcription,
/// key-phrase extraction and sentiment analysis. The generated output file names
/// (keyed by the wav file count) are recorded on
/// <paramref name="meetingMinutesUiPathArguments"/>.
/// </summary>
/// <param name="meetingMinutesUiPathArguments">Argument bag that receives the output file paths.</param>
private static async Task CallCognitiveServices(MeetingMinutesUiPathArguments meetingMinutesUiPathArguments)
{
    try
    {
        // Capture the count of the recording that just completed before swapping
        // in a fresh AudioWriter for the next session.
        var wavFileCount = _audioWriter.WavFileCount;
        // NOTE(review): _audioWriter is replaced here but its OutputFilePath is used
        // below as the audio input — presumably the new writer resolves to the same
        // recorded file for this wavFileCount; confirm against AudioWriter.
        _audioWriter = new AudioWriter(OutputFolder);

        meetingMinutesUiPathArguments.KeyPhrasesFilePath = $"{wavFileCount}.keyphrases.txt";
        meetingMinutesUiPathArguments.MinutesFilePath = $"{wavFileCount}.minutes.txt";
        meetingMinutesUiPathArguments.SentimentFilePath = $"{wavFileCount}.sentiment.txt";
        meetingMinutesUiPathArguments.TranscribedFilePath = $"{wavFileCount}.transcribed.txt";

        var fullTranscribeSpeechPath = Combine(OutputFolder, meetingMinutesUiPathArguments.TranscribedFilePath);
        var meetingMinutesFilePath = Combine(OutputFolder, meetingMinutesUiPathArguments.MinutesFilePath);
        var keyExtractionFilePath = Combine(OutputFolder, meetingMinutesUiPathArguments.KeyPhrasesFilePath);
        var sentimentAnalysisFilePath = Combine(OutputFolder, meetingMinutesUiPathArguments.SentimentFilePath);

        var config = SpeechConfig.FromSubscription(Settings.SpeechServiceSubscriptionKey, "eastus");
        config.SpeechRecognitionLanguage = "en-US";

        WriteLine("===== Initializing Speech Identifier =====");
        var speakerIdentifierHttpRequests = new List<Task>();
        SpeechIdentifier.SpeechIdentifier speechIdentifier =
            new SpeechIdentifier.SpeechIdentifier(_audioWriter.OutputFilePath, speakerIdentifierHttpRequests);
        await speechIdentifier.IdentifySpeakers();
        // await Task.WhenAll instead of Task.WaitAll: blocking synchronously inside
        // an async method ties up a thread-pool thread and risks deadlock.
        await Task.WhenAll(speakerIdentifierHttpRequests);
        WriteLine("===== Done Speaker Identification =====");
        WriteLine();

        WriteLine("===== Transcribing Identified Speakers =====");
        // A true result short-circuits the rest of the pipeline (original behavior kept).
        if (await TranscribeIdentifiedSpeakers(meetingMinutesFilePath, speechIdentifier, _audioWriter.OutputFilePath, config))
        {
            return;
        }
        WriteLine("===== Done Transcribing Identified Speakers =====");
        WriteLine();

        WriteLine("===== Transcribing entire audio =====");
        using (var fullTranscribeSpeechWriter = new StreamWriter(fullTranscribeSpeechPath))
        {
            var transcriber = new Transcriber(_audioWriter.OutputFilePath, fullTranscribeSpeechWriter);
            await transcriber.TranscribeSpeechFromWavFileInput(config);
        }
        WriteLine("===== Done Transcribing entire audio =====");
        WriteLine();

        WriteLine("===== Initializing Key Extraction and Sentiment Analysis =====");
        var textAnalytics = new TextAnalytics.TextAnalytics(meetingMinutesFilePath, fullTranscribeSpeechPath);
        textAnalytics.KeyExtraction(keyExtractionFilePath);
        textAnalytics.SentimentAnalysis(sentimentAnalysisFilePath);
        WriteLine("===== Done Key Extraction and Sentiment Analysis =====");
        WriteLine();
    }
    catch (Exception e)
    {
        WriteLine(e);
        ReadLine();
        throw; // rethrow preserving the stack trace
    }
}
/// <summary>
/// Handles incoming Direct Line websocket messages. "Record_Start" begins a new
/// recording (only one may be active at a time); "Record_Stop,&lt;jobId&gt;" stops the
/// recording, runs the cognitive-services pipeline, uploads the results to Azure
/// Blob storage and dispatches the UiPath meeting-minutes job.
/// </summary>
/// <param name="e">Websocket message event; <c>e.Data</c> is an ActivitySet JSON payload.</param>
private static async Task WebSocketClientOnOnMessage(MessageEventArgs e)
{
    try
    {
        if (e.Data == null)
        {
            return;
        }

        var activitySet = JsonConvert.DeserializeObject<ActivitySet>(e.Data);
        if (activitySet == null)
        {
            return;
        }

        foreach (var activity in activitySet.Activities)
        {
            if (activity.Type != ActivityTypes.Message || string.IsNullOrEmpty(activity.Text))
            {
                continue;
            }

            if (activity.Text.Contains("Record_Start"))
            {
                if (_audioWriter != null)
                {
                    WriteLine(
                        "Tried to record again but can only have one recording at a time. Stop the other recorder first.");
                    continue;
                }

                _audioWriter = new AudioWriter(OutputFolder);
                _audioWriter.StartRecording();
            }
            else if (activity.Text.Contains("Record_Stop"))
            {
                // Expected format: "Record_Stop,<jobId>". Guard the split: indexing
                // messages[1] unconditionally threw IndexOutOfRangeException when the
                // message carried no job id.
                var messages = activity.Text.Split(',');
                if (messages.Length < 2)
                {
                    WriteLine("Record_Stop message did not include a job id; ignoring.");
                    continue;
                }
                var jobId = messages[1];

                // Guard against a stop without an active recording: the pipeline below
                // dereferences _audioWriter, which previously caused a NullReferenceException.
                if (_audioWriter == null)
                {
                    WriteLine("Received Record_Stop but no recording is in progress; ignoring.");
                    continue;
                }
                _audioWriter.StopRecording();

                var uiPathDownloadArguments = new MeetingMinutesUiPathArguments
                {
                    JobId = jobId,
                    ServiceUrl = Settings.BotServiceUrl,
                    BotAppId = Settings.BotAppId,
                    BotAppPassword = Settings.BotAppPassword,
                    EmailToSend = Settings.EmailToSend,
                    EmailBody = Settings.EmailBody,
                    EmailSubject = $"Meeting minutes for {DateTime.Today.ToShortDateString()}",
                };

                await CallCognitiveServices(uiPathDownloadArguments);
                await UploadToAzureBlob(uiPathDownloadArguments);
                await _uiPathHttpClient.SendUiPathJob(uiPathDownloadArguments, Settings.UiPathMeetingMinutesJobKey);

                // Release the recorder so the next Record_Start can begin.
                _audioWriter = null;
            }
        }
    }
    catch (Exception exception)
    {
        WriteLine(exception);
        throw; // rethrow preserving the stack trace
    }
}