/// <summary>
/// Polls the identification operation until it leaves the Running state,
/// raising a status-update event on every poll and an error event on failure.
/// </summary>
/// <param name="location">Operation location returned by the identification request.</param>
/// <remarks>
/// async void is retained because this is a fire-and-forget poller invoked like
/// an event handler; all exceptions are routed to RaiseOnIdentificationError.
/// NOTE(review): there is no upper bound on polling — if the service never
/// leaves Status.Running this loops forever. Consider adding a retry cap.
/// </remarks>
private async void GetIdentificationOperationStatus(OperationLocation location)
{
    try
    {
        while (true)
        {
            IdentificationOperation result = await _speakerIdentificationClient.CheckIdentificationStatusAsync(location);

            if (result.Status != Status.Running)
            {
                // Terminal state: publish the final result and stop polling.
                RaiseOnIdentificationStatusUpdated(new SpeakerIdentificationStatusUpdateEventArgs(result.Status.ToString(), $"Enrollment finished with message: {result.Message}.") { IdentifiedProfile = result.ProcessingResult });
                break;
            }

            RaiseOnIdentificationStatusUpdated(new SpeakerIdentificationStatusUpdateEventArgs(result.Status.ToString(), "Identifying..."));
            await Task.Delay(1000);
        }
    }
    catch (Exception ex)
    {
        // FIX: the original had two byte-identical catch blocks
        // (IdentificationException then Exception); a single Exception handler
        // covers both without changing behavior.
        RaiseOnIdentificationError(new SpeakerIdentificationErrorEventArgs($"Failed to get operation status: {ex.Message}"));
    }
}
/// <summary>
/// Gets the identification operation status or result asynchronously.
/// </summary>
/// <param name="location">The location returned upon calling the identification operation</param>
/// <exception cref="IdentificationException">Thrown in case of internal server error or an invalid url</exception>
/// <exception cref="TimeoutException">Thrown in case the connection timed out</exception>
/// <returns>The identification object encapsulating the result</returns>
public async Task<IdentificationOperation> CheckIdentificationStatusAsync(OperationLocation location)
{
    try
    {
        IdentificationOperation operation = await CheckStatusAsync<IdentificationOperation, IdentificationException>(location.Url).ConfigureAwait(false);
        return operation;
    }
    catch (TaskCanceledException exception)
    {
        // FIX: preserve the original exception as InnerException so the stack
        // trace and cancellation details are not lost (the original discarded it).
        throw new TimeoutException("Connection timed out: " + exception.Message, exception);
    }
}
/// <summary>
/// Identifies the speaker of the given audio file against every profile
/// enrolled with the service, polling until the operation completes.
/// </summary>
/// <param name="_selectedFile">Path of the WAV file to identify.</param>
/// <remarks>
/// NOTE(review): the original source was corrupted at the success branch
/// (it read: writeUser("User: "******"User: unknown")); the reconstruction
/// below reports the identified profile id on success and "unknown" on
/// failure/timeout — confirm the intended UI wording against the original app.
/// SECURITY(review): the subscription key is hard-coded; move it to secure
/// configuration.
/// </remarks>
private async void identifySpeaker(string _selectedFile)
{
    SpeakerIdentificationServiceClient _serviceClient = new SpeakerIdentificationServiceClient("e5404f463d1242ad8ce61c5422afc4bf");

    // Candidate set: every profile currently enrolled with the service.
    Profile[] allProfiles = await _serviceClient.GetProfilesAsync();
    Guid[] testProfileIds = new Guid[allProfiles.Length];
    for (int i = 0; i < testProfileIds.Length; i++)
    {
        testProfileIds[i] = allProfiles[i].ProfileId;
    }

    OperationLocation processPollingLocation;
    using (Stream audioStream = File.OpenRead(_selectedFile))
    {
        _selectedFile = "";
        // 'true' forces short-audio mode.
        processPollingLocation = await _serviceClient.IdentifyAsync(audioStream, testProfileIds, true);
    }

    // Poll the long-running operation: up to 10 tries, 5 seconds apart.
    IdentificationOperation identificationResponse = null;
    int numOfRetries = 10;
    TimeSpan timeBetweenRetries = TimeSpan.FromSeconds(5.0);
    while (numOfRetries > 0)
    {
        await Task.Delay(timeBetweenRetries);
        identificationResponse = await _serviceClient.CheckIdentificationStatusAsync(processPollingLocation);

        if (identificationResponse.Status == Status.Succeeded)
        {
            // Reconstructed from corrupted source — see remarks above.
            writeUser("User: " + identificationResponse.ProcessingResult.IdentifiedProfileId);
            break;
        }
        else if (identificationResponse.Status == Status.Failed)
        {
            writeUser("User: unknown");
            break;
        }

        numOfRetries--;
    }

    if (numOfRetries <= 0)
    {
        // Operation never reached a terminal state within the retry budget.
        writeUser("User: unknown");
    }
}
/// <summary>
/// Identifies the speaker of a recorded narration against all enrolled profiles.
/// </summary>
/// <param name="recordingFileName">Path to the WAV recording to identify.</param>
/// <returns>The completed identification operation (status Succeeded).</returns>
/// <exception cref="IdentificationException">
/// Thrown when the service reports failure or when polling times out.
/// </exception>
public async Task<IdentificationOperation> RecognizeSpeaker(string recordingFileName)
{
    var srsc = new SpeakerIdentificationServiceClient(Settings.Instance.SpeakerRecognitionApiKeyValue);

    // Candidate set: every profile enrolled with the service.
    var profiles = await srsc.GetProfilesAsync();
    Guid[] testProfileIds = new Guid[profiles.Length];
    for (int index = 0; index < profiles.Length; index++)
    {
        testProfileIds[index] = profiles[index].ProfileId;
    }

    // IdentifyAsync is a long-running operation; it hands back a polling location.
    OperationLocation processPollingLocation;
    using (Stream audioStream = File.OpenRead(recordingFileName))
    {
        processPollingLocation = await srsc.IdentifyAsync(audioStream, testProfileIds, true);
    }

    // Poll up to 10 times, 5 seconds apart, until a terminal status appears.
    IdentificationOperation identificationResponse = null;
    var attemptsLeft = 10;
    var pollInterval = TimeSpan.FromSeconds(5.0);
    while (attemptsLeft > 0)
    {
        await Task.Delay(pollInterval);
        identificationResponse = await srsc.CheckIdentificationStatusAsync(processPollingLocation);

        var status = identificationResponse.Status;
        if (status == Microsoft.ProjectOxford.SpeakerRecognition.Contract.Identification.Status.Succeeded)
        {
            break;
        }
        if (status == Microsoft.ProjectOxford.SpeakerRecognition.Contract.Identification.Status.Failed)
        {
            throw new IdentificationException(identificationResponse.Message);
        }

        attemptsLeft--;
    }

    if (attemptsLeft <= 0)
    {
        throw new IdentificationException("Identification operation timeout.");
    }

    return identificationResponse;
}
/// <summary>
/// Identify a stream of audio.
/// </summary>
/// <param name="stream">Audio buffer to be recognized</param>
/// <param name="serviceClient">Client used in identifying the streamed audio wave</param>
/// <param name="clientId">Client ID</param>
/// <param name="requestId">Request ID</param>
public async Task IdentifyStreamAsync(Stream stream, SpeakerIdentificationServiceClient serviceClient, Guid clientId, int requestId)
{
    try
    {
        // Kick off the long-running identification and poll its status.
        var pollingLocation = await serviceClient.IdentifyAsync(stream, _speakerIds, true).ConfigureAwait(false);

        for (var retriesLeft = 3; retriesLeft > 0; retriesLeft--)
        {
            await Task.Delay(TimeSpan.FromSeconds(TimeSpanBetweenPollingRetries));
            var response = await serviceClient.CheckIdentificationStatusAsync(pollingLocation);

            if (response.Status == Status.Succeeded)
            {
                // Completed: hand the processing result back through the callback.
                _resultCallback(new RecognitionResult(response.ProcessingResult, clientId, requestId));
                return;
            }

            if (response.Status == Status.Failed)
            {
                // The service reported a failure: surface its message and stop.
                _resultCallback(new RecognitionResult(false, response.Message, requestId));
                return;
            }
        }

        // Every polling attempt was exhausted without a terminal status.
        _resultCallback(new RecognitionResult(false, "Request timeout.", requestId));
    }
    catch (Exception ex)
    {
        _resultCallback(new RecognitionResult(false, ex.Message, requestId));
    }
}
/// <summary>
/// Synchronously identifies the speaker in the given audio stream against the
/// cached list of enrolled profiles.
/// </summary>
/// <param name="stream">Audio stream; it is rewound to the start before use.</param>
/// <exception cref="IdentificationException">
/// Thrown when the service reports failure or when polling times out.
/// </exception>
/// <remarks>
/// NOTE(review): this method blocks the calling thread (.Result + sleep);
/// prefer an async signature if callers allow it.
/// SECURITY(review): the subscription key is hard-coded; move it to configuration.
/// </remarks>
private void RecognizeSpeaker(Stream stream)
{
    // Reset pointer so the service reads the audio from the beginning.
    stream.Seek(0, SeekOrigin.Begin);

    SpeakerIdentificationServiceClient speakerIDClient = new SpeakerIdentificationServiceClient("c6b005dcf13e45b6a91485d38763277b");

    // Fetch existing profiles from the in-memory cache. FIX: single cache
    // lookup instead of the original double Get; the null-coalescing 'as'
    // also avoids a NullReferenceException if the cache entry has an
    // unexpected type.
    ObjectCache memCache = MemoryCache.Default;
    var profiles = memCache.Get("SpeakerProfiles") as List<Profile> ?? new List<Profile>();
    List<Guid> testProfileIds = (from prof in profiles select prof.ProfileId).ToList();

    OperationLocation processPollingLocation = speakerIDClient.IdentifyAsync(stream, testProfileIds.ToArray(), false).Result;

    // Poll up to 10 times, 5 seconds apart, until a terminal status appears.
    IdentificationOperation identificationResponse = null;
    int numOfRetries = 10;
    TimeSpan timeBetweenRetries = TimeSpan.FromSeconds(5.0);
    while (numOfRetries > 0)
    {
        // BUG FIX: the original called Task.Delay(timeBetweenRetries) without
        // awaiting or waiting on it, so no delay ever happened and the service
        // was polled in a tight loop. This method is synchronous, so block
        // the thread explicitly.
        Thread.Sleep(timeBetweenRetries);
        identificationResponse = speakerIDClient.CheckIdentificationStatusAsync(processPollingLocation).Result;

        if (identificationResponse.Status == Status.Succeeded)
        {
            break;
        }
        else if (identificationResponse.Status == Status.Failed)
        {
            throw new IdentificationException(identificationResponse.Message);
        }
        numOfRetries--;
    }
    if (numOfRetries <= 0)
    {
        throw new IdentificationException("Identification operation timeout.");
    }

    // Identification done — result values now accessible.
    var _identificationResultTxtBlk = identificationResponse.ProcessingResult.IdentifiedProfileId.ToString();
    var _identificationConfidenceTxtBlk = identificationResponse.ProcessingResult.Confidence.ToString();
}
/// <summary>
/// Identify a stream of audio.
/// </summary>
/// <param name="stream">Audio buffer to be recognized</param>
/// <param name="serviceClient">Client used in identifying the streamed audio wave</param>
/// <param name="clientId">Client ID</param>
/// <param name="requestId">Request ID</param>
public async Task IdentifyStreamAsync(Stream stream, SpeakerIdentificationServiceClient serviceClient, Guid clientId, int requestId)
{
    try
    {
        // Start the long-running identification against the configured speaker set.
        var pollingLocation = await serviceClient.IdentifyAsync(stream, this.speakerIds, forceShortAudio: true).ConfigureAwait(false);

        // Polling cadence comes from app configuration.
        var retriesRemaining = int.Parse(ConfigurationManager.AppSettings["NumberOfPollingRetries"]);
        var pollDelay = TimeSpan.FromSeconds(int.Parse(ConfigurationManager.AppSettings["TimeSpanBetweenPollingRetries"]));

        while (retriesRemaining > 0)
        {
            await Task.Delay(pollDelay);
            var response = await serviceClient.CheckIdentificationStatusAsync(pollingLocation);

            if (response.Status == Status.Succeeded)
            {
                // Completed: report the processing result to the registered callback.
                this.resultCallback(new RecognitionResult(response.ProcessingResult, clientId, requestId));
                return;
            }

            if (response.Status == Status.Failed)
            {
                // Service-side failure: report its message and stop polling.
                this.resultCallback(new RecognitionResult(false, response.Message, requestId));
                return;
            }

            retriesRemaining--;
        }

        // Exhausted every attempt without reaching a terminal state.
        this.resultCallback(new RecognitionResult(false, "Request timeout.", requestId));
    }
    catch (Exception ex)
    {
        this.resultCallback(new RecognitionResult(false, ex.Message, requestId));
    }
}
/// <summary>
/// Stops recording, identifies the captured voice against every enrolled
/// profile, looks up the matching bank account by the identified voice id,
/// and cross-checks the voice identity with the previously established face
/// identity before proceeding to speaker verification.
/// </summary>
/// <remarks>
/// async void is retained because this is invoked fire-and-forget from the UI
/// flow; all exceptions are handled locally.
/// </remarks>
private async void voiceIdentification()
{
    try
    {
        _identificationResultStckPnl.Visibility = Visibility.Hidden;
        if (_waveIn != null)
        {
            _waveIn.StopRecording();
        }

        // Give the recorder time to flush the WAV file before reading it back.
        TimeSpan timeBetweenSaveAndIdentify = TimeSpan.FromSeconds(5.0);
        await Task.Delay(timeBetweenSaveAndIdentify);

        SpeakerIdentificationServiceClient _serviceClient = new SpeakerIdentificationServiceClient(speakerAPISubscriptionKey);

        // Candidate set: every profile enrolled with the service.
        List<Guid> list = new List<Guid>();
        Microsoft.ProjectOxford.SpeakerRecognition.Contract.Identification.Profile[] allProfiles = await _serviceClient.GetProfilesAsync();
        int itemsCount = 0;
        foreach (Microsoft.ProjectOxford.SpeakerRecognition.Contract.Identification.Profile profile in allProfiles)
        {
            list.Add(profile.ProfileId);
            itemsCount++;
        }
        Guid[] selectedIds = new Guid[itemsCount];
        for (int i = 0; i < itemsCount; i++)
        {
            selectedIds[i] = list[i];
        }

        if (_selectedFile == "")
        {
            throw new Exception("No File Selected.");
        }

        speechSynthesizer.SpeakAsync("Please wait we are verifying your voice.");
        Title = String.Format("Identifying File...");

        OperationLocation processPollingLocation;
        Console.WriteLine("Selected file is : {0}", _selectedFile);
        using (Stream audioStream = File.OpenRead(_selectedFile))
        {
            Console.WriteLine("Start");
            Console.WriteLine("Audio File is : {0}", audioStream);
            processPollingLocation = await _serviceClient.IdentifyAsync(audioStream, selectedIds, true);
            Console.WriteLine("ProcesPolling Location : {0}", processPollingLocation);
            Console.WriteLine("Done");
        }

        // Poll up to 10 times, 5 seconds apart, until the operation completes.
        IdentificationOperation identificationResponse = null;
        int numOfRetries = 10;
        TimeSpan timeBetweenRetries = TimeSpan.FromSeconds(5.0);
        while (numOfRetries > 0)
        {
            await Task.Delay(timeBetweenRetries);
            identificationResponse = await _serviceClient.CheckIdentificationStatusAsync(processPollingLocation);
            Console.WriteLine("Response is : {0}", identificationResponse);

            if (identificationResponse.Status == Microsoft.ProjectOxford.SpeakerRecognition.Contract.Identification.Status.Succeeded)
            {
                break;
            }
            else if (identificationResponse.Status == Microsoft.ProjectOxford.SpeakerRecognition.Contract.Identification.Status.Failed)
            {
                Console.WriteLine("In");
                speechSynthesizer.SpeakAsync("Failed. Please make sure your voice is registered.");
                throw new IdentificationException(identificationResponse.Message);
            }
            numOfRetries--;
        }
        if (numOfRetries <= 0)
        {
            throw new IdentificationException("Identification operation timeout.");
        }
        Title = String.Format("Identification Done.");

        // SECURITY FIX: the original concatenated the identified profile id
        // directly into the SQL string; use a parameter instead.
        conn.Open();
        SqlCommand cmd = conn.CreateCommand();
        cmd.CommandType = System.Data.CommandType.Text;
        cmd.CommandText = "Select AccountNo, CustomerName From AccountDetails where AccountNo = (Select AccountNo From AuthenticationDetails where VoiceId = @voiceId)";
        cmd.Parameters.AddWithValue("@voiceId", identificationResponse.ProcessingResult.IdentifiedProfileId.ToString());
        dr = cmd.ExecuteReader();
        if (dr.HasRows)
        {
            while (dr.Read())
            {
                accountNo = dr.GetInt32(0);
                voiceIdentifiedUserName = dr[1].ToString();
                Console.WriteLine("Account No is : " + accountNo);
                Console.WriteLine("Identified as :" + voiceIdentifiedUserName);
                _identificationResultTxtBlk.Text = voiceIdentifiedUserName;
            }
        }
        dr.Close();
        conn.Close();

        if (_identificationResultTxtBlk.Text == "")
        {
            // No account matched the identified voice profile.
            _identificationResultTxtBlk.Text = identificationResponse.ProcessingResult.IdentifiedProfileId.ToString();
            speechSynthesizer.SpeakAsync("Sorry we have not found your data.");
            return;
        }
        else
        {
            if (faceIdentifiedUserName == voiceIdentifiedUserName)
            {
                // Face and voice agree: run the verification step on the same recording.
                // NOTE(review): this stream is handed to verifySpeaker without a
                // 'using' — confirm verifySpeaker disposes it.
                Console.WriteLine("Selected file is : {0}", _selectedFile);
                Stream stream = File.OpenRead(_selectedFile);
                verifySpeaker(stream);
            }
            else
            {
                speechSynthesizer.SpeakAsync("Sorry we have found different voice identity from your face identity.");
                return;
            }
            _identificationConfidenceTxtBlk.Text = identificationResponse.ProcessingResult.Confidence.ToString();
            _identificationResultStckPnl.Visibility = Visibility.Visible;
            // FIX: removed the original GC.Collect() calls here and in the catch
            // blocks — forcing a collection is an anti-pattern with no
            // functional effect.
        }
    }
    catch (IdentificationException ex)
    {
        Console.WriteLine("Speaker Identification Error : " + ex.Message);
    }
    catch (Exception ex)
    {
        Console.WriteLine("Error : " + ex.Message);
    }
}
/// <summary>
/// Stops recording and identifies the captured audio against the profiles
/// listed in lbProfiles, reporting the result (or an error) in txtInfo.
/// </summary>
async Task finishIdentification()
{
    // A timer event may fire after the user already clicked the button.
    if (btnIdentify.IsEnabled == false)
    {
        return;
    }
    btnIdentify.Content = "Start voice identification";
    btnIdentify.IsEnabled = false;

    await CaptureMedia.StopRecordAsync();
    Stream str = AudioStream.AsStream();
    str.Seek(0, SeekOrigin.Begin);

    // FIX: the original allocated an unused Profile[] (with a stray ';;')
    // solely to read its Length; size the id array from the list count directly.
    Guid[] testProfileIds = new Guid[lbProfiles.Items.Count];
    for (int i = 0; i < lbProfiles.Items.Count; i++)
    {
        // Each list item's content is the profile's GUID as text.
        testProfileIds[i] = Guid.Parse((lbProfiles.Items[i] as ListBoxItem).Content.ToString());
    }

    OperationLocation processPollingLocation;
    try
    {
        processPollingLocation = await _serviceClient.IdentifyAsync(str, testProfileIds);
    }
    catch (Exception vx)
    {
        // FIX: the original had two byte-identical catch blocks
        // (IdentificationException then Exception); one Exception handler
        // covers both without changing behavior.
        txtInfo.Text = vx.Message;
        CleanAfter();
        return;
    }

    // Poll up to 10 times, 5 seconds apart, until a terminal status appears.
    IdentificationOperation identificationResponse = null;
    int numOfRetries = 10;
    TimeSpan timeBetweenRetries = TimeSpan.FromSeconds(5.0);
    while (numOfRetries > 0)
    {
        await Task.Delay(timeBetweenRetries);
        identificationResponse = await _serviceClient.CheckIdentificationStatusAsync(processPollingLocation);

        if (identificationResponse.Status == Status.Succeeded)
        {
            break;
        }
        else if (identificationResponse.Status == Status.Failed)
        {
            txtInfo.Text = identificationResponse.Message;
            CleanAfter();
            return;
        }
        numOfRetries--;
    }

    if (numOfRetries <= 0)
    {
        txtInfo.Text = "Identification operation timeout.";
    }
    else
    {
        // Show the identified profile id followed by the confidence value.
        txtInfo.Text = identificationResponse.ProcessingResult.IdentifiedProfileId.ToString();
        txtInfo.Text = txtInfo.Text + Environment.NewLine + identificationResponse.ProcessingResult.Confidence.ToString();
    }
    CleanAfter();
}
/// <summary>
/// Identifies the speaker of the currently selected file against the profiles
/// selected in the speakers list. The file is first concatenated with itself
/// 15 times (written to "&lt;name&gt;1.wav") before being sent to the service.
/// </summary>
/// <param name="ou">Label appended after the identified profile id in the output box.</param>
private async void identify(string ou)
{
    MainWindow window = (MainWindow)Application.Current.MainWindow;
    try
    {
        window.Log("Identifying File...");

        // Candidate profiles come from the user's selection in the speakers list.
        Profile[] selectedProfiles = SpeakersListPage.SpeakersList.GetSelectedProfiles();
        Guid[] testProfileIds = new Guid[selectedProfiles.Length];
        int idx = 0;
        foreach (Profile selected in selectedProfiles)
        {
            testProfileIds[idx++] = selected.ProfileId;
        }

        // Build a longer sample: the same file repeated 15 times.
        List<string> parts = new List<string>();
        int copies = 15;
        while (copies-- > 0)
        {
            parts.Add(_selectedFile);
        }
        string concatenatedFile = _selectedFile.Substring(0, _selectedFile.Length - 4) + "1" + ".wav";
        Concatenate(concatenatedFile, parts);

        OperationLocation processPollingLocation;
        using (Stream audioStream = File.OpenRead(concatenatedFile))
        {
            processPollingLocation = await _serviceClient.IdentifyAsync(audioStream, testProfileIds);
        }

        // Poll up to 15 times, 5 seconds apart, for a terminal status.
        IdentificationOperation identificationResponse = null;
        int attemptsLeft = 15;
        TimeSpan pollInterval = TimeSpan.FromSeconds(5.0);
        while (attemptsLeft > 0)
        {
            await Task.Delay(pollInterval);
            identificationResponse = await _serviceClient.CheckIdentificationStatusAsync(processPollingLocation);

            if (identificationResponse.Status == Status.Succeeded)
            {
                break;
            }
            if (identificationResponse.Status == Status.Failed)
            {
                throw new IdentificationException(identificationResponse.Message);
            }
            attemptsLeft--;
        }
        if (attemptsLeft <= 0)
        {
            throw new IdentificationException("Identification operation timeout.");
        }

        window.Log("Identification Done.");
        string identifiedId = identificationResponse.ProcessingResult.IdentifiedProfileId.ToString();
        tblock.Text = count + identifiedId;
        output.Text += "\n" + identifiedId + ":" + ou;
        count++;
    }
    catch (IdentificationException ex)
    {
        window.Log("Speaker Identification Error: " + ex.Message);
    }
    catch (Exception ex)
    {
        window.Log("Error: " + ex.Message);
    }
}
// Verifies the speaker in the supplied audio stream against a single enrolled
// profile (the passed GUID) by running an identification restricted to that
// one candidate — we only want a yes/no for the expected speaker, not "who is
// speaking" among many.
// INPUT: audio stream to verify, speaker profile id, sample length (seconds).
// OUTPUT: speakerVerificationResult describing success/failure and details.
public async Task<speakerVerificationResult> verifySpeaker(Stream ms, Guid speakerID, double sampleLength)
{
    speakerVerificationResult result = new speakerVerificationResult();

    // The service needs a minimum amount of audio to work with.
    if (!(sampleLength > 2.5))
    {
        result.Result = false;
        result.Message = ("Enroll: Audio sample need to be > 2.5 seconds");
        return result;
    }

    try
    {
        // Without audio there is nothing to verify.
        if (ms == null)
        {
            throw new Exception("No Audio stream found.");
        }

        // Single-element candidate array: identification against exactly one
        // profile acts as a verification of that speaker.
        Guid[] candidateIds = { speakerID };

        OperationLocation pollingLocation;
        using (Stream audioStream = ms)
        {
            audioStream.Position = 0;
            pollingLocation = await _serviceClient.IdentifyAsync(audioStream, candidateIds, true);
        }

        // Poll up to 10 times, 5 seconds apart, for a terminal status.
        IdentificationOperation response = null;
        int attemptsLeft = 10;
        TimeSpan pollInterval = TimeSpan.FromSeconds(5.0);
        while (attemptsLeft > 0)
        {
            await Task.Delay(pollInterval);
            response = await _serviceClient.CheckIdentificationStatusAsync(pollingLocation);

            if (response.Status == Status.Succeeded)
            {
                break;
            }
            if (response.Status == Status.Failed)
            {
                throw new IdentificationException(response.Message);
            }
            attemptsLeft--;
        }
        if (attemptsLeft <= 0)
        {
            throw new IdentificationException("Identification operation timeout.");
        }

        result.Status = response.Status.ToString();
        result.IdentifiedProfileId = response.ProcessingResult.IdentifiedProfileId.ToString();
        result.Confidence = response.ProcessingResult.Confidence.ToString();
        result.Result = true;
        result.Message = ("VerifciationResult passed back");
    }
    catch (IdentificationException ex)
    {
        result.Result = false;
        if (ex.Message.ToUpper().Contains("SPEAKERINVALID"))
        {
            result.Message = "Your voice sample could not be verified against the enrolled sample";
        }
        else
        {
            result.Message = ("Speaker Identification Error: " + ex.Message);
        }
    }
    catch (Exception ex)
    {
        result.Result = false;
        result.Message = ("Error: " + ex.Message);
    }

    return result;
}
/// <summary>
/// Click handler: stops recording, identifies the captured file against the
/// first enrolled profile, and shows a friendly name for known profile ids.
/// </summary>
/// <remarks>
/// FIX: removed the large block of commented-out experimentation that preceded
/// and filled the original handler, and replaced the profile-id-to-name
/// if/else chain with a lookup table. Behavior is otherwise unchanged.
/// NOTE(review): only the FIRST enrolled profile is used as a candidate
/// (selectedIds has length 1) — confirm this restriction is intentional;
/// it also throws if no profiles are enrolled, as the original did.
/// </remarks>
private async void identifyBtn_Click(object sender, RoutedEventArgs e)
{
    _identificationResultStckPnl.Visibility = Visibility.Hidden;

    // First stop recording and give the WAV file time to flush to disk.
    recordBtn.IsEnabled = true;
    identifyBtn.IsEnabled = false;
    if (_waveIn != null)
    {
        _waveIn.StopRecording();
    }
    TimeSpan timeBetweenSaveAndIdentify = TimeSpan.FromSeconds(5.0);
    await Task.Delay(timeBetweenSaveAndIdentify);

    // Collect enrolled profile ids; only the first is used for identification.
    List<Guid> list = new List<Guid>();
    Profile[] allProfiles = await _serviceClient.GetProfilesAsync();
    foreach (Profile profile in allProfiles)
    {
        list.Add(profile.ProfileId);
    }
    Guid[] selectedIds = new Guid[1];
    selectedIds[0] = list[0];

    try
    {
        if (_selectedFile == "")
        {
            throw new Exception("No File Selected.");
        }
        Title = String.Format("Identifying File...");

        OperationLocation processPollingLocation;
        using (Stream audioStream = File.OpenRead(_selectedFile))
        {
            _selectedFile = "";
            Console.WriteLine("Audio File is : {0}", audioStream);
            // 'true' forces short-audio mode.
            processPollingLocation = await _serviceClient.IdentifyAsync(audioStream, selectedIds, true);
        }

        // Poll up to 10 times, 5 seconds apart, until the operation completes.
        IdentificationOperation identificationResponse = null;
        int numOfRetries = 10;
        TimeSpan timeBetweenRetries = TimeSpan.FromSeconds(5.0);
        while (numOfRetries > 0)
        {
            await Task.Delay(timeBetweenRetries);
            identificationResponse = await _serviceClient.CheckIdentificationStatusAsync(processPollingLocation);
            Console.WriteLine("Response is : {0}", identificationResponse);

            if (identificationResponse.Status == Status.Succeeded)
            {
                break;
            }
            else if (identificationResponse.Status == Status.Failed)
            {
                throw new IdentificationException(identificationResponse.Message);
            }
            numOfRetries--;
        }
        if (numOfRetries <= 0)
        {
            throw new IdentificationException("Identification operation timeout.");
        }
        Title = String.Format("Identification Done.");

        // Map the known profile ids to display names; anything else shows the raw id.
        Dictionary<string, string> knownSpeakers = new Dictionary<string, string>
        {
            { "aeb46767-24b7-4d9d-a9ad-dd7dc965b0bb", "Toshif" },
            { "7ce81071-ef9d-46cf-9d87-02d465b1a972", "Aakash" },
            { "acec28d0-cfd5-4bc4-8840-03bb523a43f7", "Mohammad Toshif Khan" },
            { "f7d5a9d2-9663-4504-b53c-ee0c2c975104", "Nandini" }
        };
        string identifiedId = identificationResponse.ProcessingResult.IdentifiedProfileId.ToString();
        string displayName;
        _identificationResultTxtBlk.Text = knownSpeakers.TryGetValue(identifiedId, out displayName) ? displayName : identifiedId;

        _identificationConfidenceTxtBlk.Text = identificationResponse.ProcessingResult.Confidence.ToString();
        _identificationResultStckPnl.Visibility = Visibility.Visible;
    }
    catch (IdentificationException ex)
    {
        Title = String.Format("Speaker Identification Error: " + ex.Message);
        Console.WriteLine("Speaker Identification Error : " + ex.Message);
    }
    catch (Exception ex)
    {
        Title = String.Format("Error: " + ex.Message);
    }
    // FIX: removed the original GC.Collect() calls — forcing a collection in
    // a UI handler is an anti-pattern with no functional effect.
}
/// <summary>
/// Click handler: identifies the speaker of the selected file against the
/// profiles chosen in the speakers list and displays the identified profile
/// id and confidence.
/// </summary>
private async void _identifyBtn_Click(object sender, RoutedEventArgs e)
{
    MainWindow window = (MainWindow)Application.Current.MainWindow;
    try
    {
        if (_selectedFile == "")
        {
            throw new Exception("No File Selected.");
        }
        window.Log("Identifying File...");

        // Candidate set: the profiles currently selected in the list page.
        Profile[] selectedProfiles = SpeakersListPage.SpeakersList.GetSelectedProfiles();
        Guid[] candidateIds = new Guid[selectedProfiles.Length];
        int slot = 0;
        foreach (Profile profile in selectedProfiles)
        {
            candidateIds[slot++] = profile.ProfileId;
        }

        // Short-audio mode is requested when the click came from the short-audio button.
        bool shortAudio = (sender as Button) == _identifyShortAudioBtn;
        OperationLocation pollingLocation;
        using (Stream audioStream = File.OpenRead(_selectedFile))
        {
            _selectedFile = "";
            pollingLocation = await _serviceClient.IdentifyAsync(audioStream, candidateIds, shortAudio);
        }

        // Poll up to 10 times, 5 seconds apart, for a terminal status.
        IdentificationOperation response = null;
        int attemptsLeft = 10;
        TimeSpan pollInterval = TimeSpan.FromSeconds(5.0);
        while (attemptsLeft > 0)
        {
            await Task.Delay(pollInterval);
            response = await _serviceClient.CheckIdentificationStatusAsync(pollingLocation);

            if (response.Status == Status.Succeeded)
            {
                break;
            }
            if (response.Status == Status.Failed)
            {
                throw new IdentificationException(response.Message);
            }
            attemptsLeft--;
        }
        if (attemptsLeft <= 0)
        {
            throw new IdentificationException("Identification operation timeout.");
        }

        window.Log("Identification Done.");
        _identificationResultTxtBlk.Text = response.ProcessingResult.IdentifiedProfileId.ToString();
        _identificationConfidenceTxtBlk.Text = response.ProcessingResult.Confidence.ToString();
        _identificationResultStckPnl.Visibility = Visibility.Visible;
    }
    catch (IdentificationException ex)
    {
        window.Log("Speaker Identification Error: " + ex.Message);
    }
    catch (Exception ex)
    {
        window.Log("Error: " + ex.Message);
    }
}