// Microphone data callback: forwards the freshly captured bytes to the
// network stream so the server receives the live audio.
void wi_DataAvailable(object sender, NAudio.Wave.WaveInEventArgs e)
{
    try
    {
        // Only the first e.BytesRecorded bytes of e.Buffer are valid for this
        // callback. The original copied and sent e.Buffer.Length bytes, which
        // can append stale data from earlier callbacks when the buffer is not
        // completely filled. Writing directly also avoids a needless copy.
        stm_s.Write(e.Buffer, 0, e.BytesRecorded);
    }
    catch (Exception ex)
    {
        // Network failure: log, count the crash and tear the connection down
        // so we stop capturing into a dead socket.
        logbox.AppendText(ex.Message, Color.Red);
        crash++;
        wavein.StopRecording();
        stm_s.Close();
        tcpclnt_sound.Close();
    }
}
// Record button handler: starts capturing from the selected input device into
// the previously chosen file, or — when already recording — stops everything
// and restores the button caption.
private void buttonRecord_Click(object sender, EventArgs e)
{
    if (!wasRecored)
    {
        // No capture device found/selected — nothing to record from.
        if (listBoxDevices.SelectedItems.Count == 0)
        {
            return;
        }

        // A target path must be chosen first. IsNullOrEmpty also covers the
        // null case the original "" comparison missed (it would have thrown
        // later inside the WaveFileWriter constructor).
        if (string.IsNullOrEmpty(fileRecordPath))
        {
            MessageBox.Show("Wybierz miejsce w którym chcesz zapisać plik!");
            return;
        }

        int deviceNumber = listBoxDevices.SelectedIndex;
        sourceStream = new NAudio.Wave.WaveIn();
        sourceStream.DeviceNumber = deviceNumber;
        // 44.1 kHz; mono/stereo is taken from the device's capabilities.
        sourceStream.WaveFormat = new NAudio.Wave.WaveFormat(44100, NAudio.Wave.WaveIn.GetCapabilities(deviceNumber).Channels);
        sourceStream.DataAvailable += new EventHandler<NAudio.Wave.WaveInEventArgs>(sourceStream_DataAvailable);
        waveFileWriter = new NAudio.Wave.WaveFileWriter(fileRecordPath, sourceStream.WaveFormat);
        sourceStream.StartRecording();
        buttonRecord.Text = "Nagrywanie...";
        wasRecored = true;
    }
    else
    {
        // Already recording: stop playback and capture, release resources.
        if (soundOut != null)
        {
            soundOut.Stop();
            soundOut.Dispose();
            soundOut = null;
        }
        if (sourceStream != null)
        {
            sourceStream.StopRecording();
            sourceStream.Dispose();
            sourceStream = null;
        }
        if (waveFileWriter != null)
        {
            waveFileWriter.Dispose();
            waveFileWriter = null;
        }
        // The original reset the caption in each of the three branches above
        // (and skipped it when all fields were null); reset it once, always.
        buttonRecord.Text = "Nagraj";
        labelRecording.Text = "";
    }
}
// Record/Stop toggle — the user may retry as many times as they like.
// "mode" == true means idle (ready to record); false means recording.
private void recStpBtn_Click(object sender, EventArgs e)
{
    if (mode)
    {
        try
        {
            recStpBtn.Text = "Stop";
            wavSource = new NAudio.Wave.WaveIn();
            // 44.1 kHz mono capture for the level recording.
            wavSource.WaveFormat = new NAudio.Wave.WaveFormat(44100, 1);
            wavSource.DataAvailable += new EventHandler<NAudio.Wave.WaveInEventArgs>(wavSource_DataAvail);
            wavSource.RecordingStopped += new EventHandler<NAudio.Wave.StoppedEventArgs>(wavSource_RecordingStop);
            wavFile = new NAudio.Wave.WaveFileWriter(@"C:\Pres_Proto\V2\MetroFrameworkDLLExample\RecordWAV\" + lvlStr + ".wav", wavSource.WaveFormat);
            wavSource.StartRecording();
            mode = false;
        }
        catch (System.IO.IOException ex)
        {
            // Opening the output file can fail (missing folder, locked file).
            // Report it and restore the idle caption — the original left the
            // button saying "Stop" even though nothing was recording.
            MessageBox.Show(ex.ToString());
            recStpBtn.Text = "Record";
        }
    }
    else
    {
        recStpBtn.Text = "Record";
        // Guard: a stop click when recording never actually started would
        // otherwise throw a NullReferenceException.
        if (wavSource != null)
        {
            wavSource.StopRecording();
        }
        mode = true;
    }
}
// Ensures all recording resources are stopped and disposed and the
// isRecording flag is cleared. Safe to call repeatedly.
private void stopRecording()
{
    // The original wrapped everything in "if (sourceStream != null)", which
    // leaked waveOut and waveWriter (and left isRecording set) whenever the
    // source stream was already null. Each resource is now released
    // independently.
    if (waveOut != null)
    {
        // Stop playback and free the output device.
        waveOut.Stop();
        waveOut.Dispose();
        waveOut = null;
    }
    if (sourceStream != null)
    {
        // Stop capturing and free the input device.
        sourceStream.StopRecording();
        sourceStream.Dispose();
        sourceStream = null;
    }
    if (waveWriter != null)
    {
        // Dispose flushes and finalises the WAV file header.
        waveWriter.Dispose();
        waveWriter = null;
    }
    isRecording = false;
}
// Stops speech recognition and any in-progress recording, then (optionally)
// runs speaker identification on the captured WAV file.
private void btnStopSpeech_Click(object sender, EventArgs e)
{
    // Guard: the mic client may never have been started. Every other field
    // below is null-checked; this one was not and could throw.
    if (this.micClient != null)
    {
        this.micClient.EndMicAndRecognition();
    }
    boxSpeech.Text = "";
    btnSpeech.Enabled = true;

    if (waveOut != null)
    {
        waveOut.Stop();
        waveOut.Dispose();
        waveOut = null;
    }
    if (sourceStream != null)
    {
        sourceStream.StopRecording();
        sourceStream.Dispose();
        sourceStream = null;
    }
    if (waveWriter != null)
    {
        waveWriter.Dispose();
        waveWriter = null;
    }

    // NOTE(review): hard-coded capture path — presumably the same file the
    // recording above writes to; keep in sync with the writer's target.
    string _selectedFile = "C:\\Users\\Mac\\Desktop\\check.wav";
    if (identify)
    {
        identifySpeaker(_selectedFile);
    }
}
// Tears down playback, capture and the WAV writer, releasing each resource
// exactly once and nulling its field so a repeated click is a no-op.
private void button3_Click(object sender, EventArgs e)
{
    var playback = waveOut;
    if (playback != null)
    {
        playback.Stop();
        playback.Dispose();
        waveOut = null;
    }

    var capture = sourceStream;
    if (capture != null)
    {
        capture.StopRecording();
        capture.Dispose();
        sourceStream = null;
    }

    var writer = waveWriter;
    if (writer != null)
    {
        writer.Dispose();
        waveWriter = null;
    }
}
// Record/compare toggle: first press records the current letter to a WAV;
// second press stops the recording and compares it with the reference sample.
private void recordBtn_Click(object sender, EventArgs e)
{
    if (setMode)
    {
        // Start a 44.1 kHz mono recording of the current letter.
        // The original wrapped this in "catch (Exception) { throw; }", which
        // is a no-op and has been removed.
        String filename = @"C:\Pres_Proto\MetroFrameworkDLLExample\RecordWAV\" + letters[curPos] + ".wav";
        recordBtn.Text = "STOP";
        wavSource = new NAudio.Wave.WaveIn();
        wavSource.WaveFormat = new NAudio.Wave.WaveFormat(44100, 1);
        wavSource.DataAvailable += new EventHandler<NAudio.Wave.WaveInEventArgs>(wavSource_DataAvail);
        wavSource.RecordingStopped += new EventHandler<NAudio.Wave.StoppedEventArgs>(wavSource_RecordingStop);
        wavFile = new NAudio.Wave.WaveFileWriter(filename, wavSource.WaveFormat);
        wavSource.StartRecording();
        setMode = false;
    }
    else
    {
        // Pressing "STOP" stops capture and automatically runs the comparison.
        wavSource.StopRecording();
        String recordWAV_file = @"C:\Pres_Proto\MetroFrameworkDLLExample\RecordWAV\" + letters[curPos] + ".wav";
        String refWAV_file = "Class" + LoginForm.classSec + "_kidAudio/" + letters[curPos] + ".wav";
        File f1 = new File(recordWAV_file);
        File f2 = new File(refWAV_file);
        if (!f1.exists() || !f2.exists())
        {
            MessageBox.Show("WARNING: One of the files might be missing!");
        }
        else
        {
            float compute_Result = compareAudio(recordWAV_file, refWAV_file);
            // 10.0 is the similarity threshold for a successful match.
            if (compute_Result >= 10.0)
            {
                MessageBox.Show("Matched: " + compute_Result.ToString() + "\n You Win !");
            }
            else
            {
                MessageBox.Show("Matched: " + compute_Result.ToString() + "\n Try Again !");
            }
        }
        recordBtn.Text = "RECORD";
        setMode = true;
    }
}
// Switches live capture to the newly selected microphone: tears down the old
// input stream and starts a fresh one using the device's channel count.
private void microphoneList_SelectedIndexChanged(object sender, EventArgs e)
{
    deviceNumber = microphoneList.SelectedIndex;

    if (sourceStream != null)
    {
        sourceStream.StopRecording();
        // The original only stopped the old stream; disposing it releases the
        // device handle and its buffers before a new stream is opened,
        // preventing a leak on every device change.
        sourceStream.Dispose();
        sourceStream = null;
    }

    sourceStream = new NAudio.Wave.WaveIn();
    sourceStream.DeviceNumber = deviceNumber;
    // 44.1 kHz; channel count taken from the selected device's capabilities.
    sourceStream.WaveFormat = new NAudio.Wave.WaveFormat(44100, NAudio.Wave.WaveIn.GetCapabilities(deviceNumber).Channels);
    // Bytes per sample frame (sample width in bytes × channels),
    // used by the DataAvailable handler.
    bytesPerSample = (sourceStream.WaveFormat.BitsPerSample / 8) * sourceStream.WaveFormat.Channels;
    sourceStream.DataAvailable += new EventHandler<NAudio.Wave.WaveInEventArgs>(sourceStream_DataAvailable);
    sourceStream.StartRecording();
}
// Stops audio playback and microphone capture, disposing both and clearing
// the fields. Always reports success.
public bool StopRecording()
{
    var output = waveOut;
    waveOut = null;
    if (output != null)
    {
        output.Stop();
        output.Dispose();
    }

    var input = sourceStream;
    sourceStream = null;
    if (input != null)
    {
        input.StopRecording();
        input.Dispose();
    }

    return true;
}
// Best-effort teardown of playback, capture and the WAV writer. Each failure
// is logged to the console and swallowed so one broken resource cannot
// prevent the others from being released.
public void StopRecording()
{
    try
    {
        var playback = waveOut;
        if (playback != null)
        {
            playback.Stop();
            playback.Dispose();
            waveOut = null;
        }

        try
        {
            var capture = sourceStream;
            if (capture != null)
            {
                capture.StopRecording();
                capture.Dispose();
                sourceStream = null;
            }
        }
        catch (Exception e)
        {
            Console.WriteLine(e.Message);
        }

        try
        {
            var writer = waveWriter;
            if (writer != null)
            {
                writer.Dispose();
                waveWriter = null;
            }
        }
        catch (Exception e)
        {
            Console.WriteLine(e.Message);
        }
    }
    catch (Exception e2)
    {
        Console.WriteLine(e2.Message);
    }
}
// Releases the output device, the input stream and the file writer — in that
// order — clearing each member field so the recorder can be reused safely.
public void StopRecording()
{
    var oOut = m_oWaveOut;
    m_oWaveOut = null;
    if (oOut != null)
    {
        oOut.Stop();
        oOut.Dispose();
    }

    var oSource = m_oSourceStream;
    m_oSourceStream = null;
    if (oSource != null)
    {
        oSource.StopRecording();
        oSource.Dispose();
    }

    var oWriter = m_oWaveWriter;
    m_oWaveWriter = null;
    if (oWriter != null)
    {
        // Disposing finalises the WAV file.
        oWriter.Dispose();
    }
}
// Halts playback and capture and closes the WAV file, releasing every
// NAudio resource this recorder currently holds.
private void StopAudioRecording()
{
    if (waveOut != null)
    {
        // Stop and release the playback device.
        waveOut.Stop();
        waveOut.Dispose();
        waveOut = null;
    }

    if (sourceStream != null)
    {
        // Stop and release the microphone input stream.
        sourceStream.StopRecording();
        sourceStream.Dispose();
        sourceStream = null;
    }

    if (waveWriter != null)
    {
        // Disposing the writer flushes and finalises the WAV header.
        waveWriter.Dispose();
        waveWriter = null;
    }
}
// Stops recording and playback, restores the Record button caption and
// resets the elapsed-time display.
private void StopRecording()
{
    btnRecord.Content = "Record";

    if (waveOut != null)
    {
        waveOut.Stop();
        waveOut.Dispose();
    }
    if (SourceStream != null)
    {
        SourceStream.StopRecording();
        SourceStream.Dispose();
    }
    if (waveWriter != null)
    {
        waveWriter.Dispose();
        waveWriter = null;
    }
    if (output != null)
    {
        // Only stop/dispose while actively playing or paused.
        if (output.PlaybackState == NAudio.Wave.PlaybackState.Playing || output.PlaybackState == NAudio.Wave.PlaybackState.Paused)
        {
            output.Stop();
            output.Dispose();
            output = null;
        }
    }

    // The original if/else produced an identical reset string in both
    // branches; only the Stop() call depends on whether the watch is running.
    if (stopWatch.IsRunning)
    {
        stopWatch.Stop();
    }
    currentTime = String.Format("{0:00}:{1:00}:{2:00}.{3:00}", 0, 0, 0, 0);
}
// Stop button handler: halts recording and playback and resets the
// elapsed-time display in the text box.
private void btnStop_Click(object sender, RoutedEventArgs e)
{
    if (waveOut != null)
    {
        waveOut.Stop();
        waveOut.Dispose();
    }
    if (SourceStream != null)
    {
        SourceStream.StopRecording();
        SourceStream.Dispose();
    }
    if (waveWriter != null)
    {
        waveWriter.Dispose();
        waveWriter = null;
    }
    if (output != null)
    {
        // Only stop/dispose while actively playing or paused.
        if (output.PlaybackState == NAudio.Wave.PlaybackState.Playing || output.PlaybackState == NAudio.Wave.PlaybackState.Paused)
        {
            output.Stop();
            output.Dispose();
            output = null;
        }
    }

    // The original if/else duplicated the reset and text-box update in both
    // branches; only the Stop() call depends on whether the watch is running.
    if (stopWatch.IsRunning)
    {
        stopWatch.Stop();
    }
    currentTime = String.Format("{0:00}:{1:00}:{2:00}.{3:00}", 0, 0, 0, 0);
    txtBox.Text = currentTime;
}
// Stops the recording pipeline that feeds the translator: the playback
// device, the microphone stream and the WAV writer are each stopped,
// disposed and their fields cleared.
private void button3_Click(object sender, EventArgs e)
{
    label1.Text = "Stop Recording";

    var playback = wavout;
    if (playback != null)
    {
        playback.Stop();
        playback.Dispose();
        wavout = null;
    }

    var mic = sourcestream;
    if (mic != null)
    {
        mic.StopRecording();
        mic.Dispose();
        sourcestream = null;
    }

    var writer = wavewriter;
    if (writer != null)
    {
        writer.Dispose();
        wavewriter = null;
    }
}
// Countdown tick: decrements the remaining seconds once per tick. When the
// countdown hits zero it stops the recording and the timer, compares the
// student's WAV against the teacher's reference, and drives the win/retry
// dialog flow.
private void countTimer_Tick(object sender, EventArgs e)
{
    seconds = seconds - 1;
    if (seconds == 0)
    {
        // Time is up: stop capture and the counter, re-enable navigation.
        wavSource.StopRecording();
        countTimer.Stop();
        backBtn.Enabled = true;
        slideUp(sender, e);

        // The student's recording vs. the teacher's reference sample.
        String recordWAV_file = @"C:\Pres_Proto\V2\MetroFrameworkDLLExample\RecordWAV\" + lvlStr + ".wav";
        String refWAV_file = @"C:\Pres_Proto\V2\MetroFrameworkDLLExample\TeacherWAV\" + lvlStr.ToLower() + ".wav";

        java.io.File f1 = new java.io.File(recordWAV_file);
        java.io.File f2 = new java.io.File(refWAV_file);
        if (!f1.exists() || !f2.exists())
        {
            MessageBox.Show("WARNING: One of the files might be missing!");
        }
        else
        {
            float compute_Result = compareAudio(recordWAV_file, refWAV_file);
            // 10.0 is the similarity threshold for a win.
            if (compute_Result >= 10.0)
            {
                MessageBox.Show("Matched: " + compute_Result.ToString() + "\n You Win !");
                // On a win, let the user choose to proceed or retry.
                DialogResult diagRes = MessageBox.Show("Do you want to proceed or try again?", "important", MessageBoxButtons.YesNo);
                if (diagRes == DialogResult.Yes)
                {
                    this.Hide();
                    // The original had identical code in both the colour-blind
                    // and normal-theme branches, so the flag only needs to be
                    // forwarded to newLevel().
                    updateLevelProgress();
                    newLevel(isColorBlind);
                    saveLevelProgress(lvlStr);
                }
                else if (diagRes == DialogResult.No)
                {
                    this.Hide();
                    // Same collapse: repeatLevel() already receives the flag.
                    repeatLevel(isColorBlind);
                }
            }
            else
            {
                MessageBox.Show("Matched: " + compute_Result.ToString() + "\n Try Again !");
            }
        }

        timeLabel.Visible = false;
        secsLabel.Visible = false;
    }
    secsLabel.Text = Convert.ToString(seconds);
}
// Finalises a recording session: releases the audio resources, waits briefly
// so the WAV file on disk is complete, then runs speech recognition,
// sentiment analysis and dependency parsing on the recognised sentence and
// updates the UI accordingly.
private async void StopRec()
{
    panel1.Visible = false;
    label1.Visible = false;
    label2.Visible = true;

    if (waveOut != null)
    {
        waveOut.Stop();
        waveOut.Dispose();
        waveOut = null;
    }
    if (sourceStream != null)
    {
        sourceStream.StopRecording();
        sourceStream.Dispose();
        sourceStream = null;
    }
    if (waveWriter != null)
    {
        waveWriter.Dispose();
        waveWriter = null;
    }

    // Short pause so the WAV file is fully written before recognition reads it.
    await Task.Delay(30);

    string sentence = Recognition() + ".";
    int senti = nlp.SentiAnalysis(sentence);
    Console.Text = "\n" + sentence;
    label2.Visible = false;

    if (senti >= 2)
    {
        // Non-negative sentiment: parse the sentence and visualise it.
        SentiBox.Text = senti.ToString();
        nlp.SentenceParser(sentence);

        string[] depText = nlp.dependency.Split(new string[] { ")," }, StringSplitOptions.None);
        foreach (string s in depText)
        {
            // Splitting on ")," strips the closing paren; restore it.
            BasicDep.Text += "\r\n" + s + ")";
        }
        foreach (KeyValuePair<string, string> tt in nlp.propsUsed)
        {
            ProcessedBox.Text += "\r\nKey is : " + tt.Key + " value is :" + tt.Value;
        }

        DrawImage(nlp.propsUsed, nlp.key);
        nlp.propsUsed.Clear();
    }
    else
    {
        ProcessedBox.Text += "\n Sentiment is negative.";
        SentiBox.Text = senti.ToString();
    }

    button1.Enabled = true;
    nlp.key = "";
}
/// <summary>
/// Stops recording; NAudio raises the RecordingStopped event as a result,
/// where the corresponding cleanup is expected to happen.
/// </summary>
public void StopRecording()
{
    // Guard against a stop request before recording was ever started —
    // consistent with the null checks used by the other stop routines.
    if (waveIn != null)
    {
        waveIn.StopRecording();
    }
}