// Handles the Record button: starts a new recording when idle, otherwise stops
// the one in progress and releases all audio resources.
private void buttonRecord_Click(object sender, EventArgs e)
{
    if (wasRecored == false)
    {
        // No capture device (microphone) found/selected — nothing to record from.
        if (listBoxDevices.SelectedItems.Count == 0)
        {
            return;
        }
        // A destination file must be chosen before recording can start.
        if (fileRecordPath == "")
        {
            MessageBox.Show("Wybierz miejsce w którym chcesz zapisać plik!");
        }
        else
        {
            // Record into the previously chosen file.
            int deviceNumber = listBoxDevices.SelectedIndex;
            sourceStream = new NAudio.Wave.WaveIn();
            sourceStream.DeviceNumber = deviceNumber;
            // 44.1 kHz sample rate; mono/stereo follows the selected device's capabilities.
            sourceStream.WaveFormat = new NAudio.Wave.WaveFormat(44100, NAudio.Wave.WaveIn.GetCapabilities(deviceNumber).Channels);
            sourceStream.DataAvailable += new EventHandler<NAudio.Wave.WaveInEventArgs>(sourceStream_DataAvailable);
            waveFileWriter = new NAudio.Wave.WaveFileWriter(fileRecordPath, sourceStream.WaveFormat);
            sourceStream.StartRecording();
            buttonRecord.Text = "Nagrywanie...";
            wasRecored = true;
        }
    }
    else
    {
        // Already recording: stop playback and capture, release resources,
        // and restore the button label.
        if (soundOut != null)
        {
            soundOut.Stop();
            soundOut.Dispose();
            soundOut = null;
        }
        if (sourceStream != null)
        {
            sourceStream.StopRecording();
            sourceStream.Dispose();
            sourceStream = null;
        }
        if (waveFileWriter != null)
        {
            // Disposing the writer flushes buffered samples and finalizes the WAV header.
            waveFileWriter.Dispose();
            waveFileWriter = null;
        }
        buttonRecord.Text = "Nagraj";
        labelRecording.Text = "";
        // BUG FIX: reset the flag; the original left it true, so a second
        // recording could never be started after stopping the first.
        wasRecored = false;
    }
}
// Stops the speech-recognition session: ends microphone recognition, clears the
// speech box, releases audio resources, and optionally runs speaker
// identification on the saved recording.
private void btnStopSpeech_Click(object sender, EventArgs e)
{
    // BUG FIX: guard against a double click / never-started session — the
    // commented-out "micClient = null" below shows the client may be null here.
    if (this.micClient != null)
    {
        this.micClient.EndMicAndRecognition();
    }
    //this.micClient = null;
    boxSpeech.Text = "";
    btnSpeech.Enabled = true;
    if (waveOut != null)
    {
        waveOut.Stop();
        waveOut.Dispose();
        waveOut = null;
    }
    if (sourceStream != null)
    {
        sourceStream.StopRecording();
        sourceStream.Dispose();
        sourceStream = null;
    }
    if (waveWriter != null)
    {
        // Disposing the writer finalizes the WAV header so the file is playable.
        waveWriter.Dispose();
        waveWriter = null;
    }
    // NOTE(review): hard-coded user-specific path; should come from configuration
    // or the same variable the recorder wrote to — TODO confirm and centralize.
    string _selectedFile = "C:\\Users\\Mac\\Desktop\\check.wav";
    if (identify)
    {
        identifySpeaker(_selectedFile);
    }
}
// Ensures all audio resources are correctly closed and disposed of when
// recording is stopped.
// BUG FIX: the original wrapped everything in "if (sourceStream != null)", so
// when sourceStream was already null, waveOut and waveWriter leaked and
// isRecording was never reset. Each resource is now released independently.
private void stopRecording()
{
    if (waveOut != null) //stops sound from playing and disposes
    {
        waveOut.Stop();
        waveOut.Dispose();
        waveOut = null;
    }
    if (sourceStream != null) //stops sourcestream from recording and disposes
    {
        sourceStream.StopRecording();
        sourceStream.Dispose();
        sourceStream = null;
    }
    if (waveWriter != null)
    {
        // Disposing the writer flushes samples and finalizes the WAV header.
        waveWriter.Dispose();
        waveWriter = null;
    }
    isRecording = false;
}
/// <summary>
/// Saves a recording to a temporary file; a new input stream is used.
/// </summary>
/// <param name="filename">Destination path for the 8 kHz mono WAV file.</param>
public void Record(String filename)
{
    // BUG FIX: finalize any previous writer before replacing it; the original
    // overwrote the reference, leaking the handle and leaving the old file locked.
    if (waveFile != null)
    {
        waveFile.Dispose();
    }
    waveFile = new NAudio.Wave.WaveFileWriter(filename, new NAudio.Wave.WaveFormat(8000, 1));
    // Unbind any resources flowing to the volume control.
    if (waveIn != null)
    {
        waveIn.Dispose();
    }
    waveIn = new NAudio.Wave.WaveIn();
    // 8 kHz mono — must match the writer's format above.
    waveIn.WaveFormat = new NAudio.Wave.WaveFormat(8000, 1);
    waveIn.DataAvailable += new EventHandler<NAudio.Wave.WaveInEventArgs>(waveIn_DataAvailableRecording);
    waveIn.RecordingStopped += new EventHandler<NAudio.Wave.StoppedEventArgs>(waveIn_RecordingStopped);
    waveIn.StartRecording();
}
// Stops playback and recording, then releases every audio resource.
private void button3_Click(object sender, EventArgs e)
{
    // Halt and release the output device.
    waveOut?.Stop();
    waveOut?.Dispose();
    waveOut = null;
    // Halt and release the capture stream.
    sourceStream?.StopRecording();
    sourceStream?.Dispose();
    sourceStream = null;
    // Disposing the writer finalizes the WAV file.
    waveWriter?.Dispose();
    waveWriter = null;
}
// Raised when the WaveIn source stops recording; releases the capture source
// and the output file.
private void wavSource_RecordingStop(object sender, NAudio.Wave.StoppedEventArgs e)
{
    wavSource?.Dispose();
    wavSource = null;
    // Disposing the writer flushes buffered samples and finalizes the WAV header.
    wavFile?.Dispose();
    wavFile = null;
    //recBtn.Enabled = true;
}
// Stops playback and recording and releases both resources.
// Always reports success.
public bool StopRecording()
{
    waveOut?.Stop();
    waveOut?.Dispose();
    waveOut = null;
    sourceStream?.StopRecording();
    sourceStream?.Dispose();
    sourceStream = null;
    return true;
}
// Stops playback and recording, disposing each resource independently so that
// a failure in one step cannot prevent the others from being released.
// BUG FIX: the original's outer try wrapped the inner tries, so an exception
// while stopping waveOut skipped the sourceStream and waveWriter cleanup.
public void StopRecording()
{
    try
    {
        if (waveOut != null)
        {
            waveOut.Stop();
            waveOut.Dispose();
            waveOut = null;
        }
    }
    catch (Exception e)
    {
        // Best-effort cleanup: log and keep releasing the remaining resources.
        Console.WriteLine(e.Message);
    }
    try
    {
        if (sourceStream != null)
        {
            //th.Abort();
            sourceStream.StopRecording();
            sourceStream.Dispose();
            sourceStream = null;
        }
    }
    catch (Exception e)
    {
        Console.WriteLine(e.Message);
    }
    try
    {
        if (waveWriter != null)
        {
            // Disposing the writer finalizes the WAV header.
            waveWriter.Dispose();
            waveWriter = null;
        }
    }
    catch (Exception e)
    {
        Console.WriteLine(e.Message);
    }
}
// Stops playback and recording and releases every audio resource held by this
// instance: the output device, the capture stream, and the file writer.
public void StopRecording()
{
    m_oWaveOut?.Stop();
    m_oWaveOut?.Dispose();
    m_oWaveOut = null;
    m_oSourceStream?.StopRecording();
    m_oSourceStream?.Dispose();
    m_oSourceStream = null;
    // Disposing the writer flushes samples and finalizes the WAV header.
    m_oWaveWriter?.Dispose();
    m_oWaveWriter = null;
}
// Stops audio playback and capture and disposes all related resources.
private void StopAudioRecording()
{
    waveOut?.Stop();
    waveOut?.Dispose();
    waveOut = null;
    sourceStream?.StopRecording();
    sourceStream?.Dispose();
    sourceStream = null;
    // Disposing the writer finalizes the recorded WAV file.
    waveWriter?.Dispose();
    waveWriter = null;
}
// Stops recording/playback, releases audio resources, and resets the
// elapsed-time display.
private void StopRecording()
{
    btnRecord.Content = "Record";
    if (waveOut != null)
    {
        waveOut.Stop();
        waveOut.Dispose();
        // BUG FIX: clear the reference (the original kept the disposed object,
        // so a later null check would stop/dispose it again).
        waveOut = null;
    }
    if (SourceStream != null)
    {
        SourceStream.StopRecording();
        SourceStream.Dispose();
        // BUG FIX: clear the reference to avoid reusing a disposed stream.
        SourceStream = null;
    }
    if (waveWriter != null)
    {
        // Disposing the writer flushes samples and finalizes the WAV file.
        waveWriter.Dispose();
        waveWriter = null;
    }
    if (output != null)
    {
        if (output.PlaybackState == NAudio.Wave.PlaybackState.Playing || output.PlaybackState == NAudio.Wave.PlaybackState.Paused)
        {
            output.Stop();
            output.Dispose();
            output = null;
        }
    }
    // The original if/else assigned the same display value on both branches;
    // just stop the watch when it is running and reset the display once.
    if (stopWatch.IsRunning)
    {
        stopWatch.Stop();
    }
    currentTime = String.Format("{0:00}:{1:00}:{2:00}.{3:00}", 0, 0, 0, 0);
    //txtBox.Text = currentTime;
}
// Stop button handler: halts recording/playback, releases audio resources, and
// resets the elapsed-time display.
private void btnStop_Click(object sender, RoutedEventArgs e)
{
    if (waveOut != null)
    {
        waveOut.Stop();
        waveOut.Dispose();
        // BUG FIX: clear the reference (the original kept the disposed object,
        // so a later null check would stop/dispose it again).
        waveOut = null;
    }
    if (SourceStream != null)
    {
        SourceStream.StopRecording();
        SourceStream.Dispose();
        // BUG FIX: clear the reference to avoid reusing a disposed stream.
        SourceStream = null;
    }
    if (waveWriter != null)
    {
        // Disposing the writer flushes samples and finalizes the WAV file.
        waveWriter.Dispose();
        waveWriter = null;
    }
    if (output != null)
    {
        if (output.PlaybackState == NAudio.Wave.PlaybackState.Playing || output.PlaybackState == NAudio.Wave.PlaybackState.Paused)
        {
            output.Stop();
            output.Dispose();
            output = null;
        }
    }
    // The original if/else assigned the same display value on both branches;
    // just stop the watch when it is running and reset the display once.
    if (stopWatch.IsRunning)
    {
        stopWatch.Stop();
    }
    currentTime = String.Format("{0:00}:{1:00}:{2:00}.{3:00}", 0, 0, 0, 0);
    txtBox.Text = currentTime;
}
// Stops the recording process used to start translating, then releases the
// playback device, the capture stream, and the file writer in that order.
private void button3_Click(object sender, EventArgs e)
{
    label1.Text = "Stop Recording";
    wavout?.Stop();
    wavout?.Dispose();
    wavout = null;
    sourcestream?.StopRecording();
    sourcestream?.Dispose();
    sourcestream = null;
    // Disposing the writer finalizes the recorded WAV file.
    wavewriter?.Dispose();
    wavewriter = null;
}
// Stops the current recording, transcribes the captured audio, runs sentiment
// analysis on the transcript, and renders dependency/NLP results into the UI.
// NOTE(review): async void is only safe if this is invoked from a UI event
// handler — exceptions thrown here are otherwise unobservable; confirm call site.
private async void StopRec()
{
    panel1.Visible = false;
    label1.Visible = false;
    label2.Visible = true;
    // Release playback and capture resources; disposing the writer finalizes
    // the WAV file so it can be read back by the recognizer.
    if (waveOut != null)
    {
        waveOut.Stop();
        waveOut.Dispose();
        waveOut = null;
    }
    if (sourceStream != null)
    {
        sourceStream.StopRecording();
        sourceStream.Dispose();
        sourceStream = null;
    }
    if (waveWriter != null)
    {
        waveWriter.Dispose();
        waveWriter = null;
    }
    await Task.Delay(30); //let the wav audio file be created properly.
    int senti;
    // Transcribe the recorded audio — presumably returns the recognized
    // sentence text; TODO confirm against Recognition()'s implementation.
    string line = Recognition();
    // line.TrimEnd(' ');
    line = line + ".";
    senti = nlp.SentiAnalysis(line);
    Console.Text = "\n" + line;
    label2.Visible = false;
    // NOTE(review): senti >= 2 appears to mean "non-negative sentiment" given
    // the else branch below — confirm the SentiAnalysis score scale.
    if (senti >= 2)
    {
        SentiBox.Text = senti.ToString();
        nlp.SentenceParser(line);
        // Split the dependency string on ")," — each fragment loses its closing
        // parenthesis, which is re-appended when displayed below.
        string[] depText = nlp.dependency.Split(new string[] { ")," }, StringSplitOptions.None);
        foreach (string s in depText)
        {
            BasicDep.Text += "\r\n" + s + ")";
        }
        // Show every extracted property key/value pair.
        foreach (KeyValuePair<string, string> tt in nlp.propsUsed)
        {
            ProcessedBox.Text += "\r\nKey is : " + tt.Key + " value is :" + tt.Value;
        }
        DrawImage(nlp.propsUsed, nlp.key);
        // Clear accumulated properties so the next utterance starts fresh.
        nlp.propsUsed.Clear();
    }
    else
    {
        ProcessedBox.Text += "\n Sentiment is negative.";
        SentiBox.Text = senti.ToString();
    }
    button1.Enabled = true;
    nlp.key = "";
}