public void Dispose()
 {
     if (_waveIn != null)
     {
         _waveIn.StopRecording();
         _waveIn.Dispose();
         _waveIn = null;
     }
     if (_writer != null)
     {
         _writer.Close();
         _writer.Dispose();
         _writer = null;
     }
 }
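A minimal sketch of the DataAvailable handler this Dispose pairs with, assuming the same _waveIn and _writer fields; the handler name is illustrative:

 private void OnDataAvailable(object sender, NAudio.Wave.WaveInEventArgs e)
 {
     // Append each captured buffer to the open wave file.
     _writer?.Write(e.Buffer, 0, e.BytesRecorded);
 }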
Example #2
            private void ExportFilter(object sender, EventArgs e)
            {
                SaveFileDialog SaveWave = new SaveFileDialog();

                if (Response.Length < 4)
                {
                    SaveWave.Filter = " Wave Audio (*.wav) |*.wav";
                }
                else
                {
                    SaveWave.Filter = "Extended Wave Audio (*.wavex) |*.wavex";
                }

                if (SaveWave.ShowDialog() == DialogResult.OK)
                {
                    NAudio.Wave.WaveFileWriter Writer = new NAudio.Wave.WaveFileWriter(SaveWave.FileName, new NAudio.Wave.WaveFormat(44100, 24, Response.Length));
                    for (int j = 0; j < Response[0].Length; j++)
                    {
                        for (int c = 0; c < Response.Length; c++)
                        {
                            if (j > Response[c].Length - 1)
                            {
                                Writer.WriteSample(0);
                            }
                            else
                            {
                                Writer.WriteSample((float)Response[c][j]);
                            }
                        }
                    }
                    Writer.Close();
                    Writer.Dispose();
                }
            }
            private void Export_Signal_Click(object sender, EventArgs e)
            {
                SaveFileDialog SaveWave = new SaveFileDialog();

                SaveWave.Filter = " Wave Audio (*.wav) |*.wav";

                if (SaveWave.ShowDialog() == DialogResult.OK)
                {
                    NAudio.Wave.WaveFileWriter Writer = new NAudio.Wave.WaveFileWriter(SaveWave.FileName, new NAudio.Wave.WaveFormat(44100, 24, result_signals.Length));
                    for (int j = 0; j < result_signals[0].Length; j++)
                    {
                        for (int c = 0; c < result_signals.Length; c++)
                        {
                            if (j > result_signals[c].Length - 1)
                            {
                                Writer.WriteSample(0);
                            }
                            else
                            {
                                Writer.WriteSample((float)result_signals[c][j]);
                            }
                        }
                    }
                    Writer.Close();
                    Writer.Dispose();
                }
            }
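Both export methods above interleave a jagged per-channel array by hand, padding shorter channels with silence. A hedged helper that captures the pattern (the name WriteInterleaved and its signature are illustrative, not from the source):

            static void WriteInterleaved(NAudio.Wave.WaveFileWriter writer, double[][] channels)
            {
                // Find the longest channel so shorter ones can be zero-padded.
                int longest = 0;
                foreach (double[] ch in channels) longest = Math.Max(longest, ch.Length);

                for (int j = 0; j < longest; j++)
                    for (int c = 0; c < channels.Length; c++)
                        writer.WriteSample(j < channels[c].Length ? (float)channels[c][j] : 0f);
            }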
Example #4
        // record button handler
        private void buttonRecord_Click(object sender, EventArgs e)
        {
            // if nothing is being recorded yet
            if (wasRecored == false)
            {
                // if no input devices (microphone) were found
                if (listBoxDevices.SelectedItems.Count == 0)
                {
                    return;
                }

                // if no save path for the recording has been chosen, show a message
                if (fileRecordPath == "")
                {
                    MessageBox.Show("Wybierz miejsce w którym chcesz zapisać plik!");
                }
                else
                {
                    // record to the previously chosen file
                    int deviceNumber = listBoxDevices.SelectedIndex;

                    sourceStream = new NAudio.Wave.WaveIn();
                    sourceStream.DeviceNumber = deviceNumber;
                    sourceStream.WaveFormat   = new NAudio.Wave.WaveFormat(44100, NAudio.Wave.WaveIn.GetCapabilities(deviceNumber).Channels); // set the sample rate and take mono/stereo from the device's reported channel count

                    sourceStream.DataAvailable += new EventHandler <NAudio.Wave.WaveInEventArgs>(sourceStream_DataAvailable);
                    waveFileWriter              = new NAudio.Wave.WaveFileWriter(fileRecordPath, sourceStream.WaveFormat);

                    sourceStream.StartRecording();

                    buttonRecord.Text = "Nagrywanie...";
                    wasRecored        = true;
                }
            }
            else // something is already being recorded: stop the current recording and reset the button text
            {
                if (soundOut != null)
                {
                    soundOut.Stop();
                    soundOut.Dispose();
                    soundOut          = null;
                    buttonRecord.Text = "Nagraj";
                }
                if (sourceStream != null)
                {
                    sourceStream.StopRecording();
                    sourceStream.Dispose();
                    sourceStream      = null;
                    buttonRecord.Text = "Nagraj";
                }
                if (waveFileWriter != null)
                {
                    waveFileWriter.Dispose();
                    waveFileWriter    = null;
                    buttonRecord.Text = "Nagraj";
                }

                labelRecording.Text = "";
            }
        }
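NAudio's WaveIn, as used above, delivers its callbacks through a hidden window by default, which ties it to a GUI thread. A hedged variant of the same setup using WaveInEvent, which raises DataAvailable on a background thread instead (the local variable and mono format are illustrative):

                    var source = new NAudio.Wave.WaveInEvent { DeviceNumber = deviceNumber };
                    source.WaveFormat = new NAudio.Wave.WaveFormat(44100, 1); // mono here; pick up the device's channel count as above
                    source.DataAvailable += sourceStream_DataAvailable;
                    source.StartRecording();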
Example #5
        } // End Sub Main

        // https://ourcodeworld.com/articles/read/702/how-to-record-the-audio-from-the-sound-card-system-audio-with-c-using-naudio-in-winforms
        // https://stackoverflow.com/questions/18812224/c-sharp-recording-audio-from-soundcard
        static void TestAudioRecording()
        {
            // Define the output wav file of the recorded audio
            string outputFilePath = @"D:\username\Desktop\system_recorded_audio.wav";

            // Redefine the capturer instance with a new instance of the LoopbackCapture class
            NAudio.Wave.WasapiLoopbackCapture CaptureInstance = new NAudio.Wave.WasapiLoopbackCapture();

            // Redefine the audio writer instance with the given configuration
            NAudio.Wave.WaveFileWriter RecordedAudioWriter = new NAudio.Wave.WaveFileWriter(outputFilePath, CaptureInstance.WaveFormat);

            // When the capturer receives audio, start writing the buffer into the mentioned file
            CaptureInstance.DataAvailable += (s, a) =>
            {
                // Write buffer into the file of the writer instance
                RecordedAudioWriter.Write(a.Buffer, 0, a.BytesRecorded);
            };

            // When the Capturer Stops, dispose instances of the capturer and writer
            CaptureInstance.RecordingStopped += (s, a) =>
            {
                RecordedAudioWriter.Dispose();
                RecordedAudioWriter = null;
                CaptureInstance.Dispose();
            };

            // Start audio recording!
            CaptureInstance.StartRecording();


            System.Console.WriteLine(" --- Press any key to stop recording --- ");
            System.Console.ReadKey();
            CaptureInstance.StopRecording();
        } // End Sub TestAudioRecording
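Note that StopRecording returns before RecordingStopped has fired, so a console program that exits immediately can cut the disposal in the handler short. A hedged guard using a wait handle (ManualResetEventSlim is an addition, not part of the original):

            // Illustrative: subscribe before StartRecording(), then block after
            // StopRecording() until the disposing handler above has run.
            var stopped = new System.Threading.ManualResetEventSlim(false);
            CaptureInstance.RecordingStopped += (s, a) => stopped.Set();
            // ... CaptureInstance.StopRecording(); ...
            stopped.Wait();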
 // This ensures that all resources are correctly closed and disposed of when recording is stopped.
 private void stopRecording()
 {
     if (sourceStream != null)
     {
         if (waveOut != null) //stops sound from playing and disposes
         {
             waveOut.Stop();
             waveOut.Dispose();
             waveOut = null;
         }
         if (sourceStream != null) //stops sourcestream from recording and disposes
         {
             sourceStream.StopRecording();
             sourceStream.Dispose();
             sourceStream = null;
         }
         if (waveWriter != null)
         {
             waveWriter.Dispose();
             waveWriter = null;
         }
         isRecording = false;
         return;
     }
 }
Example #7
        private void btnStopSpeech_Click(object sender, EventArgs e)
        {
            this.micClient.EndMicAndRecognition();
            //this.micClient = null;
            boxSpeech.Text    = "";
            btnSpeech.Enabled = true;


            if (waveOut != null)
            {
                waveOut.Stop();
                waveOut.Dispose();
                waveOut = null;
            }
            if (sourceStream != null)
            {
                sourceStream.StopRecording();
                sourceStream.Dispose();
                sourceStream = null;
            }
            if (waveWriter != null)
            {
                waveWriter.Dispose();
                waveWriter = null;
            }

            string _selectedFile = "C:\\Users\\Mac\\Desktop\\check.wav";

            if (identify)
            {
                identifySpeaker(_selectedFile);
            }
        }
 /// <summary>
 /// Writes the wave file.
 /// </summary>
 /// <param name="inputArray">The input array.</param>
 void WriteFile(short[] inputArray, string filePath)
 {
     NAudio.Wave.WaveFormat     waveFormat = new NAudio.Wave.WaveFormat(44100, 16, 1);
     NAudio.Wave.WaveFileWriter writer     = new NAudio.Wave.WaveFileWriter(filePath, waveFormat);
     writer.WriteSamples(inputArray, 0, inputArray.Length);
     writer.Flush();
     writer.Dispose();
 }
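A quick usage sketch for WriteFile; the tone parameters and output path are illustrative:

 // One second of a 440 Hz sine at half amplitude, matching the 44.1 kHz mono 16-bit format above.
 short[] tone = new short[44100];
 for (int i = 0; i < tone.Length; i++)
 {
     tone[i] = (short)(0.5 * short.MaxValue * Math.Sin(2 * Math.PI * 440 * i / 44100.0));
 }
 WriteFile(tone, @"C:\temp\tone.wav");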
Example #9
 private void stopRecordingButton_Click(object sender, EventArgs e)
 {
     if (waveWriter != null)
     {
         messageBox.Text += "Stop recording";
         waveWriter.Dispose();
         waveWriter = null;
     }
 }
Example #10
        //Event args to stop recording events
        private void wavSource_RecordingStop(object sender, NAudio.Wave.StoppedEventArgs e)
        {
            if (wavSource != null)
            {
                wavSource.Dispose();
                wavSource = null;
            }

            if (wavFile != null)
            {
                wavFile.Dispose();
                wavFile = null;
            }
            //recBtn.Enabled = true;
        }
Example #11
        public void StopRecording()
        {
            try
            {
                if (waveOut != null)
                {
                    waveOut.Stop();
                    waveOut.Dispose();
                    waveOut = null;
                }
                try
                {
                    if (sourceStream != null)
                    {
                        //th.Abort();
                        sourceStream.StopRecording();
                        sourceStream.Dispose();
                        sourceStream = null;
                    }
                }
                catch (Exception e)
                {
                    Console.WriteLine(e.Message);
                }

                try
                {
                    if (waveWriter != null)
                    {
                        waveWriter.Dispose();
                        waveWriter = null;
                    }
                }
                catch (Exception e)
                {
                    Console.WriteLine(e.Message);
                }
            }
            catch (Exception e2)
            {
                Console.WriteLine(e2.Message);
            }
        }
Example #12
File: Recorder.cs Project: lex4all/lex4all
        /// <summary>
        /// when recording is stopped the current input stream and filewriter are discarded
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        void waveIn_RecordingStopped(object sender, NAudio.Wave.StoppedEventArgs e)
        {
            if (waveIn != null)
            {
                waveIn.Dispose();
                waveIn            = new NAudio.Wave.WaveIn();
                waveIn.WaveFormat = new NAudio.Wave.WaveFormat(8000, 1);

                waveIn.DataAvailable += new EventHandler <NAudio.Wave.WaveInEventArgs>(waveIn_DataAvailableVolume);
                aggregator            = new SampleAggregator();
                waveIn.StartRecording();
            }

            if (waveFile != null)
            {
                waveFile.Dispose();
                waveFile = null;
            }
        }
Example #13
 private void StopAudioRecording()
 {
     if (waveOut != null)
     {
         waveOut.Stop();
         waveOut.Dispose();
         waveOut = null;
     }
     if (sourceStream != null)
     {
         sourceStream.StopRecording();
         sourceStream.Dispose();
         sourceStream = null;
     }
     if (waveWriter != null)
     {
         waveWriter.Dispose();
         waveWriter = null;
     }
 }
Example #14
 public void StopRecording()
 {
     if (m_oWaveOut != null)
     {
         m_oWaveOut.Stop();
         m_oWaveOut.Dispose();
         m_oWaveOut = null;
     }
     if (m_oSourceStream != null)
     {
         m_oSourceStream.StopRecording();
         m_oSourceStream.Dispose();
         m_oSourceStream = null;
     }
     if (m_oWaveWriter != null)
     {
         m_oWaveWriter.Dispose();
         m_oWaveWriter = null;
     }
 }
Example #15
File: Form2.cs Project: Roman2017-2018/-9
 private void button3_Click(object sender, EventArgs e)
 {
     if (waveOut != null)
     {
         waveOut.Stop();
         waveOut.Dispose();
         waveOut = null;
     }
     if (sourceStream != null)
     {
         sourceStream.StopRecording();
         sourceStream.Dispose();
         sourceStream = null;
     }
     if (waveWriter != null)
     {
         waveWriter.Dispose();
         waveWriter = null;
     }
 }
Example #16
 private void StopRecording()
 {
     btnRecord.Content = "Record";
     if (waveOut != null)
     {
         waveOut.Stop();
         waveOut.Dispose();
     }
     if (SourceStream != null)
     {
         SourceStream.StopRecording();
         SourceStream.Dispose();
     }
     if (waveWriter != null)
     {
         waveWriter.Dispose();
         waveWriter = null;
     }
     if (output != null)
     {
         if (output.PlaybackState == NAudio.Wave.PlaybackState.Playing || output.PlaybackState == NAudio.Wave.PlaybackState.Paused)
         {
             output.Stop();
             output.Dispose();
             output = null;
         }
     }
      if (stopWatch.IsRunning)
      {
          stopWatch.Stop();
      }
      currentTime = String.Format("{0:00}:{1:00}:{2:00}.{3:00}",
                                  0, 0, 0, 0);
      //txtBox.Text = currentTime;
 }
Example #17
 private void btnStop_Click(object sender, RoutedEventArgs e)
 {
     if (waveOut != null)
     {
         waveOut.Stop();
         waveOut.Dispose();
     }
     if (SourceStream != null)
     {
         SourceStream.StopRecording();
         SourceStream.Dispose();
     }
     if (waveWriter != null)
     {
         waveWriter.Dispose();
         waveWriter = null;
     }
     if (output != null)
     {
         if (output.PlaybackState == NAudio.Wave.PlaybackState.Playing || output.PlaybackState == NAudio.Wave.PlaybackState.Paused)
         {
             output.Stop();
             output.Dispose();
             output = null;
         }
     }
      if (stopWatch.IsRunning)
      {
          stopWatch.Stop();
      }
      currentTime = String.Format("{0:00}:{1:00}:{2:00}.{3:00}",
                                  0, 0, 0, 0);
      txtBox.Text = currentTime;
 }
Example #18
        //Stops the recording process used to start translating
        private void button3_Click(object sender, EventArgs e)
        {
            label1.Text = "Stop Recording";
            if (wavout != null)
            {
                wavout.Stop();
                wavout.Dispose();
                wavout = null;
            }
            if (sourcestream != null)
            {
                sourcestream.StopRecording();
                sourcestream.Dispose();
                sourcestream = null;
            }

            if (wavewriter != null)
            {
                wavewriter.Dispose();
                wavewriter = null;
            }
        }
        public override void HotkeyTriggered()
        {
            if (recorder == null)
            {
                if (audioDevice.Recorders.Count > 0)
                {
                    recorder = audioDevice.Recorders[0];
                }
            }

            if (recorder == null)
            {
                return;
            }

            int count = recorder.RecordedBytes.Count;

            byte[] audioData = recorder.RecordedBytes.Copy();
            NAudio.Wave.WaveFileWriter waveWriter = new NAudio.Wave.WaveFileWriter(ExtraData1, recorder.WaveSource.WaveFormat);
            waveWriter.Write(audioData, 0, count);
            waveWriter.Flush();
            waveWriter.Dispose();
        }
            private void RenderBtn_Click(object sender, System.EventArgs e)
            {
                if ((SelectedSources().Count < 1) || ((string)Receiver_Choice.Text == "No Results Calculated..."))
                {
                    Rhino.RhinoApp.WriteLine("Select Source and Receiver objects to render");
                    return;
                }

                double[] SignalBuffer;
                int SamplesPerSec;
                this.OpenWaveFile(out SamplesPerSec, out SignalBuffer);
                if (!IsRendered())
                {
                    Response = Pach_SP.Expand_Response(Direct_Data, IS_Data, Receiver, (double)(CO_TIME.Value / 1000), SamplesPerSec, int.Parse(Receiver_Choice.Text), SelectedSources(), 24);
                    SrcRendered = new int[SourceList.CheckedIndices.Count];
                    for(int i = 0 ; i < SourceList.CheckedIndices.Count; i++)
                    {
                        SrcRendered[i] = SourceList.CheckedIndices[i];
                    }
                    RecRendered = int.Parse(Receiver_Choice.Text);
                }
                float[] NewSignal = Pach_SP.FFT_Convolution(SignalBuffer, Response);

                SaveFileDialog SaveWave = new SaveFileDialog();
                SaveWave.Filter = " Wave Audio (*.wav) |*.wav";

                if (SaveWave.ShowDialog() == DialogResult.OK)
                {
                    NAudio.Wave.WaveFileWriter Writer = new NAudio.Wave.WaveFileWriter(SaveWave.FileName, new NAudio.Wave.WaveFormat(SamplesPerSec, 24, 1));
                    
                    for (int j = 0; j < NewSignal.Length; j++)
                    {
                        Writer.WriteSample(NewSignal[j]);
                    }
                    Writer.Close();
                    Writer.Dispose();
                    System.Media.SoundPlayer Player = new System.Media.SoundPlayer(SaveWave.FileName);
                    Player.Play();
                }
            }
Example #21
 private void Capture_RecordingStopped(object sender, NAudio.Wave.StoppedEventArgs e)
 {
     _waveFileWriter?.Dispose();
     _waveFileWriter = null;
     _capture.Dispose();
 }
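This is the same teardown as the longer examples above, compressed with the C# 6 null-conditional operator: _waveFileWriter?.Dispose() only calls Dispose when the writer is non-null, replacing the explicit if (waveWriter != null) blocks seen elsewhere on this page.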
Example #22
        private async void StopRec()
        {
            panel1.Visible = false;
            label1.Visible = false;
            label2.Visible = true;
            if (waveOut != null)
            {
                waveOut.Stop();
                waveOut.Dispose();
                waveOut = null;
            }
            if (sourceStream != null)
            {
                sourceStream.StopRecording();
                sourceStream.Dispose();
                sourceStream = null;
            }
            if (waveWriter != null)
            {
                waveWriter.Dispose();
                waveWriter = null;
            }
            await Task.Delay(30); // let the wav audio file be created properly.

            int    senti;
            string line = Recognition();

            // line.TrimEnd(' ');
            line           = line + ".";
            senti          = nlp.SentiAnalysis(line);
            Console.Text   = "\n" + line;
            label2.Visible = false;
            if (senti >= 2)
            {
                SentiBox.Text = senti.ToString();
                nlp.SentenceParser(line);
                string[] depText = nlp.dependency.Split(new string[] { ")," }, StringSplitOptions.None);

                foreach (string s in depText)
                {
                    BasicDep.Text += "\r\n" + s + ")";
                }



                foreach (KeyValuePair <string, string> tt in nlp.propsUsed)
                {
                    ProcessedBox.Text += "\r\nKey is : " + tt.Key + " value is :" + tt.Value;
                }
                DrawImage(nlp.propsUsed, nlp.key);
                nlp.propsUsed.Clear();
            }
            else
            {
                ProcessedBox.Text += "\n Sentiment is negative.";
                SentiBox.Text      = senti.ToString();
            }
            button1.Enabled = true;

            nlp.key = "";
        }
            private void RenderBtn_Click(object sender, System.EventArgs e)
            {
                if (Response == null || Response.Length == 0)
                {
                    Rhino.RhinoApp.WriteLine("No impulse response found to render...");
                    return;
                }

                int SamplesPerSec;
                double[] SignalBuffer;
                OpenWaveFile(out SamplesPerSec, out SignalBuffer);

                float maxvalue = 0;
                //Normalize input signal...
                for (int j = 0; j < SignalBuffer.Length; j++) maxvalue = (float)Math.Max(maxvalue, Math.Abs(SignalBuffer[j]));
                for (int j = 0; j < SignalBuffer.Length; j++) SignalBuffer[j] /= maxvalue;
                //Convert pressure response to a 24-bit dynamic range:
                //double mod24 = Math.Pow(10, -50 / 10);
                //for (int i = 0; i < Response.Length; i++) for(int j = 0; j < Response[i].Length; j++) Response[i][j] *= mod24;

                float[][] NewSignal = new float[(int)Response.Length][];
                for (int i = 0; i < Response.Length; i++)
                {
                    NewSignal[i] = Pach_SP.FFT_Convolution(SignalBuffer, Response[i], 0);
                    for (int j = 0; j < NewSignal[i].Length; j++) NewSignal[i][j] *= 1E-5f;
                }

                SrcRendered = new int[SourceList.CheckedIndices.Count];
                for (int j = 0; j < SourceList.CheckedIndices.Count; j++)
                {
                    SrcRendered[j] = SourceList.CheckedIndices[j];
                }
                RecRendered = int.Parse(Receiver_Choice.Text);
                SFreq_Rendered = SamplesPerSec;

                SaveFileDialog SaveWave = new SaveFileDialog();
                if (NewSignal.Length < 4)
                {
                    SaveWave.Filter = " Wave Audio (*.wav) |*.wav";
                }
                else 
                {
                    SaveWave.Filter = "Extended Wave Audio (*.wavex) |*.wavex";
                }

                if (SaveWave.ShowDialog() == DialogResult.OK)
                {
                    if (Response == null || Response.Length == 0)
                    {
                        Rhino.RhinoApp.WriteLine("No impulse response found to render...");
                        return;
                    }
                    NAudio.Wave.WaveFileWriter Writer = new NAudio.Wave.WaveFileWriter(SaveWave.FileName, new NAudio.Wave.WaveFormat(SamplesPerSec, 24, NewSignal.Length));

                    for (int j = 0; j < NewSignal[0].Length; j++)
                    {
                        for (int i = 0; i < Channel_View.Items.Count; i++) Writer.WriteSample(NewSignal[i][j]);
                    }
                    Writer.Close();
                    Writer.Dispose();
                    System.Media.SoundPlayer Player = new System.Media.SoundPlayer(SaveWave.FileName);
                    Player.Play();
                }
            }
            private void ExportFilter(object sender, EventArgs e)
            {
                SaveFileDialog SaveWave = new SaveFileDialog();
                if (Response.Length < 4)
                {
                    SaveWave.Filter = " Wave Audio (*.wav) |*.wav";
                }
                else 
                {
                    SaveWave.Filter = "Extended Wave Audio (*.wavex) |*.wavex";
                }

                if (SaveWave.ShowDialog() == DialogResult.OK)
                {
                    NAudio.Wave.WaveFileWriter Writer = new NAudio.Wave.WaveFileWriter(SaveWave.FileName, new NAudio.Wave.WaveFormat(44100, 24, Response.Length));
                    for (int j = 0; j < Response[0].Length; j++)
                    {
                        for (int c = 0; c < Response.Length; c++) if (j > Response[c].Length - 1) Writer.WriteSample(0); else Writer.WriteSample((float)Response[c][j]);
                    }
                    Writer.Close();
                    Writer.Dispose();
                }
            }
Example #25
            private void RenderBtn_Click(object sender, System.EventArgs e)
            {
                if (Response == null || Response.Length == 0)
                {
                    Rhino.RhinoApp.WriteLine("No impulse response found to render...");
                    return;
                }

                int SamplesPerSec;

                double[] SignalBuffer;
                OpenWaveFile(out SamplesPerSec, out SignalBuffer);

                float maxvalue = 0;

                //Normalize input signal...
                for (int j = 0; j < SignalBuffer.Length; j++)
                {
                    maxvalue = (float)Math.Max(maxvalue, Math.Abs(SignalBuffer[j]));
                }
                for (int j = 0; j < SignalBuffer.Length; j++)
                {
                    SignalBuffer[j] /= maxvalue;
                }
                //Convert pressure response to a 24-bit dynamic range:
                //double mod24 = Math.Pow(10, -50 / 10);
                //for (int i = 0; i < Response.Length; i++) for(int j = 0; j < Response[i].Length; j++) Response[i][j] *= mod24;

                float[][] NewSignal = new float[(int)Response.Length][];
                for (int i = 0; i < Response.Length; i++)
                {
                    NewSignal[i] = Pach_SP.FFT_Convolution(SignalBuffer, Response[i], 0);
                    for (int j = 0; j < NewSignal[i].Length; j++)
                    {
                        NewSignal[i][j] *= 1E-5f;
                    }
                }

                SrcRendered = new int[SourceList.CheckedIndices.Count];
                for (int j = 0; j < SourceList.CheckedIndices.Count; j++)
                {
                    SrcRendered[j] = SourceList.CheckedIndices[j];
                }
                RecRendered    = int.Parse(Receiver_Choice.Text);
                SFreq_Rendered = SamplesPerSec;

                SaveFileDialog SaveWave = new SaveFileDialog();

                if (NewSignal.Length < 4)
                {
                    SaveWave.Filter = " Wave Audio (*.wav) |*.wav";
                }
                else
                {
                    SaveWave.Filter = "Extended Wave Audio (*.wavex) |*.wavex";
                }

                if (SaveWave.ShowDialog() == DialogResult.OK)
                {
                    if (Response == null || Response.Length == 0)
                    {
                        Rhino.RhinoApp.WriteLine("No impulse response found to render...");
                        return;
                    }
                    NAudio.Wave.WaveFileWriter Writer = new NAudio.Wave.WaveFileWriter(SaveWave.FileName, new NAudio.Wave.WaveFormat(SamplesPerSec, 24, NewSignal.Length));

                    for (int j = 0; j < NewSignal[0].Length; j++)
                    {
                        for (int i = 0; i < Channel_View.Items.Count; i++)
                        {
                            Writer.WriteSample(NewSignal[i][j]);
                        }
                    }
                    Writer.Close();
                    Writer.Dispose();
                    System.Media.SoundPlayer Player = new System.Media.SoundPlayer(SaveWave.FileName);
                    Player.Play();
                }
            }
            private void saveVRSpectraToolStripMenuItem_Click(object sender, EventArgs e)
            {
                SaveFileDialog SaveWave = new SaveFileDialog();
                SaveWave.Filter = " Pachyderm VR (*.pacvr) |*.pacvr";
                if (SaveWave.ShowDialog() != DialogResult.OK) return;

                System.IO.StreamWriter sw = new System.IO.StreamWriter(System.IO.File.Open(SaveWave.FileName, System.IO.FileMode.Create));

                //A new standard...
                //1. Write pachyderm version
                sw.Write(PachydermAc_PlugIn.Instance.Version);
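                //NOTE: sw is never closed in this method, so it still holds SaveWave.FileName
                //open when the WaveFileWriter below attempts to create the same file.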

                double[] dresponse = Pach_SP.Expand_Response(Direct_Data, IS_Data, Receiver, (double)CO_TIME.Value / 1000, SampleRate, 0, new List<int>{0}, 24);//RecID, SrcIDs,
                float[] Response = new float[dresponse.Length];

                for (int i = 0; i < dresponse.Length; i++)
                {
                    Response[i] = (float)dresponse[i];
                }

                NAudio.Wave.WaveFileWriter Writer = new NAudio.Wave.WaveFileWriter(SaveWave.FileName, new NAudio.Wave.WaveFormat(44100, 24, 1));//, 44100, 1, 16
                for (int j = 0; j < Response.Length; j++)
                {
                    Writer.WriteSample(Response[j]);
                }
                Writer.Close();
                Writer.Dispose();
            }
            public void Save_IR(List<int> SrcIDs, int RecID, string Path)
            {
                double[] dresponse = Pach_SP.Expand_Response(Direct_Data, IS_Data, Receiver, (double)CO_TIME.Value / 1000, SampleRate, RecID, SrcIDs, 24);
                float[] Response = new float[dresponse.Length];

                for (int i = 0; i < dresponse.Length; i++)
                {
                    Response[i] = (float)dresponse[i];
                }

                NAudio.Wave.WaveFileWriter Writer = new NAudio.Wave.WaveFileWriter(Path, new NAudio.Wave.WaveFormat(44100, 24, 1));
                for (int j = 0; j < Response.Length; j++)
                {
                    Writer.WriteSample(Response[j]);
                }
                Writer.Close();
                Writer.Dispose();
            }
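A hedged usage sketch for Save_IR; the source list, receiver ID, and output path are illustrative:

                // Render source 0 to receiver 0 and save its impulse response to disk.
                Save_IR(new List<int> { 0 }, 0, @"C:\temp\impulse_response.wav");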
            private void Export_Signal_Click(object sender, EventArgs e)
            {
                SaveFileDialog SaveWave = new SaveFileDialog();

                SaveWave.Filter = " Wave Audio (*.wav) |*.wav";

                if (SaveWave.ShowDialog() == DialogResult.OK)
                {
                    NAudio.Wave.WaveFileWriter Writer = new NAudio.Wave.WaveFileWriter(SaveWave.FileName, new NAudio.Wave.WaveFormat(44100, 24, result_signals.Length));
                    for (int j = 0; j < result_signals[0].Length; j++)
                    {
                        for (int c = 0; c < result_signals.Length; c++) if (j > result_signals[c].Length - 1) Writer.WriteSample(0); else Writer.WriteSample((float)result_signals[c][j]);
                    }
                    Writer.Close();
                    Writer.Dispose();
                }
            }