// "Record" button handler: starts capturing audio from the default input device into a WAV file.
private void button_rec_Click(object sender, EventArgs e)
{
    button_stop.Enabled = true;
    timer.Start();
    ind = 1;
    try
    {
        waveIn = new WaveIn();
        waveIn.DeviceNumber = 0; // default recording device (if one is present)
        waveIn.DataAvailable += waveIn_DataAvailable; // fired whenever captured data is available
        waveIn.RecordingStopped += waveIn_RecordingStopped; // fired when recording stops
        waveIn.WaveFormat = new WaveFormat(8000, 1); // WAV format: sample rate and channel count (mono here)
        writer = new WaveFileWriter(outputFilename, waveIn.WaveFormat); // initialize the WaveFileWriter
        waveIn.StartRecording(); // begin recording
        button_play.Enabled = false;
        button_rec.Enabled = false;
        numeric.Enabled = false;
    }
    catch (Exception ex)
    {
        // On failure, re-enable the controls and show the error.
        // NOTE(review): button_stop stays enabled even when starting failed — confirm intended.
        button_play.Enabled = true;
        button_rec.Enabled = true;
        numeric.Enabled = true;
        MessageBox.Show(ex.Message);
    }
}
/// <summary>
/// Creates the recorder: wires up a WaveIn capture device (device 0, 16 kHz / 16-bit mono)
/// and opens a uniquely named temporary WAV file under C:\temp for writing.
/// </summary>
public Recorder()
{
    int waveInDevices = WaveIn.DeviceCount;
    //for (int waveInDevice = 0; waveInDevice < waveInDevices; waveInDevice++)
    //{
    //    WaveInCapabilities deviceInfo = WaveIn.GetCapabilities(waveInDevice);
    //    comboBox1.Items.Add(string.Format("Device {0}: {1}, {2} channels", waveInDevice, deviceInfo.ProductName, deviceInfo.Channels));
    //}
    waveIn = new WaveIn { DeviceNumber = 0 };
    waveIn.DataAvailable += waveIn_DataAvailable;
    waveIn.RecordingStopped += waveIn_RecordingStopped;

    // Capture format: 16 kHz sample rate, 16 bits per sample, mono.
    recordingFormat = new WaveFormat(16000, 16, 1);
    waveIn.WaveFormat = recordingFormat;

    // Ensure the temp directory exists, then open a GUID-named WAV file for writing.
    string path = "C:\\temp";
    if (!Directory.Exists(path))
    {
        Directory.CreateDirectory(path);
    }
    TempWavFileName = String.Format("{0}\\{1}.wav", path, Guid.NewGuid().ToString());
    writer = new WaveFileWriter(TempWavFileName, recordingFormat);
}
/// <summary>
/// Sets up the capture stream on input device 0: 50 ms buffers,
/// 44.1 kHz / 16-bit with the device's native channel count.
/// </summary>
public void InitializeStream()
{
    sourceStream = new WaveIn
    {
        BufferMilliseconds = 50,
        DeviceNumber = 0,
        WaveFormat = new WaveFormat(44100, 16, WaveIn.GetCapabilities(0).Channels)
    };
}
//TODO wrap WaveIn to allow DI
/// <summary>
/// Binds this instance to a WaveIn capture driver: subscribes to its data event,
/// applies the requested format and builds per-channel buffer/device metadata.
/// </summary>
/// <param name="format">Capture format to apply to the driver.</param>
/// <param name="driver">WaveIn device instance to record from.</param>
/// <exception cref="ArgumentNullException">When <paramref name="driver"/> or <paramref name="format"/> is null.</exception>
public void Initialise(WaveFormat format, WaveIn driver)
{
    // Guard clauses use nameof() so parameter renames stay refactor-safe
    // (the original used string literals "driver"/"format").
    if (driver == null)
    {
        throw new ArgumentNullException(nameof(driver), "Must specify a WaveIn device instance");
    }
    if (format == null)
    {
        throw new ArgumentNullException(nameof(format), "Must specify an audio format");
    }
    this.driver = driver;
    driver.DataAvailable += device_DataAvailable;
    var caps = WaveIn.GetCapabilities(driver.DeviceNumber);
    driver.WaveFormat = format;
    device = new WaveInDeviceData
    {
        Driver = driver,
        Name = caps.ProductName,
        Channels = caps.Channels,
        Buffers = new float[caps.Channels][]
    };
    // Downstream processing is 32-bit float: one combined format plus a mono per-line format.
    Format = WaveFormat.CreateIeeeFloatWaveFormat(format.SampleRate, device.Channels);
    formatPerLine = WaveFormat.CreateIeeeFloatWaveFormat(format.SampleRate, 1);
    mapInputs(device.Channels);
}
// Configures the Opus encode/decode pipeline and starts full-duplex audio:
// 48 kHz / 16-bit mono capture feeding the encoder, a buffered playback path,
// and a one-second stats timer.
void StartEncoding()
{
    _startTime = DateTime.Now;
    _bytesSent = 0;
    _segmentFrames = 960;

    _encoder = new OpusEncoder(48000, 1, OpusNet.OpusApplication.Voip);
    _encoder.Bitrate = 8192;
    _decoder = new OpusDecoder(48000, 1);
    _bytesPerSegment = _encoder.FrameByteCount(_segmentFrames);

    _waveIn = new WaveIn(WaveCallbackInfo.FunctionCallback())
    {
        BufferMilliseconds = 50,
        DeviceNumber = comboBox1.SelectedIndex,
        WaveFormat = new WaveFormat(48000, 16, 1)
    };
    _waveIn.DataAvailable += _waveIn_DataAvailable;

    _playBuffer = new BufferedWaveProvider(new WaveFormat(48000, 16, 1));
    _waveOut = new WaveOut(WaveCallbackInfo.FunctionCallback())
    {
        DeviceNumber = comboBox2.SelectedIndex
    };
    _waveOut.Init(_playBuffer);
    _waveOut.Play();
    _waveIn.StartRecording();

    // Lazily create the stats timer on first start; restart it either way.
    if (_timer == null)
    {
        _timer = new Timer { Interval = 1000 };
        _timer.Tick += _timer_Tick;
    }
    _timer.Start();
}
/// <summary>
/// Builds the capture device chosen in the UI — classic WaveIn, event-driven WaveInEvent,
/// a WASAPI capture endpoint, or WASAPI loopback — and wires the shared data/stop handlers.
/// </summary>
private IWaveIn CreateWaveInDevice()
{
    IWaveIn newWaveIn;
    if (radioButtonWaveIn.Checked)
    {
        newWaveIn = new WaveIn { WaveFormat = new WaveFormat(8000, 1) };
    }
    else if (radioButtonWaveInEvent.Checked)
    {
        newWaveIn = new WaveInEvent { WaveFormat = new WaveFormat(8000, 1) };
    }
    else if (radioButtonWasapi.Checked)
    {
        // WASAPI performs no sample-rate conversion, so WaveFormat is left at the endpoint default.
        var device = (MMDevice)comboWasapiDevices.SelectedItem;
        newWaveIn = new WasapiCapture(device);
    }
    else
    {
        // Loopback capture likewise uses the endpoint's own format.
        newWaveIn = new WasapiLoopbackCapture();
    }
    newWaveIn.DataAvailable += OnDataAvailable;
    newWaveIn.RecordingStopped += OnRecordingStopped;
    return newWaveIn;
}
/// <summary>
/// Form startup: creates two NaCl clients with freshly generated key pairs, then starts
/// full-duplex audio — a WaveIn capture (12 kHz / 8-bit mono) feeding DataAvailable,
/// and a buffered WaveOut playback path with 100 ms latency.
/// </summary>
private void Form1_Load(object sender, EventArgs e)
{
    byte[] apk, ask, bpk, bsk;
    NaClClient.CreateKeys(out apk, out ask);
    NaClClient.CreateKeys(out bpk, out bsk);
    // Removed an unused SHA256 instance the original created here: it was never
    // referenced and, being IDisposable, was also never disposed.
    // Each client gets its own secret key plus the peer's public key.
    _clientA = NaClClient.Create(apk, ask, bpk);
    _clientB = NaClClient.Create(bpk, bsk, apk);
    _sw = new Stopwatch();
    _sw.Start();
    _wave = new WaveIn(this.Handle);
    _wave.WaveFormat = new WaveFormat(12000, 8, 1);
    _wave.BufferMilliseconds = 100;
    _wave.DataAvailable += _wave_DataAvailable;
    _wave.StartRecording();
    _playback = new BufferedWaveProvider(_wave.WaveFormat);
    _waveOut = new WaveOut();
    _waveOut.DesiredLatency = 100;
    _waveOut.Init(_playback);
    _waveOut.Play();
}
/// <summary>
/// Builds the sequence-recorder dialog: enumerates capture devices into the microphone
/// list, then opens device 0 with the conductor's wave format and starts recording.
/// </summary>
public SequenceRecorder(Conductor cond, KeyboardConfiguration key)
{
    conductor = cond;
    keyboard = key;
    InitializeComponent();

    // Populate the device picker with "index: name" entries.
    int deviceCount = NAudio.Wave.WaveIn.DeviceCount;
    for (int i = 0; i < deviceCount; i++)
    {
        NAudio.Wave.WaveInCapabilities caps = NAudio.Wave.WaveIn.GetCapabilities(i);
        microphoneList.Items.Add(i + ": " + caps.ProductName);
    }
    microphoneList.SelectedIndex = 0;
    sensitivity.SelectedIndex = 0;

    sourceStream = new NAudio.Wave.WaveIn();
    sourceStream.DeviceNumber = 0;
    sourceStream.WaveFormat = conductor.GetWaveFormat();
    // Derive frame sizes from the negotiated format.
    bytesPerChannel = sourceStream.WaveFormat.BitsPerSample / 8;
    bytesPerSample = bytesPerChannel * sourceStream.WaveFormat.Channels;
    sourceStream.DataAvailable += sourceStream_DataAvailable;
    sourceStream.StartRecording();
}
// Cleans up capture, writer, playback and reader resources in a safe order.
private void DisposeWave()
{
    if (sourceStream != null)
    {
        sourceStream.StopRecording();
        sourceStream.Dispose();
        sourceStream = null;
    }
    if (waveWriter != null)
    {
        waveWriter.Dispose();
        waveWriter = null;
    }
    if (waveOut != null)
    {
        // Stop playback BEFORE disposing; the original called Stop() a second time
        // AFTER Dispose(), which is a use-after-dispose.
        if (waveOut.PlaybackState == NAudio.Wave.PlaybackState.Playing)
        {
            waveOut.Stop();
        }
        waveOut.Dispose();
        waveOut = null;
    }
    if (waveReader != null)
    {
        // Close before Dispose — the original disposed first and then called Close
        // on an already-disposed reader.
        waveReader.Close();
        waveReader.Dispose();
        waveReader = null;
    }
}
// Record button: asks for a .wav destination, then starts capturing the selected device to it.
private void button5_Click(object sender, EventArgs e)
{
    if (SourceList.SelectedItems.Count == 0)
    {
        return;
    }
    SaveFileDialog save = new SaveFileDialog();
    // Fixed malformed filter string: the original was "Wave File (*.wav|*.wav;" —
    // missing the closing parenthesis and breaking the "description|pattern" pairing.
    save.Filter = "Wave File (*.wav)|*.wav";
    if (save.ShowDialog() != System.Windows.Forms.DialogResult.OK)
    {
        return;
    }
    int deviceNumber = SourceList.SelectedItems[0].Index;
    // Capture at 44.1 kHz using the device's native channel count.
    sourceStream = new NAudio.Wave.WaveIn();
    sourceStream.DeviceNumber = deviceNumber;
    sourceStream.WaveFormat = new NAudio.Wave.WaveFormat(44100, NAudio.Wave.WaveIn.GetCapabilities(deviceNumber).Channels);
    sourceStream.DataAvailable += new EventHandler<NAudio.Wave.WaveInEventArgs>(sourceStream_DataAvailable);
    waveWriter = new NAudio.Wave.WaveFileWriter(save.FileName, sourceStream.WaveFormat);
    sourceStream.StartRecording();
}
// Starts recording from the device selected in devices_cbx into the configured WAV file path.
private void startRecordSound()
{
    int deviceNumber = devices_cbx.SelectedIndex;
    // NOTE(review): sampleRate is read from the SAME combo box as deviceNumber —
    // this looks like a copy/paste bug (a dedicated sample-rate combo was probably intended); confirm.
    int sampleRate = devices_cbx.SelectedIndex;
    if (sampleRate >= 0 && deviceNumber >= 0 && sampleRate < Constant.TextSampleRate.Count <string>())
    {
        sourceStream = new NAudio.Wave.WaveIn();
        sourceStream.DeviceNumber = deviceNumber;
        // Sample rate looked up from the Constant table; channel count from the device capabilities.
        sourceStream.WaveFormat = new NAudio.Wave.WaveFormat(Constant.SampleRate[sampleRate], NAudio.Wave.WaveIn.GetCapabilities(deviceNumber).Channels);
        //NAudio.Wave.WaveInProvider waveIn = null;
        //waveIn = new NAudio.Wave.WaveInProvider(sourceStream);
        pathFile = VCDir.Instance.PathWaveFile;
        sourceStream.DataAvailable += new EventHandler <NAudio.Wave.WaveInEventArgs>(sourceStream_DataAvailable);
        waveWriter = new NAudio.Wave.WaveFileWriter(pathFile, sourceStream.WaveFormat);
        sourceStream.StartRecording();
        // waveOut.Play();
    }
    else
    {
        // NOTE(review): an empty message box gives the user no information — consider real error text.
        MessageBox.Show("");
        return;
    }
}
// Begins a new recording session: disables navigation, opens an 8 kHz / 16-bit mono
// capture writing to <lvlStr>.wav, and resets the waveform-display state.
private void startRecording()
{
    ReadAudioForm3.backButton.Enabled = false;
    ReadAudioForm3.submitButton.Enabled = false;

    wavSource = new WaveIn();
    wavSource.DataAvailable += wavSource_DataAvailable;
    wavSource.RecordingStopped += wavSource_RecordingStopped;
    //wavSource.WaveFormat = new WaveFormat(44100, 1);
    wavSource.WaveFormat = new WaveFormat(8000, 16, 1);

    String filename = @"C:\Pres_Proto\V2\MetroFrameworkDLLExample\RecordWAV\" + lvlStr + ".wav";
    wavFile = new WaveFileWriter(filename, wavSource.WaveFormat);

    // Waveform display: a polyline constrained to the canvas minus a 4 px margin.
    canvasHeight = waveCanvas.Height;
    canvasWidth = waveCanvas.Width;
    polyLine = new Polyline
    {
        Stroke = Brushes.Black,
        StrokeThickness = 1,
        Name = "waveform",
        MaxHeight = canvasHeight - 4,
        MaxWidth = canvasWidth - 4
    };
    polyHeight = polyLine.MaxHeight;
    polyWidth = polyLine.MaxWidth;

    // Reset the sample/point accumulators for the fresh take.
    counter = 0;
    dispPoints = new Queue <Point>();
    totalBytes = new List <byte>();
    dispShots = new Queue <Int32>();
    wavSource.StartRecording();
}
// Panel load handler: builds the ILNumerics scene (two line plots inside a log-log plot
// cube) and starts streaming microphone audio into waveInStream_DataAvailable.
private void ilPanel1_Load(object sender, EventArgs e)
{
    m_shutdown = false;
    // setup the scene
    ilPanel1.Scene.Add(new ILPlotCube(twoDMode: false)
    {
        Children = {
            // create two line plots: the first is used to display the data itself ...
            new ILLinePlot(0, DefaultLinePlotTag, Color.Magenta, lineWidth: 1),
            // .. the second is used for marking magnitude peaks, it gets the line hidden
            new ILLinePlot(0, DefaultMarkerPlotTag, markerStyle:MarkerStyle.Square) { Line = { Visible = false }}
        },
        // we want both axes in logarithmic scale
        ScaleModes = { XAxisScale = AxisScale.Logarithmic, YAxisScale = AxisScale.Logarithmic },
        // configure axis labels
        Axes = {
            XAxis = { Label = { Text = "Frequency [1/\\omega]" }, LabelPosition = new Vector3(1, 1, 0) },
            YAxis = { Label = { Text = "Magnitude [dB]" }, LabelPosition = new Vector3(1, 1, 0), LabelAnchor = new PointF(1,0) }
        }
    });
    // setup audio stream (this is not related to ILNumerics but to the NAudio helper lib)
    m_waveInStream = new WaveIn();
    m_waveInStream.WaveFormat = new WaveFormat(m_sampFreq, m_bitRate, 1); // 1: mono
    m_waveInStream.DeviceNumber = 0;
    // Buffer sized to roughly one FFT window; the 1010 factor adds ~1% slack over 1000 ms/s.
    m_waveInStream.BufferMilliseconds = (int)(m_fftlen / (float)m_sampFreq * 1010); // roughly one buffersize
    m_waveInStream.DataAvailable += new EventHandler<WaveInEventArgs>(waveInStream_DataAvailable);
    try
    {
        m_waveInStream.StartRecording();
    }
    catch (NAudio.MmException exc)
    {
        // when no device exists or no microphone is plugged in, an exception will be thrown here
        MessageBox.Show("Error initializing audio device. Make sure that a default recording device is available!" + Environment.NewLine + "Error details:" + exc.Message);
    }
}
// Stops the level-meter timer, halts playback, and tears down the capture stream.
private void Stoprecording()
{
    if (m_timer != null)
    {
        m_timer.Stop();
    }
    // Flattened the original's redundant extra scope, duplicated sourceStream null
    // check, and trailing return. Also fixed a leak: the original only stopped/disposed
    // waveOut when sourceStream was non-null, leaking the output device otherwise.
    if (waveOut != null) // stops sound from playing and disposes
    {
        waveOut.Stop();
        waveOut.Dispose();
        waveOut = null;
    }
    if (sourceStream != null) // stops sourcestream from recording and disposes
    {
        sourceStream.StopRecording();
        sourceStream.Dispose();
        sourceStream = null;
    }
}
// Monitor button: routes the selected capture device straight to a DirectSound output
// (100 ms buffers) and starts the UI update timer.
private void button2_Click(object sender, EventArgs e)
{
    if (listView1.SelectedItems.Count == 0)
    {
        return;
    }
    int deviceNumber = listView1.SelectedItems[0].Index;

    sourceStream = new NAudio.Wave.WaveIn();
    sourceStream.DeviceNumber = deviceNumber;
    // 48 kHz with the device's native channel count.
    sourceStream.WaveFormat = new NAudio.Wave.WaveFormat(48000, NAudio.Wave.WaveIn.GetCapabilities(deviceNumber).Channels);
    sourceStream.DataAvailable += sourceStream_DataAvailable;
    sourceStream.BufferMilliseconds = 100;

    // Pipe the capture stream directly into the output device.
    NAudio.Wave.WaveInProvider waveIn = new NAudio.Wave.WaveInProvider(sourceStream);
    waveOut = new NAudio.Wave.DirectSoundOut();
    waveOut.Init(waveIn);

    sourceStream.StartRecording();
    waveOut.Play();

    timer1.Enabled = true;
    ++count;
}
// Opens a full-duplex UDP voice link: capture on the chosen device at the codec's
// record format, loopback-friendly UDP sockets, buffered playback, and a listener thread.
private void Connect(IPEndPoint endPoint, int inputDeviceNumber, INetworkChatCodec codec)
{
    waveIn = new WaveIn
    {
        BufferMilliseconds = 50,
        DeviceNumber = inputDeviceNumber,
        WaveFormat = codec.RecordFormat
    };
    waveIn.DataAvailable += waveIn_DataAvailable;
    waveIn.StartRecording();

    udpSender = new UdpClient();
    udpListener = new UdpClient();
    // To allow us to talk to ourselves for test purposes:
    // http://stackoverflow.com/questions/687868/sending-and-receiving-udp-packets-between-two-programs-on-the-same-computer
    udpListener.Client.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.ReuseAddress, true);
    udpListener.Client.Bind(endPoint);
    udpSender.Connect(endPoint);

    waveOut = new WaveOut();
    waveProvider = new BufferedWaveProvider(codec.RecordFormat);
    waveOut.Init(waveProvider);
    waveOut.Play();

    connected = true;
    var state = new ListenerThreadState { Codec = codec, EndPoint = endPoint };
    ThreadPool.QueueUserWorkItem(ListenerThread, state);
}
// Record button: asks for a target .wav path, then starts capturing the selected device into it.
private void waveBtn_Click(object sender, EventArgs e)
{
    if (sourceList.SelectedItems.Count == 0)
    {
        return;
    }
    var save = new SaveFileDialog { Filter = "Wave File (*.wav)|*.wav;" };
    // Bail out unless the user confirmed a file name.
    if (save.ShowDialog() != System.Windows.Forms.DialogResult.OK)
    {
        return;
    }
    int deviceNumber = sourceList.SelectedItems[0].Index;

    // Capture from the chosen device at 44.1 kHz with its native channel count.
    sourceStream = new NAudio.Wave.WaveIn();
    sourceStream.DeviceNumber = deviceNumber;
    sourceStream.WaveFormat = new NAudio.Wave.WaveFormat(44100, NAudio.Wave.WaveIn.GetCapabilities(deviceNumber).Channels);
    // Incoming buffers are written to disk by the shared DataAvailable handler via waveWriter.
    sourceStream.DataAvailable += sourceStream_DataAvailable;
    waveWriter = new NAudio.Wave.WaveFileWriter(save.FileName, sourceStream.WaveFormat);
    sourceStream.StartRecording();
}
/// <summary>
/// Constructs the recorder window: fills the microphone picker from the available
/// WaveIn devices, then begins capturing device 0 using the conductor's format.
/// </summary>
public SequenceRecorder(Conductor cond, KeyboardConfiguration key)
{
    conductor = cond;
    keyboard = key;
    InitializeComponent();

    // Each picker entry is "index: product name".
    for (int device = 0; device < NAudio.Wave.WaveIn.DeviceCount; device++)
    {
        NAudio.Wave.WaveInCapabilities info = NAudio.Wave.WaveIn.GetCapabilities(device);
        microphoneList.Items.Add(device + ": " + info.ProductName);
    }
    microphoneList.SelectedIndex = 0;
    sensitivity.SelectedIndex = 0;

    sourceStream = new NAudio.Wave.WaveIn { DeviceNumber = 0 };
    sourceStream.WaveFormat = conductor.GetWaveFormat();
    // Frame sizes derived from the negotiated format.
    bytesPerChannel = sourceStream.WaveFormat.BitsPerSample / 8;
    bytesPerSample = bytesPerChannel * sourceStream.WaveFormat.Channels;
    sourceStream.DataAvailable += sourceStream_DataAvailable;
    sourceStream.StartRecording();
}
/// <summary>
/// Starts the audio leg of a tox call: verifies input/output devices exist, prepares
/// transmission, then wires buffered playback and microphone capture from the codec
/// settings (sample rate, channels, frame duration).
/// </summary>
public void Start()
{
    if (WaveIn.DeviceCount < 1)
    {
        throw new Exception("Insufficient input device(s)!");
    }
    if (WaveOut.DeviceCount < 1)
    {
        throw new Exception("Insufficient output device(s)!");
    }

    // Samples per frame = rate * duration(ms) / 1000.
    frame_size = toxav.CodecSettings.audio_sample_rate * toxav.CodecSettings.audio_frame_duration / 1000;
    toxav.PrepareTransmission(CallIndex, false);

    var format = new WaveFormat((int)toxav.CodecSettings.audio_sample_rate, (int)toxav.CodecSettings.audio_channels);
    wave_provider = new BufferedWaveProvider(format) { DiscardOnBufferOverflow = true };

    wave_out = new WaveOut();
    //wave_out.DeviceNumber = config["device_output"];
    wave_out.Init(wave_provider);

    wave_source = new WaveIn();
    //wave_source.DeviceNumber = config["device_input"];
    wave_source.WaveFormat = format;
    wave_source.DataAvailable += wave_source_DataAvailable;
    wave_source.RecordingStopped += wave_source_RecordingStopped;
    // One capture buffer per codec frame.
    wave_source.BufferMilliseconds = (int)toxav.CodecSettings.audio_frame_duration;
    wave_source.StartRecording();
    wave_out.Play();
}
// Records the named word from the first available input device into Media\<name>.wav.
public static void RecordSound(string name)
{
    // Abort with a user-visible error when no capture hardware exists.
    int waveDeviceCount = WaveIn.DeviceCount;
    if (waveDeviceCount <= 0)
    {
        MessageBox.Show("No recording hardware detected", "iMasomoAdmin", MessageBoxButton.OK, MessageBoxImage.Error);
        return;
    }
    inputDevice = 0;
    wordName = name;
    try
    {
        waveIn = new WaveIn();
        waveIn.DeviceNumber = inputDevice;
        // 44.1 kHz with the device's native channel count.
        waveIn.WaveFormat = new NAudio.Wave.WaveFormat(44100, WaveIn.GetCapabilities(inputDevice).Channels);
        // Incoming buffers are appended to the WAV file by waveIn_DataAvailable.
        waveIn.DataAvailable += waveIn_DataAvailable;
        waveWriter = new WaveFileWriter(Environment.CurrentDirectory + @"\Media\" + wordName + ".wav", waveIn.WaveFormat);
        waveIn.StartRecording();
    }
    catch(Exception ex)
    {
        MessageBox.Show(ex.Message);
    }
}
// Establishes a broadcast voice link: microphone -> UDP sender, UDP receiver -> speakers,
// with a worker thread draining the incoming packets.
void Connect(IPEndPoint endPoint, int inputDeviceNumber, INetworkChatCodec codec)
{
    source = new WaveIn
    {
        BufferMilliseconds = 50,
        DeviceNumber = inputDeviceNumber,
        WaveFormat = codec.RecordFormat
    };
    source.DataAvailable += waveIn_DataAvailable;
    source.StartRecording();

    sendVoice = new UdpClient { EnableBroadcast = true };
    receiveVoice = new UdpClient();
    receiveVoice.Client.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.Broadcast, true);
    receiveVoice.Client.Bind(endPoint);
    sendVoice.Connect(endPoint);

    waveOut = new WaveOut();
    waveProvider = new BufferedWaveProvider(codec.RecordFormat);
    waveOut.Init(waveProvider);
    waveOut.Play();

    connected = true;
    var state = new ListenerThreadState { Codec = codec, EndPoint = endPoint };
    ThreadPool.QueueUserWorkItem(ListenerThread, state);
}
// RecordingStopped handler: releases the capture device and finalizes the WAV file.
private void waveIn_RecordingStopped(object sender, StoppedEventArgs e)
{
    // Null guards make the handler safe if it fires more than once; the original
    // dereferenced both fields unconditionally and threw NullReferenceException
    // on a second invocation.
    if (waveIn != null)
    {
        waveIn.Dispose();
        waveIn = null;
    }
    if (writer != null)
    {
        writer.Close();
        writer = null;
    }
}
// Stop button: opens the recognition form, finalizes the recording, and kicks off
// asynchronous processing of the captured WAV file.
private void stop_Click(object sender, EventArgs e)
{
    // NOTE(review): the recognition form is created with three null arguments and
    // shown before its result labels are read back — confirm this ordering is intended.
    Recognition_Form form = new Recognition_Form(null, null, null);
    form.Show();
    this.microResultLabel = form.ms_result;
    this.mfcc_result = form.mfcc_result;
    this.lpc_result = form.lpc_result;
    // Tear down the capture stream and writer so the WAV file is complete on disk.
    if (sourceStream != null)
    {
        sourceStream.StopRecording();
        sourceStream.Dispose();
        sourceStream = null;
    }
    if (waveWriter != null)
    {
        waveWriter.Dispose();
        waveWriter = null;
    }
    openWav(wavSaveName);
    recording_timer.Stop();
    // wavSaveName is cleared BEFORE the background task runs; presumably
    // processWavFile(algo) does not read it — TODO confirm.
    wavSaveName = null;
    System.Threading.Tasks.Task.Run(() => { processWavFile(algo); });
    // processWavFile(algo);
}
// Watchdog tick: when the re-acquire flag is set, re-creates the capture stream on the
// remembered device and resumes recording. skipCheck throttles how often this runs.
private void timer1_Tick(object sender, EventArgs e)
{
    if (conductor == null)
    {
        return;
    }
    // Countdown gate: skip this many ticks before doing any work.
    if (skipCheck > 0)
    {
        --skipCheck;
        return;
    }
    if (reAcquireMicrophone)
    {
        reAcquireMicrophone = false;
        // NOTE(review): any previous sourceStream is replaced without StopRecording/Dispose —
        // possible device-handle leak; confirm the old stream is cleaned up elsewhere.
        sourceStream = new NAudio.Wave.WaveIn();
        sourceStream.DeviceNumber = deviceNumber;
        // 44.1 kHz with the device's native channel count.
        sourceStream.WaveFormat = new NAudio.Wave.WaveFormat(44100, NAudio.Wave.WaveIn.GetCapabilities(deviceNumber).Channels);
        bytesPerSample = (sourceStream.WaveFormat.BitsPerSample / 8) * sourceStream.WaveFormat.Channels;
        sourceStream.DataAvailable += new EventHandler <NAudio.Wave.WaveInEventArgs>(sourceStream_DataAvailable);
        sourceStream.StartRecording();
    }
    /*
     * if (reAcquireMicrophone||(!directInput.IsDeviceAttached(MicrophoneGuid)))
     * {
     *     if (Microphone != null)
     *         Microphone.Unacquire();
     *
     *     IList<DeviceInstance> deviceInstance = directInput.GetDevices(DeviceType.Microphone, DeviceEnumerationFlags.AllDevices);
     *     if (deviceInstance.Count > 0)
     *         MicrophoneGuid = deviceInstance[0].InstanceGuid;
     *     else
     *     {
     *         // don't check for another second
     *         skipCheck = 100;
     *         return;
     *     }
     *
     *     reAcquireMicrophone = false;
     *
     *     // Instantiate the Microphone
     *     Microphone = new Microphone(directInput, MicrophoneGuid);
     *
     *     // Set BufferSize in order to use buffered data.
     *     Microphone.Properties.BufferSize = 128;
     *
     *     // Acquire the Microphone
     *     Microphone.Acquire();
     *
     *     // allow values to stabilize before using them!
     *     skipCount = 50;
     * }
     */
}
/// <summary>
/// Prepares a recorder on the given microphone (44.1 kHz mono) with a matching
/// sample buffer and a writer targeting the configured temp-file location.
/// </summary>
public AudioRecorder(int microphone)
{
    waveIn = new WaveIn { DeviceNumber = microphone };
    waveIn.WaveFormat = new WaveFormat(44100, 1);
    bufferedWaveProvider = new BufferedWaveProvider(waveIn.WaveFormat);
    writer = new WaveFileWriter(Settings.Default.tempSoundLocation, waveIn.WaveFormat);
}
// Warms up the microphone by briefly opening (and immediately closing) the default
// capture device at SAMPLE_FREQ / 16-bit mono.
public static void PrepareMic()
{
    using (var warmup = new WaveIn())
    {
        warmup.WaveFormat = new WaveFormat(SAMPLE_FREQ, 16, 1);
        warmup.StartRecording();
        warmup.StopRecording();
    } // disposed here, as the original did explicitly
}
//Event arguments to stop recording
// Releases the capture device and closes the WAV file once recording has stopped.
private void wavSource_RecordingStopped(object sender, EventArgs e)
{
    // Guarded so a repeated RecordingStopped event cannot throw on already-cleared
    // fields (the original dereferenced both unconditionally; its sibling handler
    // wavSource_RecordingStop already uses these guards).
    if (wavSource != null)
    {
        wavSource.Dispose();
        wavSource = null;
    }
    if (wavFile != null)
    {
        wavFile.Close();
        wavFile.Dispose();
        wavFile = null;
    }
}
// Begins passive listening on input device 0 at 32 kHz mono; captured buffers
// are delivered to ListenerDataAvailable.
public void StartListener()
{
    waveListener = new WaveIn
    {
        DeviceNumber = 0,
        WaveFormat = new WaveFormat(32000, 1)
    };
    waveListener.DataAvailable += ListenerDataAvailable;
    waveListener.StartRecording();
}
// Stops capture and finalizes the output file; safe to call repeatedly.
public void stop()
{
    // Null checks added: the original dereferenced both fields unconditionally and
    // threw NullReferenceException if stop() ran twice or before recording started.
    if (waveInStream != null)
    {
        waveInStream.StopRecording();
        waveInStream.Dispose();
        waveInStream = null;
    }
    if (writer != null)
    {
        writer.Close();
        writer = null;
    }
}
// Smoke test: fetching the mixer line of a freshly opened WaveIn device must not throw.
public void CanGetWaveInMixerLine()
{
    using (WaveIn waveIn = new WaveIn())
    {
        // Result is intentionally unused; the call completing without an exception is the assertion.
        MixerLine line = waveIn.GetMixerLine();
        //Debug.WriteLine(String.Format("Mic Level {0}", level));
    }
}
// Main window setup: lists capture devices to the console, starts a 44.1 kHz capture
// on device 0, and wires two charts to the shared sample buffers.
public MainWindow()
{
    InitializeComponent();
    this.WindowStartupLocation = System.Windows.WindowStartupLocation.CenterScreen;
    // Enumerate all input devices (diagnostic output only).
    int waveInDevices = WaveIn.DeviceCount;
    for (int waveInDevice = 0; waveInDevice < waveInDevices; waveInDevice++)
    {
        WaveInCapabilities deviceInfo = WaveIn.GetCapabilities(waveInDevice);
        Console.WriteLine("Device {0}: {1}, {2} channels", waveInDevice, deviceInfo.ProductName, deviceInfo.Channels);
    }
    waveIn = new WaveIn();
    // Buffer length scaled from buffersize (47 ms per 2048 samples at 44.1 kHz) —
    // TODO confirm the intended relationship.
    waveIn.BufferMilliseconds = 47*buffersize/2048;
    waveIn.DeviceNumber = 0;
    // NOTE(review): this constructor requests 32-bit PCM, which few drivers accept —
    // confirm IEEE-float wasn't intended.
    waveIn.WaveFormat = new WaveFormat(44100, 32, 1);
    waveIn.DataAvailable += waveIn_DataAvailable;
    try
    {
        waveIn.StartRecording();
    }
    catch(NAudio.MmException e)
    {
        Console.WriteLine(e.ToString() + "\nPlug in a microphone!");
    }
    // Allocate and zero the bin/sample/prior arrays (2x buffersize each).
    bin = new int[buffersize * 2];
    sampledata = new float[buffersize * 2];
    priori = new double[buffersize * 2];
    channelLabel = new int[1];
    channelLabel[0] = 1;
    velocity = new int[1];
    velocity[0] = 0;
    for (int i = 0; i < buffersize * 2; i++)
    {
        bin[i] = i;
        sampledata[i] = 0;
        priori[i] = 0;
    }
    // Chart 1 shows the full raw waveform; chart 2 is a fixed zoomed-in region.
    chart1.Viewport.Visible = new DataRect(0, -1.0, buffersize * 2, 2.0);
    chart2.Viewport.Visible = new DataRect(1620, 0, 280, 110);
    bins = new EnumerableDataSource<int>(bin);
    bins.SetXMapping(x => x);
    rawIn = new EnumerableDataSource<float>(sampledata);
    rawIn.SetYMapping(y => y);
    // Both charts plot the same (bin, sample) composite source.
    CompositeDataSource comp1 = new CompositeDataSource(bins, rawIn);
    chart1.AddLineGraph(comp1);
    CompositeDataSource comp2 = new CompositeDataSource(bins, rawIn);
    chart2.AddLineGraph(comp2);
}
// Opens the default capture device (44.1 kHz / 16-bit mono) and begins streaming
// buffers to WaveIn_DataAvailable.
public void StartAudioIn()
{
    m_WaveIn = new WaveIn();
    //m_WaveIn.WaveFormat = new WaveFormat(44100, 32, 2);
    m_WaveIn.WaveFormat = new WaveFormat(44100, 16, 1);
    m_WaveIn.DataAvailable += WaveIn_DataAvailable;
    m_WaveIn.StartRecording();
}
// Registers the default microphone (device 0, 44.1 kHz mono) as the session's
// input audio stream and starts capturing.
private void InitialiseMicrophone()
{
    var waveIn = new WaveIn { DeviceNumber = 0 };
    waveIn.WaveFormat = new WaveFormat(44100, 1);
    var stream = new AudioInStream(waveIn, new MemoryStream());
    _session.RegisterInputAudio(stream);
    stream.StartRecording();
}
// Begins recording to the given file with the default wave format, optionally
// pre-setting the input mixer volume (0-100, default full).
public void Record(string fileName, int volume = 100)
{
    _waveIn = new WaveIn();
    _waveIn.WaveFormat = new WaveFormat();
    _writer = new WaveFileWriter(fileName, _waveIn.WaveFormat);
    TrySetVolumeControl(_waveIn.GetMixerLine(), volume);
    _waveIn.DataAvailable += new_dataAvailable;
    _waveIn.StartRecording();
}
// Builds the audio pipeline from saved settings: capture stream -> buffer -> DirectSound
// output. Nothing starts playing or recording here; this only wires the pieces together.
private void InitializeSound()
{
    // Input device and format come from the settings panel (device 0 is the usual default mic).
    sourceStream = new NAudio.Wave.WaveIn
    {
        DeviceNumber = setting.deviceNumber,
        WaveFormat = setting.format
    };
    sourceStream.DataAvailable += waveIn_DataAvailable; // incoming capture data is handled here

    waveOut = new NAudio.Wave.DirectSoundOut();                  // output device
    buffer = new BufferedWaveProvider(sourceStream.WaveFormat);  // buffer matches the capture format
    waveOut.Init(buffer);                                        // connect buffer to output
}
// Builds the recorder: default-format WaveIn on device 0, plus the default WASAPI
// capture endpoint (via MMDeviceEnumerator), then verifies the output folders exist.
public Recorder()
{
    wavein = new WaveIn { DeviceNumber = 0 };
    wavein.WaveFormat = new WaveFormat();
    devEnum = new MMDeviceEnumerator();
    defaultDevice = devEnum.GetDefaultAudioEndpoint(DataFlow.Capture, Role.Multimedia);
    wavein.DataAvailable += Wavein_DataAvailable;
    checkFolders();
}
// Opens the default device at 44.1 kHz / 16-bit mono and records straight into FileName.
public void StartRecording()
{
    waveInStream = new WaveIn { WaveFormat = new WaveFormat(44100, 16, 1) };
    writer = new WaveFileWriter(FileName, waveInStream.WaveFormat);
    waveInStream.DataAvailable += WaveInStream_DataAvailable;
    waveInStream.StartRecording();
}
// RECORD/STOP toggle: in record mode, captures test.wav for the class; in stop mode,
// compares the recording against the reference word audio and reports the match score.
private void recordBtn_Click(object sender, EventArgs e)
{
    if (setMode)
    {
        try
        {
            String filename = "Class" + LoginForm.classSec + "_kidWordAudio/test.wav";
            recordBtn.Text = "STOP";
            wavSource = new NAudio.Wave.WaveIn();
            wavSource.WaveFormat = new NAudio.Wave.WaveFormat(44100, 1); // 44.1 kHz mono
            wavSource.DataAvailable += new EventHandler <NAudio.Wave.WaveInEventArgs>(wavSource_DataAvail);
            wavSource.RecordingStopped += new EventHandler <NAudio.Wave.StoppedEventArgs>(wavSource_RecordingStop);
            wavFile = new NAudio.Wave.WaveFileWriter(filename, wavSource.WaveFormat);
            wavSource.StartRecording();
            setMode = false;
        }
        catch (Exception)
        {
            // Rethrown unchanged; this catch adds nothing — presumably left from debugging.
            throw;
        }
    }
    else
    {
        //When you press "STOP", it automatically compares
        wavSource.StopRecording();
        String recordWAV_file = "Class" + LoginForm.classSec + "_kidWordAudio/test.wav";
        // NOTE(review): the reference filename embeds a stray ";" before ".wav" —
        // likely a typo; confirm against the actual files on disk before changing.
        String refWAV_file = "Class" + LoginForm.classSec + "_kidWordAudio/" + levels[curPos] + ";.wav";
        // java.io.File (IKVM interop) is used only for existence checks here.
        java.io.File f1 = new java.io.File(recordWAV_file);
        java.io.File f2 = new java.io.File(refWAV_file);
        if (!f1.exists() || !f2.exists())
        {
            MessageBox.Show("WARNING: One of the files might be missing!");
        }
        else
        {
            float compute_Result = compareAudio(recordWAV_file, refWAV_file);
            // Score threshold 10.0 separates a "win" from a retry.
            if (compute_Result >= 10.0)
            {
                MessageBox.Show("Matched: " + compute_Result.ToString() + "\n You Win !");
            }
            else
            {
                MessageBox.Show("Matched: " + compute_Result.ToString() + "\n Try Again !");
            }
        }
        recordBtn.Text = "RECORD";
        setMode = true;
    }
}
// Starts microphone capture (16-bit mono at the configured bit rate) on the configured
// device, then raises the after-start hook. Does nothing when already active.
public void StartListening()
{
    if (IsActive)
    {
        return; // guard clause replaces the original's wrapping if-block
    }
    _waveIn = new WaveIn();
    _waveIn.DeviceNumber = Options.DeviceNumber;
    _waveIn.DataAvailable += WaveIn_DataAvailable;
    _waveIn.WaveFormat = new WaveFormat(Options.MicrophoneBitRate, 16, 1);
    _waveIn.StartRecording();
    OnAfterStartListening();
}
/// <summary>Opens the given input device for 8 kHz mono capture, wires the data/stop handlers, and starts recording.</summary>
public void InitializeNAudio(int device)
{
    waveIn = new WaveIn { DeviceNumber = device };
    waveIn.DataAvailable += waveIn_DataAvailable;
    waveIn.RecordingStopped += waveIn_RecordingStopped;
    // 8 kHz mono — telephony-quality capture.
    waveIn.WaveFormat = new WaveFormat(8000, 1);
    waveIn.StartRecording();
}
// Stops recording and finalizes the output file; safe to call in any state.
public void Stop()
{
    if (_waveIn != null)
    {
        _waveIn.StopRecording();
        _waveIn.Dispose();
        _waveIn = null;
    }
    // Guarded separately from _waveIn: the original dereferenced _writer inside the
    // _waveIn check, throwing when capture existed but the writer did not, and
    // leaking the writer when _waveIn was already null.
    if (_writer != null)
    {
        _writer.Close();
        _writer = null;
    }
}
// Tears down the capture device and resets the associated buffer state.
private void DisposeWaveIn()
{
    if (waveIn == null)
    {
        return; // nothing to release
    }
    waveIn.StopRecording();
    waveIn.Dispose();
    waveIn = null;
    buffer = null;
    writeIndex = 0;
}
// Re-creates the capture source on the last enumerated input device (10 ms buffers)
// and starts it. Does nothing when no capture devices are present.
// NOTE: device count is queried via WaveInEvent while the device itself is a WaveIn,
// matching the original code.
public void RestartRecording()
{
    if (WaveInEvent.DeviceCount <= 0)
    {
        return;
    }
    int inputDeviceNumber = WaveInEvent.DeviceCount - 1; // pick the last device
    waveIn = new WaveIn
    {
        DeviceNumber = inputDeviceNumber,
        BufferMilliseconds = 10
    };
    waveIn.DataAvailable += waveIn_DataAvailable;
    waveIn.StartRecording();
}
// REC/STOP toggle: on REC, prompt for a .wav target and start capturing the selected
// input; on STOP, tear down the capture stream and finalize the file.
private void btn_RECSTOP_Click(object sender, EventArgs e)
{
    if (!isRecording)
    {
        if (cmb_InputsList.SelectedItem == null)
        {
            MessageBox.Show("Error! \n No Input Selected, Please select an Audio Input before recording");
            return;
        }
        var save = new SaveFileDialog { Filter = "Wave File(*.wav)|*.wav;" };
        if (save.ShowDialog() != System.Windows.Forms.DialogResult.OK)
        {
            return;
        }
        lst_samplelist.Items.Add(save.FileName);

        int deviceNumber = cmb_InputsList.SelectedIndex;
        InputStream = new NAudio.Wave.WaveIn();
        InputStream.DeviceNumber = deviceNumber;
        // 44.1 kHz with the device's native channel count.
        InputStream.WaveFormat = new NAudio.Wave.WaveFormat(44100, NAudio.Wave.WaveIn.GetCapabilities(deviceNumber).Channels);
        InputStream.DataAvailable += InputStream_DataAvailable;
        waveWriter = new NAudio.Wave.WaveFileWriter(save.FileName, InputStream.WaveFormat);
        InputStream.StartRecording();

        btn_RECSTOP.Text = "STOP";
        isRecording = true;
    }
    else
    {
        if (InputStream != null)
        {
            InputStream.StopRecording();
            InputStream.Dispose();
            InputStream = null;
        }
        if (waveWriter != null)
        {
            waveWriter.Dispose();
            waveWriter = null;
        }
        btn_RECSTOP.Text = "REC";
        isRecording = false;
    }
}
// RecordingStopped handler: releases the capture device.
private void waveSource_RecordingStopped(object sender, StoppedEventArgs e)
{
    if (waveSource != null)
    {
        waveSource.Dispose();
        waveSource = null;
    }
    if (waveFile != null)
    {
        // NOTE(review): the writer dispose is deliberately commented out — presumably
        // it is finalized elsewhere; confirm, otherwise the WAV header is never written.
        // waveFile.Dispose();
        // waveFile = null;
    }
}
// Probes whether the given capture device accepts a 44.1 kHz format with its
// native channel count. Returns true on success, false on any failure.
public static Boolean MicrophoneAvailableTest(int deviceId)
{
    // using() releases the probe device when done — the original never disposed it,
    // leaking a capture handle per call. The unused exception variable is also gone.
    using (var sourceStream = new NAudio.Wave.WaveIn())
    {
        sourceStream.DeviceNumber = deviceId;
        try
        {
            sourceStream.WaveFormat = new NAudio.Wave.WaveFormat(44100, NAudio.Wave.WaveIn.GetCapabilities(deviceId).Channels);
            return(true);
        }
        catch (Exception)
        {
            return(false);
        }
    }
}
// Switches capture to the newly selected microphone.
private void microphoneList_SelectedIndexChanged(object sender, EventArgs e)
{
    deviceNumber = microphoneList.SelectedIndex;
    if (sourceStream != null)
    {
        sourceStream.StopRecording();
        // Dispose the old device before replacing it — the original only stopped it,
        // leaking the capture handle on every device switch.
        sourceStream.Dispose();
        sourceStream = null;
    }
    sourceStream = new NAudio.Wave.WaveIn();
    sourceStream.DeviceNumber = deviceNumber;
    sourceStream.WaveFormat = conductor.GetWaveFormat();
    // Bytes per frame derived from the negotiated format.
    bytesPerSample = (sourceStream.WaveFormat.BitsPerSample / 8) * sourceStream.WaveFormat.Channels;
    sourceStream.DataAvailable += new EventHandler <NAudio.Wave.WaveInEventArgs>(sourceStream_DataAvailable);
    sourceStream.StartRecording();
}
//Event args to stop recording events
// Cleans up the capture source and WAV writer after recording stops; the local copies
// plus null checks make repeat invocations harmless.
private void wavSource_RecordingStop(object sender, NAudio.Wave.StoppedEventArgs e)
{
    var source = wavSource;
    if (source != null)
    {
        source.Dispose();
        wavSource = null;
    }
    var file = wavFile;
    if (file != null)
    {
        file.Dispose();
        wavFile = null;
    }
    //recBtn.Enabled = true;
}
// Ends capture and finalizes the WAV file; already-null fields are skipped.
private void stopRecordSound()
{
    var stream = sourceStream;
    if (stream != null)
    {
        stream.StopRecording();
        stream.Dispose();
        sourceStream = null;
    }
    var writer = waveWriter;
    if (writer != null)
    {
        writer.Dispose();
        waveWriter = null;
    }
}
// Start button: begins capturing device 0 at 16 kHz mono; incoming buffers flow to
// waveIn_DataAvailable.
private void Button_Click_1(object sender, RoutedEventArgs e)
{
    sourceStream = new NAudio.Wave.WaveIn { DeviceNumber = 0 };
    sourceStream.WaveFormat = new NAudio.Wave.WaveFormat(16000, 1);
    sourceStream.DataAvailable += waveIn_DataAvailable;
    sourceStream.StartRecording();
    //NAudio.Wave.WaveInProvider waveIn = new NAudio.Wave.WaveInProvider(sourceStream);
    //waveOut = new NAudio.Wave.DirectSoundOut();
    //waveOut.Init(waveIn);
    //waveOut.Play();
}
/// <summary>
/// Console entry point: records 8 kHz mono audio from the default device, pushing
/// captured buffers to WaveIn_DataAvailable until the user presses Enter.
/// </summary>
static void Main(string[] args)
{
    //NSpeex.SpeexEncoder encoder = new SpeexEncoder(BandMode.Wide);
    using (var waveIn = new NAudio.Wave.WaveIn { WaveFormat = new WaveFormat(8000, 1) })
    {
        waveIn.DataAvailable += WaveIn_DataAvailable;
        waveIn.StartRecording();
        Console.ReadLine();
        // Stop and dispose cleanly — the original exited without ever stopping the
        // capture, and also constructed an unused WaveOut instance (removed).
        waveIn.StopRecording();
    }
}
// Stops monitoring: halts playback first, then the capture stream, releasing both.
public void editorSesDinlemeDurdur()
{
    var output = waveOut;
    if (output != null)
    {
        output.Stop();
        output.Dispose();
        waveOut = null;
    }
    var input = sourceStream;
    if (input != null)
    {
        input.StopRecording();
        input.Dispose();
        sourceStream = null;
    }
}
// Ends recording and releases the capture stream and WAV writer (no-op when already stopped).
private void stopRecordSound()
{
    var stream = _sourceStream;
    if (stream != null)
    {
        stream.StopRecording();
        stream.Dispose();
        _sourceStream = null;
    }
    var writer = _waveWriter;
    if (writer != null)
    {
        writer.Dispose();
        _waveWriter = null;
    }
}
// Stop button: shuts down capture, finalizes the WAV file, and stops the recording timer.
private void button11_Click(object sender, EventArgs e)
{
    var stream = sourceStream;
    if (stream != null)
    {
        stream.StopRecording();
        stream.Dispose();
        sourceStream = null;
    }
    var writer = waveWriter;
    if (writer != null)
    {
        writer.Dispose();
        waveWriter = null;
    }
    recording_timer.Stop();
}
// "Record" button: builds a timestamped clip filename and starts capturing into it.
private void recordButton_Click(object sender, EventArgs e)
{
    state = "record";
    recordButton.Enabled = false;
    // Fixed format specifier: "yyy" -> "yyyy". The typo happened to render identically
    // for four-digit years (custom "yyy" means *at least* three digits) but was still wrong.
    outputFilename = String.Format("Clip {0:yyyy-MM-dd HH-mm-ss}.wav", DateTime.Now);
    outputFilePath = Path.Combine(outputFolder, outputFilename);
    Debug.Print(outputFilePath);
    sourceStream = new NAudio.Wave.WaveIn();
    sourceStream.DeviceNumber = deviceNumber;
    sourceStream.WaveFormat = new NAudio.Wave.WaveFormat(sampleRate, inChannels);
    sourceStream.DataAvailable += new EventHandler <NAudio.Wave.WaveInEventArgs>(sourceStream_DataAvailable);
    waveWriter = new NAudio.Wave.WaveFileWriter(outputFilePath, sourceStream.WaveFormat);
    sourceStream.StartRecording();
}
// Stop button: halts playback and capture, finalizes the writer, and resets the status label.
private void button_stop_Click(object sender, EventArgs e)
{
    if (waveOut != null)
    {
        waveOut.Stop();
        waveOut.Dispose();
        waveOut = null;
    }
    if (sourceStream != null)
    {
        sourceStream.StopRecording();
        sourceStream.Dispose();
        sourceStream = null;
    }
    if (waveWriter != null)
    {
        waveWriter.Dispose();
        waveWriter = null;
    }
    //Label
    this.Label_Status.Text = "待機中";
}
// Starts live monitoring: pipes the selected capture device through a DirectSound
// output and shows the buffer count in the UI.
public void editorSesDinlemeBaslat()
{
    int deviceNumber = sesCihazComboBox.SelectedIndex;
    sourceStream = new NAudio.Wave.WaveIn { DeviceNumber = deviceNumber };
    // 48 kHz with the device's native channel count.
    sourceStream.WaveFormat = new NAudio.Wave.WaveFormat(48000, NAudio.Wave.WaveIn.GetCapabilities(deviceNumber).Channels);

    var monitorProvider = new NAudio.Wave.WaveInProvider(sourceStream);
    waveOut = new NAudio.Wave.DirectSoundOut();
    waveOut.Init(monitorProvider);

    sourceStream.StartRecording();
    waveOut.Play();
    onbellekSayiLabel.Text = "Kullanılacak Önbellek Sayısı: " + sourceStream.NumberOfBuffers;
}
//NAudio.Wave.WaveFileWriter waveWriter = null;
//NAudio.Wave.WaveFileReader waveReader = null;
//NAudio.Wave.DirectSoundOut output = null;
// Hidden recording form: starts capturing to a timestamped clip file immediately on construction.
public frmRecording()
{
    InitializeComponent();
    this.WindowState = FormWindowState.Minimized;
    this.ShowInTaskbar = false;
    // Fixed format specifier: "yyy" -> "yyyy". The typo rendered identically for
    // four-digit years (custom "yyy" means *at least* three digits) but was still wrong.
    outputFilename = String.Format("Clip {0:yyyy-MM-dd HH-mm-ss}.wav", DateTime.Now);
    outputFilePath = Path.Combine(outputFolder, outputFilename);
    Debug.Print(outputFilePath);
    sourceStream = new NAudio.Wave.WaveIn();
    sourceStream.DeviceNumber = deviceNumber;
    sourceStream.WaveFormat = new NAudio.Wave.WaveFormat(sampleRate, inChannels);
    sourceStream.DataAvailable += new EventHandler <NAudio.Wave.WaveInEventArgs>(sourceStream_DataAvailable);
    waveWriter = new NAudio.Wave.WaveFileWriter(outputFilePath, sourceStream.WaveFormat);
    sourceStream.StartRecording();
}
// Stop button: ends capture, finalizes the WAV file, and prepares the recognition worker.
private void Button_Click(object sender, RoutedEventArgs e)
{
    //if (waveOut != null)
    //{
    //    waveOut.Stop();
    //    waveOut.Dispose();
    //    waveOut = null;
    //}
    if (sourceStream != null)
    {
        sourceStream.StopRecording();
        sourceStream.Dispose();
        sourceStream = null;
        // Fixed writer cleanup: the original called Dispose() and THEN Close() on the
        // already-disposed writer, with no null guard and without clearing the field.
        // Close() finalizes the WAV header and disposes; clearing prevents reuse.
        if (writer != null)
        {
            writer.Close();
            writer = null;
        }
        bw = new BackgroundWorker();
        //identify();
    }
}
// Echoes the default microphone (device 0) back out through DirectSound — a live
// monitor loop of the user's own voice.
private void button1_Click(object sender, EventArgs e)
{
    const int deviceNumber = 0; // default capture device
    source = new NAudio.Wave.WaveIn { DeviceNumber = deviceNumber };
    source.WaveFormat = new NAudio.Wave.WaveFormat(44100, NAudio.Wave.WaveIn.GetCapabilities(deviceNumber).Channels);

    var monitor = new NAudio.Wave.WaveInProvider(source);
    waveout = new NAudio.Wave.DirectSoundOut();
    waveout.Init(monitor);

    source.StartRecording();
    waveout.Play();
}