// Handler for the Record button: releases any previous capture object,
// notes the start time, then begins a fresh recording session.
private void record_Click(object sender, EventArgs e)
{
    // Tear down the prior global recording resource, if one exists.
    if (global != null)
    {
        global.Dispose();
    }

    recording_timer.Start();

    // Remember when capture began so elapsed time can be shown later.
    start = DateTime.Now;

    initRecord(false);
    sourceStream.StartRecording();
}
// Builds the sequence-recorder form: enumerates the capture devices into
// the microphone list and immediately starts recording from device 0
// using the conductor's wave format.
public SequenceRecorder(Conductor cond, KeyboardConfiguration key)
{
    conductor = cond;
    keyboard = key;
    InitializeComponent();

    int waveInDevices = NAudio.Wave.WaveIn.DeviceCount;
    for (int waveInDevice = 0; waveInDevice < waveInDevices; waveInDevice++)
    {
        NAudio.Wave.WaveInCapabilities deviceInfo = NAudio.Wave.WaveIn.GetCapabilities(waveInDevice);
        microphoneList.Items.Add(waveInDevice + ": " + deviceInfo.ProductName);
    }

    // Fix: selecting index 0 on an empty list throws when no capture
    // device is present; guard before selecting.
    if (microphoneList.Items.Count > 0)
    {
        microphoneList.SelectedIndex = 0;
    }
    sensitivity.SelectedIndex = 0;

    sourceStream = new NAudio.Wave.WaveIn();
    sourceStream.DeviceNumber = 0;
    sourceStream.WaveFormat = conductor.GetWaveFormat();

    // Frame-size bookkeeping used when parsing raw capture buffers.
    bytesPerChannel = (sourceStream.WaveFormat.BitsPerSample / 8);
    bytesPerSample = bytesPerChannel * sourceStream.WaveFormat.Channels;

    sourceStream.DataAvailable += new EventHandler<NAudio.Wave.WaveInEventArgs>(sourceStream_DataAvailable);
    sourceStream.StartRecording();
}
// this gets called when the panel is loaded into the form:
// builds the ILNumerics scene (data plot + peak-marker plot on log/log axes)
// and starts the NAudio microphone capture feeding the FFT display.
private void ilPanel1_Load(object sender, EventArgs e)
{
    m_shutdown = false;

    // setup the scene: one 3D-capable plot cube containing both plots
    ilPanel1.Scene.Add(new ILPlotCube(twoDMode: false)
    {
        Children = {
            // create two line plots: the first is used to display the data itself ...
            new ILLinePlot(0, DefaultLinePlotTag, Color.Magenta, lineWidth: 1),
            // .. the second is used for marking magnitude peaks, it gets the line hidden
            new ILLinePlot(0, DefaultMarkerPlotTag, markerStyle:MarkerStyle.Square) { Line = { Visible = false }}
        },
        // we want both axes in logarithmic scale
        ScaleModes = {
            XAxisScale = AxisScale.Logarithmic,
            YAxisScale = AxisScale.Logarithmic
        },
        // configure axis labels
        Axes = {
            XAxis = {
                Label = { Text = "Frequency [1/\\omega]" },
                LabelPosition = new Vector3(1, 1, 0)
            },
            YAxis = {
                Label = { Text = "Magnitude [dB]" },
                LabelPosition = new Vector3(1, 1, 0),
                LabelAnchor = new PointF(1,0)
            }
        }
    });

    // setup audio stream (this is not related to ILNumerics but to the NAudio helper lib)
    m_waveInStream = new WaveIn();
    m_waveInStream.WaveFormat = new WaveFormat(m_sampFreq, m_bitRate, 1); // 1: mono
    m_waveInStream.DeviceNumber = 0;
    // buffer sized so one DataAvailable callback holds roughly one FFT frame
    m_waveInStream.BufferMilliseconds = (int)(m_fftlen / (float)m_sampFreq * 1010); // roughly one buffersize
    m_waveInStream.DataAvailable += new EventHandler<WaveInEventArgs>(waveInStream_DataAvailable);
    try
    {
        m_waveInStream.StartRecording();
    }
    catch (NAudio.MmException exc)
    {
        // when no device exists or no microphone is plugged in, an exception will be thrown here
        MessageBox.Show("Error initializing audio device. Make sure that a default recording device is available!" + Environment.NewLine + "Error details:" + exc.Message);
    }
}
//Starts recording: disables navigation, opens an 8 kHz 16-bit mono capture
// stream, prepares the .wav writer plus the waveform-display state, then
// begins recording.
private void startRecording()
{
    // Lock the navigation buttons while a recording is in progress.
    ReadAudioForm3.backButton.Enabled = false;
    ReadAudioForm3.submitButton.Enabled = false;

    wavSource = new WaveIn();
    wavSource.DataAvailable += new EventHandler <WaveInEventArgs>(wavSource_DataAvailable);
    wavSource.RecordingStopped += new EventHandler <StoppedEventArgs>(wavSource_RecordingStopped);
    //wavSource.WaveFormat = new WaveFormat(44100, 1);
    wavSource.WaveFormat = new WaveFormat(8000, 16, 1);

    // Output file named after the current level string.
    // NOTE(review): path is hard-coded; confirm the folder exists at runtime.
    String filename = @"C:\Pres_Proto\V2\MetroFrameworkDLLExample\RecordWAV\" + lvlStr + ".wav";
    wavFile = new WaveFileWriter(filename, wavSource.WaveFormat);

    // Waveform display: a polyline constrained to the canvas minus a 4px margin.
    canvasHeight = waveCanvas.Height;
    canvasWidth = waveCanvas.Width;
    polyLine = new Polyline();
    polyLine.Stroke = Brushes.Black;
    polyLine.StrokeThickness = 1;
    polyLine.Name = "waveform";
    polyLine.MaxHeight = canvasHeight - 4;
    polyLine.MaxWidth = canvasWidth - 4;
    polyHeight = polyLine.MaxHeight;
    polyWidth = polyLine.MaxWidth;

    // Reset the sample/point bookkeeping consumed by the DataAvailable handler.
    counter = 0;
    dispPoints = new Queue <Point>();
    totalBytes = new List <byte>();
    dispShots = new Queue <Int32>();

    wavSource.StartRecording();
}
// Starts capturing audio from the device selected in devices_cbx and
// streams it into the .wav file at VCDir.Instance.PathWaveFile.
private void startRecordSound()
{
    int deviceNumber = devices_cbx.SelectedIndex;
    // NOTE(review): the sample-rate index is read from the *device* combo
    // box; this looks like a copy/paste slip — confirm whether it should
    // come from a dedicated sample-rate combo box instead.
    int sampleRate = devices_cbx.SelectedIndex;

    if (sampleRate >= 0 && deviceNumber >= 0 && sampleRate < Constant.TextSampleRate.Count<string>())
    {
        sourceStream = new NAudio.Wave.WaveIn();
        sourceStream.DeviceNumber = deviceNumber;
        sourceStream.WaveFormat = new NAudio.Wave.WaveFormat(
            Constant.SampleRate[sampleRate],
            NAudio.Wave.WaveIn.GetCapabilities(deviceNumber).Channels);

        pathFile = VCDir.Instance.PathWaveFile;
        sourceStream.DataAvailable += new EventHandler<NAudio.Wave.WaveInEventArgs>(sourceStream_DataAvailable);
        waveWriter = new NAudio.Wave.WaveFileWriter(pathFile, sourceStream.WaveFormat);
        sourceStream.StartRecording();
    }
    else
    {
        // Fix: was MessageBox.Show("") — an empty dialog gave the user no clue.
        MessageBox.Show("Please select a valid recording device and sample rate.");
        return;
    }
}
// Records audio from the device chosen in sourceList into a .wav file
// picked by the user through a save dialog.
private void waveBtn_Click(object sender, EventArgs e)
{
    // Nothing selected -> nothing to record from.
    if (sourceList.SelectedItems.Count == 0)
    {
        return;
    }

    SaveFileDialog save = new SaveFileDialog();
    save.Filter = "Wave File (*.wav)|*.wav;";

    // Abort when the user cancels the dialog.
    if (save.ShowDialog() != System.Windows.Forms.DialogResult.OK)
    {
        return;
    }

    int selectedDevice = sourceList.SelectedItems[0].Index;

    // Capture stream bound to the chosen input device, CD-rate.
    sourceStream = new NAudio.Wave.WaveIn();
    sourceStream.DeviceNumber = selectedDevice;
    sourceStream.WaveFormat = new NAudio.Wave.WaveFormat(
        44100, NAudio.Wave.WaveIn.GetCapabilities(selectedDevice).Channels);

    // Each incoming buffer is forwarded to the writer by this handler.
    sourceStream.DataAvailable += new EventHandler<WaveInEventArgs>(sourceStream_DataAvailable);

    // Writer producing the .wav file in the stream's own format.
    waveWriter = new NAudio.Wave.WaveFileWriter(save.FileName, sourceStream.WaveFormat);

    sourceStream.StartRecording();
}
// Records from the device selected in SourceList to a user-chosen .wav file.
private void button5_Click(object sender, EventArgs e)
{
    if (SourceList.SelectedItems.Count == 0)
    {
        return;
    }

    SaveFileDialog save = new SaveFileDialog();
    // Fix: the filter string was missing its closing parenthesis
    // ("Wave File (*.wav|*.wav;"), which rendered oddly in the dialog.
    save.Filter = "Wave File (*.wav)|*.wav;";

    if (save.ShowDialog() != System.Windows.Forms.DialogResult.OK)
    {
        return;
    }

    int deviceNumber = SourceList.SelectedItems[0].Index;

    sourceStream = new NAudio.Wave.WaveIn();
    sourceStream.DeviceNumber = deviceNumber;
    sourceStream.WaveFormat = new NAudio.Wave.WaveFormat(44100, NAudio.Wave.WaveIn.GetCapabilities(deviceNumber).Channels);
    sourceStream.DataAvailable += new EventHandler<NAudio.Wave.WaveInEventArgs>(sourceStream_DataAvailable);

    waveWriter = new NAudio.Wave.WaveFileWriter(save.FileName, sourceStream.WaveFormat);
    sourceStream.StartRecording();
}
// Creates two NaCl clients keyed to each other, then wires a full-duplex
// audio loop: microphone -> DataAvailable handler -> buffered playback.
private void Form1_Load(object sender, EventArgs e)
{
    byte[] apk, ask, bpk, bsk;
    NaClClient.CreateKeys(out apk, out ask);
    NaClClient.CreateKeys(out bpk, out bsk);

    // Fix: removed an unused SHA256 instance that was created here and
    // never used (and never disposed).

    _clientA = NaClClient.Create(apk, ask, bpk);
    _clientB = NaClClient.Create(bpk, bsk, apk);

    _sw = new Stopwatch();
    _sw.Start();

    // 12 kHz, 8-bit mono keeps the per-buffer payload small.
    _wave = new WaveIn(this.Handle);
    _wave.WaveFormat = new WaveFormat(12000, 8, 1);
    _wave.BufferMilliseconds = 100;
    _wave.DataAvailable += _wave_DataAvailable;
    _wave.StartRecording();

    // Playback chain fed by the DataAvailable handler.
    _playback = new BufferedWaveProvider(_wave.WaveFormat);
    _waveOut = new WaveOut();
    _waveOut.DesiredLatency = 100;
    _waveOut.Init(_playback);
    _waveOut.Play();
}
// Starts pitch-tracked recording when the Start button is pressed.
private void startBtn_Click(object sender, RoutedEventArgs e)
{
    if (sourceStream == null)
    {
        InitializeSound(); // sets up sourcestream, dataIn event, buffer and waveout for data to be sent
        timePassed = 300;
    }

    try
    {
        sourceStream.StartRecording();
        StartTimer();
        noteList = new List<int>();
        NoteOnList = new List<NoteOnEvent>();
        waveOut.Play();
    }
    catch (NAudio.MmException) // fix: the exception variable was declared but never used
    {
        // Occurs when no input device is connected.
        System.Windows.MessageBox.Show("No Input Device available");
    }
    catch (InvalidOperationException)
    {
        // Deliberately swallowed: calling StartRecording on an
        // already-running stream is treated as a no-op.
    }

    if (m_pitchTracker == null)
    {
        m_pitchTracker = new PitchTracker(); // initialises the pitch-detection class
        m_pitchTracker.SampleRate = m_sampleRate; // sample rate is set to 44100.0f as standard
        // m_pitchTracker.PitchDetected += OnPitchDetected;
        // m_audioBuffer = new float[(int)Math.Round(m_sampleRate * m_timeInterval / 1000.0)];
    }
}
// Builds the sequence-recorder form: enumerates the capture devices into
// the microphone list and immediately starts recording from device 0
// using the conductor's wave format.
public SequenceRecorder(Conductor cond, KeyboardConfiguration key)
{
    conductor = cond;
    keyboard = key;
    InitializeComponent();

    int waveInDevices = NAudio.Wave.WaveIn.DeviceCount;
    for (int waveInDevice = 0; waveInDevice < waveInDevices; waveInDevice++)
    {
        NAudio.Wave.WaveInCapabilities deviceInfo = NAudio.Wave.WaveIn.GetCapabilities(waveInDevice);
        microphoneList.Items.Add(waveInDevice + ": " + deviceInfo.ProductName);
    }

    // Fix: selecting index 0 on an empty list throws when no capture
    // device is present; guard before selecting.
    if (microphoneList.Items.Count > 0)
    {
        microphoneList.SelectedIndex = 0;
    }
    sensitivity.SelectedIndex = 0;

    sourceStream = new NAudio.Wave.WaveIn();
    sourceStream.DeviceNumber = 0;
    sourceStream.WaveFormat = conductor.GetWaveFormat();

    // Frame-size bookkeeping used when parsing raw capture buffers.
    bytesPerChannel = (sourceStream.WaveFormat.BitsPerSample / 8);
    bytesPerSample = bytesPerChannel * sourceStream.WaveFormat.Channels;

    sourceStream.DataAvailable += new EventHandler<NAudio.Wave.WaveInEventArgs>(sourceStream_DataAvailable);
    sourceStream.StartRecording();
}
// Opens a broadcast voice link: microphone frames are pushed over UDP to
// endPoint while a background listener plays back whatever arrives.
void Connect(IPEndPoint endPoint, int inputDeviceNumber, INetworkChatCodec codec)
{
    // Outgoing socket, broadcast-capable.
    sendVoice = new UdpClient();
    sendVoice.EnableBroadcast = true;

    // Incoming socket bound to the shared endpoint.
    receiveVoice = new UdpClient();
    receiveVoice.Client.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.Broadcast, true);
    receiveVoice.Client.Bind(endPoint);
    sendVoice.Connect(endPoint);

    // Capture side: 50 ms buffers keep latency low.
    source = new WaveIn();
    source.BufferMilliseconds = 50;
    source.DeviceNumber = inputDeviceNumber;
    source.WaveFormat = codec.RecordFormat;
    source.DataAvailable += waveIn_DataAvailable;
    source.StartRecording();

    // Playback side: received frames are queued into waveProvider.
    waveOut = new WaveOut();
    waveProvider = new BufferedWaveProvider(codec.RecordFormat);
    waveOut.Init(waveProvider);
    waveOut.Play();

    connected = true;

    // The listener runs on the thread pool and feeds waveProvider.
    var listenerState = new ListenerThreadState { Codec = codec, EndPoint = endPoint };
    ThreadPool.QueueUserWorkItem(ListenerThread, listenerState);
}
// Establishes a two-way UDP voice session on endPoint: the microphone is
// streamed out while a pooled listener thread plays back incoming audio.
private void Connect(IPEndPoint endPoint, int inputDeviceNumber, INetworkChatCodec codec)
{
    // Capture side: 50 ms buffers for low latency.
    waveIn = new WaveIn();
    waveIn.BufferMilliseconds = 50;
    waveIn.DeviceNumber = inputDeviceNumber;
    waveIn.WaveFormat = codec.RecordFormat;
    waveIn.DataAvailable += waveIn_DataAvailable;
    waveIn.StartRecording();

    udpSender = new UdpClient();
    udpListener = new UdpClient();

    // To allow us to talk to ourselves for test purposes:
    // http://stackoverflow.com/questions/687868/sending-and-receiving-udp-packets-between-two-programs-on-the-same-computer
    udpListener.Client.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.ReuseAddress, true);
    udpListener.Client.Bind(endPoint);
    udpSender.Connect(endPoint);

    // Playback side: the listener thread fills waveProvider.
    waveOut = new WaveOut();
    waveProvider = new BufferedWaveProvider(codec.RecordFormat);
    waveOut.Init(waveProvider);
    waveOut.Play();

    connected = true;

    var listenerState = new ListenerThreadState { Codec = codec, EndPoint = endPoint };
    ThreadPool.QueueUserWorkItem(ListenerThread, listenerState);
}
// Records the pronunciation of `name` from the first capture device into
// Media\<name>.wav beneath the current directory.
public static void RecordSound(string name)
{
    // Bail out early when no recording hardware is present.
    if (WaveIn.DeviceCount <= 0)
    {
        MessageBox.Show("No recording hardware detected", "iMasomoAdmin", MessageBoxButton.OK, MessageBoxImage.Error);
        return;
    }
    inputDevice = 0;

    wordName = name;
    try
    {
        waveIn = new WaveIn();
        waveIn.DeviceNumber = inputDevice;
        waveIn.WaveFormat = new NAudio.Wave.WaveFormat(44100, WaveIn.GetCapabilities(inputDevice).Channels);

        // Incoming buffers are appended to the file by this handler.
        waveIn.DataAvailable += waveIn_DataAvailable;

        waveWriter = new WaveFileWriter(Environment.CurrentDirectory + @"\Media\" + wordName + ".wav", waveIn.WaveFormat);
        waveIn.StartRecording();
    }
    catch (Exception ex)
    {
        // Surface any device/file error to the operator.
        MessageBox.Show(ex.Message);
    }
}
// Builds the Opus encode/decode pipeline and starts full-duplex audio:
// mic buffers feed the encoder (via DataAvailable) while decoded frames
// are replayed through the buffered output.
void StartEncoding()
{
    _startTime = DateTime.Now;
    _bytesSent = 0;
    _segmentFrames = 960;

    // 48 kHz mono VoIP profile at a deliberately low bitrate.
    _encoder = new OpusEncoder(48000, 1, OpusNet.OpusApplication.Voip);
    _encoder.Bitrate = 8192;
    _decoder = new OpusDecoder(48000, 1);
    _bytesPerSegment = _encoder.FrameByteCount(_segmentFrames);

    // Capture side — function callbacks rather than window messages.
    _waveIn = new WaveIn(WaveCallbackInfo.FunctionCallback());
    _waveIn.BufferMilliseconds = 50;
    _waveIn.DeviceNumber = comboBox1.SelectedIndex;
    _waveIn.DataAvailable += _waveIn_DataAvailable;
    _waveIn.WaveFormat = new WaveFormat(48000, 16, 1);

    // Playback side, same PCM format.
    _playBuffer = new BufferedWaveProvider(new WaveFormat(48000, 16, 1));
    _waveOut = new WaveOut(WaveCallbackInfo.FunctionCallback());
    _waveOut.DeviceNumber = comboBox2.SelectedIndex;
    _waveOut.Init(_playBuffer);
    _waveOut.Play();

    _waveIn.StartRecording();

    // One-second stats tick, created lazily and restarted each call.
    if (_timer == null)
    {
        _timer = new Timer();
        _timer.Interval = 1000;
        _timer.Tick += _timer_Tick;
    }
    _timer.Start();
}
// Periodic tick: when flagged via reAcquireMicrophone, re-opens the capture
// stream on the currently selected device. skipCheck defers this work for a
// number of ticks.
private void timer1_Tick(object sender, EventArgs e)
{
    // Nothing to do before the conductor is wired up.
    if (conductor == null)
    {
        return;
    }

    // Honour a requested cooldown before re-checking the device.
    if (skipCheck > 0)
    {
        --skipCheck;
        return;
    }

    if (reAcquireMicrophone)
    {
        reAcquireMicrophone = false;

        // Rebuild the capture stream on the current device.
        // NOTE(review): the previous sourceStream is not stopped/disposed here.
        sourceStream = new NAudio.Wave.WaveIn();
        sourceStream.DeviceNumber = deviceNumber;
        sourceStream.WaveFormat = new NAudio.Wave.WaveFormat(44100, NAudio.Wave.WaveIn.GetCapabilities(deviceNumber).Channels);

        // Bytes per sample frame = bytes per channel * channel count.
        bytesPerSample = (sourceStream.WaveFormat.BitsPerSample / 8) * sourceStream.WaveFormat.Channels;

        sourceStream.DataAvailable += new EventHandler <NAudio.Wave.WaveInEventArgs>(sourceStream_DataAvailable);
        sourceStream.StartRecording();
    }

    // Legacy DirectInput-based microphone acquisition, kept for reference.
    /*
     * if (reAcquireMicrophone||(!directInput.IsDeviceAttached(MicrophoneGuid)))
     * {
     * if (Microphone != null)
     * Microphone.Unacquire();
     *
     * IList<DeviceInstance> deviceInstance = directInput.GetDevices(DeviceType.Microphone, DeviceEnumerationFlags.AllDevices);
     * if (deviceInstance.Count > 0)
     * MicrophoneGuid = deviceInstance[0].InstanceGuid;
     * else
     * {
     * // don't check for another second
     * skipCheck = 100;
     * return;
     * }
     *
     * reAcquireMicrophone = false;
     *
     * // Instantiate the Microphone
     * Microphone = new Microphone(directInput, MicrophoneGuid);
     *
     * // Set BufferSize in order to use buffered data.
     * Microphone.Properties.BufferSize = 128;
     *
     * // Acquire the Microphone
     * Microphone.Acquire();
     *
     * // allow values to stabilize before using them!
     * skipCount = 50;
     * }
     */
}
// Warms up the default capture device by opening and immediately closing a
// recording session, so the first real recording starts without the
// driver-initialisation delay.
public static void PrepareMic()
{
    // Fix: `using` guarantees disposal even if StartRecording throws
    // (e.g. when no microphone is present); the old code leaked the
    // WaveIn in that case.
    using (WaveIn waveIn = new WaveIn())
    {
        waveIn.WaveFormat = new WaveFormat(SAMPLE_FREQ, 16, 1);
        waveIn.StartRecording();
        waveIn.StopRecording();
    }
}
// Starts pushing audio from capture device 0 (32 kHz mono) into
// ListenerDataAvailable.
public void StartListener()
{
    waveListener = new WaveIn
    {
        DeviceNumber = 0,
        WaveFormat = new WaveFormat(32000, 1)
    };
    waveListener.DataAvailable += ListenerDataAvailable;
    waveListener.StartRecording();
}
// Window constructor: lists the available capture devices on the console,
// starts a 44.1 kHz mono capture sized to `buffersize`, and wires two
// charts (raw samples + a zoomed view) to the shared sample buffers.
public MainWindow()
{
    InitializeComponent();
    this.WindowStartupLocation = System.Windows.WindowStartupLocation.CenterScreen;

    // Enumerate capture devices for diagnostics.
    int waveInDevices = WaveIn.DeviceCount;
    for (int waveInDevice = 0; waveInDevice < waveInDevices; waveInDevice++)
    {
        WaveInCapabilities deviceInfo = WaveIn.GetCapabilities(waveInDevice);
        Console.WriteLine("Device {0}: {1}, {2} channels", waveInDevice, deviceInfo.ProductName, deviceInfo.Channels);
    }

    waveIn = new WaveIn();
    // Buffer length scaled with buffersize (47 ms per 2048 samples).
    waveIn.BufferMilliseconds = 47*buffersize/2048;
    waveIn.DeviceNumber = 0;
    waveIn.WaveFormat = new WaveFormat(44100, 32, 1);
    waveIn.DataAvailable += waveIn_DataAvailable;
    try
    {
        waveIn.StartRecording();
    }
    catch(NAudio.MmException e)
    {
        // No capture device available; chart setup still proceeds.
        Console.WriteLine(e.ToString() + "\nPlug in a microphone!");
    }

    // Shared buffers consumed by the charts and the DataAvailable handler.
    bin = new int[buffersize * 2];
    sampledata = new float[buffersize * 2];
    priori = new double[buffersize * 2];

    channelLabel = new int[1];
    channelLabel[0] = 1;
    velocity = new int[1];
    velocity[0] = 0;

    // Zero-fill and index the x-axis bins.
    for (int i = 0; i < buffersize * 2; i++)
    {
        bin[i] = i;
        sampledata[i] = 0;
        priori[i] = 0;
    }

    // Chart 1 shows the full sample range; chart 2 a fixed zoom window.
    chart1.Viewport.Visible = new DataRect(0, -1.0, buffersize * 2, 2.0);
    chart2.Viewport.Visible = new DataRect(1620, 0, 280, 110);

    // Both charts plot sampledata against bin indices.
    bins = new EnumerableDataSource<int>(bin);
    bins.SetXMapping(x => x);
    rawIn = new EnumerableDataSource<float>(sampledata);
    rawIn.SetYMapping(y => y);
    CompositeDataSource comp1 = new CompositeDataSource(bins, rawIn);
    chart1.AddLineGraph(comp1);
    CompositeDataSource comp2 = new CompositeDataSource(bins, rawIn);
    chart2.AddLineGraph(comp2);
}
// Opens the default capture device (44.1 kHz, 16-bit mono) and begins
// delivering buffers to WaveIn_DataAvailable.
public void StartAudioIn()
{
    m_WaveIn = new WaveIn
    {
        // A 32-bit stereo format was used here at one point; 16-bit mono now.
        WaveFormat = new WaveFormat(44100, 16, 1)
    };
    m_WaveIn.DataAvailable += new EventHandler<WaveInEventArgs>(WaveIn_DataAvailable);
    m_WaveIn.StartRecording();
}
// Records microphone input into fileName, optionally pre-setting the
// mixer input volume (0-100, default 100) on a best-effort basis.
public void Record(string fileName, int volume = 100)
{
    _waveIn = new WaveIn();
    _waveIn.WaveFormat = new WaveFormat();

    _writer = new WaveFileWriter(fileName, _waveIn.WaveFormat);

    // Best-effort: adjust the input line's level before capture begins.
    TrySetVolumeControl(_waveIn.GetMixerLine(), volume);

    _waveIn.DataAvailable += new_dataAvailable;
    _waveIn.StartRecording();
}
// Toggles between recording a test clip and comparing it against the
// reference audio for the current level.
private void recordBtn_Click(object sender, EventArgs e)
{
    if (setMode)
    {
        // Start recording Class<sec>_kidWordAudio/test.wav (44.1 kHz mono).
        String filename = "Class" + LoginForm.classSec + "_kidWordAudio/test.wav";
        recordBtn.Text = "STOP";

        wavSource = new NAudio.Wave.WaveIn();
        wavSource.WaveFormat = new NAudio.Wave.WaveFormat(44100, 1);
        wavSource.DataAvailable += new EventHandler<NAudio.Wave.WaveInEventArgs>(wavSource_DataAvail);
        wavSource.RecordingStopped += new EventHandler<NAudio.Wave.StoppedEventArgs>(wavSource_RecordingStop);
        wavFile = new NAudio.Wave.WaveFileWriter(filename, wavSource.WaveFormat);
        wavSource.StartRecording();
        setMode = false;
        // Fix: removed a no-op try/catch that immediately rethrew the
        // caught exception — it added nothing but stack noise.
    }
    else
    {
        // When you press "STOP", it automatically compares.
        wavSource.StopRecording();

        String recordWAV_file = "Class" + LoginForm.classSec + "_kidWordAudio/test.wav";
        // NOTE(review): the ";" before ".wav" looks like a typo, but it is
        // kept because existing reference files may carry this exact name.
        String refWAV_file = "Class" + LoginForm.classSec + "_kidWordAudio/" + levels[curPos] + ";.wav";

        java.io.File f1 = new java.io.File(recordWAV_file);
        java.io.File f2 = new java.io.File(refWAV_file);
        if (!f1.exists() || !f2.exists())
        {
            MessageBox.Show("WARNING: One of the files might be missing!");
        }
        else
        {
            // Similarity score; >= 10.0 counts as a match.
            float compute_Result = compareAudio(recordWAV_file, refWAV_file);
            if (compute_Result >= 10.0)
            {
                MessageBox.Show("Matched: " + compute_Result.ToString() + "\n You Win !");
            }
            else
            {
                MessageBox.Show("Matched: " + compute_Result.ToString() + "\n Try Again !");
            }
        }
        recordBtn.Text = "RECORD";
        setMode = true;
    }
}
// Captures the microphone (44.1 kHz, 16-bit mono) straight into FileName.
public void StartRecording()
{
    waveInStream = new WaveIn { WaveFormat = new WaveFormat(44100, 16, 1) };
    writer = new WaveFileWriter(FileName, waveInStream.WaveFormat);
    waveInStream.DataAvailable += WaveInStream_DataAvailable;
    waveInStream.StartRecording();
}
// Prepares capture on the given device at telephone quality (8 kHz mono)
// and starts recording immediately.
public void InitializeNAudio(int device)
{
    const int sampleRate = 8000; // 8 kHz
    const int channels = 1;      // mono

    waveIn = new WaveIn();
    waveIn.DeviceNumber = device;
    waveIn.DataAvailable += waveIn_DataAvailable;
    waveIn.RecordingStopped += new EventHandler<StoppedEventArgs>(waveIn_RecordingStopped);
    waveIn.WaveFormat = new WaveFormat(sampleRate, channels);
    waveIn.StartRecording();
}
// Begins microphone capture unless a session is already active, then
// raises the post-start hook.
public void StartListening()
{
    if (IsActive)
    {
        return;
    }

    _waveIn = new WaveIn { DeviceNumber = Options.DeviceNumber };
    _waveIn.DataAvailable += WaveIn_DataAvailable;
    _waveIn.WaveFormat = new WaveFormat(Options.MicrophoneBitRate, 16, 1);
    _waveIn.StartRecording();

    OnAfterStartListening();
}
// Restarts capture on the last available input device with small (10 ms)
// buffers; a no-op when no capture hardware is present.
public void RestartRecording()
{
    if (WaveInEvent.DeviceCount <= 0)
    {
        return;
    }

    // Fix: release any previous session first — the old WaveIn was simply
    // overwritten, leaking the device handle on every restart.
    if (waveIn != null)
    {
        waveIn.Dispose();
    }

    waveIn = new WaveIn();
    int inputDeviceNumber = WaveInEvent.DeviceCount - 1;
    waveIn.DeviceNumber = inputDeviceNumber;
    waveIn.BufferMilliseconds = 10;
    waveIn.DataAvailable += new EventHandler<WaveInEventArgs>(waveIn_DataAvailable);
    waveIn.StartRecording();
}
// REC/STOP toggle: on REC, records the selected input to a user-chosen
// .wav file (added to the sample list); on STOP, tears down the stream
// and writer and restores the button label.
private void btn_RECSTOP_Click(object sender, EventArgs e)
{
    if (!isRecording)
    {
        // An input device must be selected before recording can start.
        if (cmb_InputsList.SelectedItem == null)
        {
            MessageBox.Show("Error! \n No Input Selected, Please select an Audio Input before recording");
            return;
        }

        // Ask where to save; register the choice in the sample list.
        SaveFileDialog save = new SaveFileDialog();
        save.Filter = "Wave File(*.wav)|*.wav;";
        if (save.ShowDialog() != System.Windows.Forms.DialogResult.OK)
        {
            return;
        }
        else
        {
            lst_samplelist.Items.Add(save.FileName);
        }

        int deviceNumber = cmb_InputsList.SelectedIndex;

        InputStream = new NAudio.Wave.WaveIn();
        InputStream.DeviceNumber = deviceNumber;
        InputStream.WaveFormat = new NAudio.Wave.WaveFormat(44100, NAudio.Wave.WaveIn.GetCapabilities(deviceNumber).Channels);

        // Incoming buffers are written to the file by this handler.
        InputStream.DataAvailable += new EventHandler <NAudio.Wave.WaveInEventArgs>(InputStream_DataAvailable);
        waveWriter = new NAudio.Wave.WaveFileWriter(save.FileName, InputStream.WaveFormat);
        InputStream.StartRecording();

        btn_RECSTOP.Text = "STOP";
        isRecording = true;
    }
    else
    {
        // Stop and dispose the stream, then flush/close the writer.
        if (InputStream != null)
        {
            InputStream.StopRecording();
            InputStream.Dispose();
            InputStream = null;
        }
        if (waveWriter != null)
        {
            waveWriter.Dispose();
            waveWriter = null;
        }
        btn_RECSTOP.Text = "REC";
        isRecording = false;
    }
}
// Creates the recorder in monitoring mode and, outside the WPF designer,
// immediately starts pulling audio from the default device.
public Recorder()
{
    recordingState = RecordingState.Monitoring;

    waveIn = new WaveIn();
    waveIn.DataAvailable += waveIn_DataAvailable;
    waveIn.RecordingStopped += waveIn_RecordingStopped;
    waveIn.BufferMilliseconds = 15;
    waveIn.WaveFormat = new WaveFormat(44100, 16, 2);

    // Don't touch audio hardware while the XAML designer instantiates us.
    bool inDesigner = (bool)(DesignerProperties.IsInDesignModeProperty.GetMetadata(typeof(DependencyObject)).DefaultValue);
    if (!inDesigner)
    {
        waveIn.StartRecording();
    }
}
// Starts capture sized so each DataAvailable callback delivers roughly
// samplesPerBatch samples for the FFT stage.
public void Start()
{
    waveIn = new WaveIn
    {
        DeviceNumber = 0,
        // The number of milliseconds needed for one buffer of samplesPerBatch samples.
        BufferMilliseconds = 1000 / (SAMPLE_RATE / samplesPerBatch),
        // Mono at the configured rate/depth.
        WaveFormat = new WaveFormat(SAMPLE_RATE, BIT_DEPTH, 1)
    };
    waveIn.DataAvailable += waveIn_DataAvailable;

    data = new float[samplesPerBatch];
    waveIn.StartRecording();
}
// (Re)binds capture to the device index stored in audioSource, tearing
// down any previous session first.
void SetupAudioSource()
{
    if (inWave != null)
    {
        inWave.StopRecording();
        inWave.Dispose();
        inWave = null;
    }

    inWave = new WaveIn { DeviceNumber = audioSource };
    inWave.DataAvailable += inWave_DataAvailable;
    inWave.StartRecording();
}
// Switches capture to the newly selected microphone.
private void microphoneList_SelectedIndexChanged(object sender, EventArgs e)
{
    deviceNumber = microphoneList.SelectedIndex;

    if (sourceStream != null)
    {
        sourceStream.StopRecording();
        // Fix: the old stream was stopped but never disposed, leaking the
        // device handle every time the user changed microphones.
        sourceStream.Dispose();
    }

    sourceStream = new NAudio.Wave.WaveIn();
    sourceStream.DeviceNumber = deviceNumber;
    sourceStream.WaveFormat = conductor.GetWaveFormat();

    // Bytes per sample frame = bytes per channel * channel count.
    bytesPerSample = (sourceStream.WaveFormat.BitsPerSample / 8) * sourceStream.WaveFormat.Channels;

    sourceStream.DataAvailable += new EventHandler<NAudio.Wave.WaveInEventArgs>(sourceStream_DataAvailable);
    sourceStream.StartRecording();
}
// Starts a record-while-monitoring session: microphone data is buffered,
// echoed to the speakers, and simultaneously persisted to temp.wav.
private void OnStartRecordingClick(object sender, RoutedEventArgs e)
{
    recorder = new WaveIn();
    recorder.DataAvailable += RecorderOnDataAvailable;

    // Playback chain wraps the buffer in a provider that also writes to disk.
    bufferedWaveProvider = new BufferedWaveProvider(recorder.WaveFormat);
    savingWaveProvider = new SavingWaveProvider(bufferedWaveProvider, "temp.wav");

    player = new WaveOut();
    player.Init(savingWaveProvider);
    player.Play();

    recorder.StartRecording();
}
/// <summary>
/// Starts capturing microphone audio (44.1 kHz mono) into the given file.
/// </summary>
/// <param name="localArquivo">Full path of the .wav file to create.</param>
public void IniciarGravacao(string localArquivo)
{
    waveSource = new WaveIn {DeviceNumber = SelectedWaveInDevice, WaveFormat = new WaveFormat(44100, 1)};
    waveSource.DataAvailable += waveSource_DataAvailable_write;

    // Start from a clean slate: drop any stale file and ensure the target
    // directory exists.
    if (File.Exists(localArquivo))
        File.Delete(localArquivo);

    var directory = Path.GetDirectoryName(localArquivo);
    // Fix: guard against a bare file name — GetDirectoryName then yields an
    // empty string and Directory.CreateDirectory("") would throw.
    if (!string.IsNullOrEmpty(directory) && !Directory.Exists(directory))
        Directory.CreateDirectory(directory);

    waveFile = new WaveFileWriter(localArquivo, waveSource.WaveFormat);
    waveSource.StartRecording();
}
// Initialises 16-bit capture on DeviceID into a circular float buffer and
// spins up the event-dispatch thread.
public override void Init(int DeviceID = 0, int channels = 2, int samplerate = 44100, int bufferMax = (44100 * 3))
{
    base.Init(DeviceID, channels, samplerate, bufferMax);
    DisposeWaveIn();

    waveIn = new WaveIn() { DeviceNumber = DeviceID };
    waveIn.WaveFormat = new WaveFormat(samplerate, 16, channels);
    waveIn.BufferMilliseconds = 30;
    waveIn.DataAvailable += WaveIn_DataAvailable;

    // Scratch output plus the ring buffer the handler fills.
    result = new float[bufferMax];
    buffer = new Collections.CircularBuffer<float>(bufferMax);

    waveIn.StartRecording();

    // Background thread that drains the buffer and raises events.
    e = new Thread(new ThreadStart(EventThread));
    Run = true;
    e.Start();
}
// Starts capturing 16 kHz mono audio from the default device; buffers are
// handled by waveIn_DataAvailable.
private void Button_Click_1(object sender, RoutedEventArgs e)
{
    sourceStream = new NAudio.Wave.WaveIn
    {
        DeviceNumber = 0,
        WaveFormat = new NAudio.Wave.WaveFormat(16000, 1) // 16 kHz mono
    };
    sourceStream.DataAvailable += waveIn_DataAvailable;
    sourceStream.StartRecording();
    // (A direct monitoring path via WaveInProvider/DirectSoundOut was
    // sketched here previously but is intentionally disabled.)
}
// Begins recording at SamplingRate (mono) and flips the start/stop
// button states.
private void button1_Click(object sender, EventArgs e)
{
    StartBtn.Enabled = false;
    StopBtn.Enabled = true;

    waveSource = new WaveIn();
    waveSource.WaveFormat = new WaveFormat(SamplingRate, 1);
    waveSource.DataAvailable += new EventHandler<WaveInEventArgs>(waveSource_DataAvailable);
    waveSource.RecordingStopped += new EventHandler<StoppedEventArgs>(waveSource_RecordingStopped);

    waveSource.StartRecording();
}
// Captures microphone audio at 8 kHz mono until the user presses Enter.
static void Main(string[] args)
{
    //NSpeex.SpeexEncoder encoder = new SpeexEncoder(BandMode.Wide);
    NAudio.Wave.WaveIn waveIn = new NAudio.Wave.WaveIn()
    {
        WaveFormat = new WaveFormat(8000, 1)
    };
    waveIn.DataAvailable += WaveIn_DataAvailable;
    waveIn.StartRecording();

    // Fix: removed a dead `IWavePlayer waveOut = new WaveOut();` local that
    // was never initialised or played.

    // Block until Enter so recording keeps running.
    Console.ReadLine();
}
// Transitions Stopped -> Monitoring by opening the requested capture
// device with the configured format; throws if called in any other state.
public void BeginMonitoring(int recordingDevice)
{
    if (recordingState != RecordingState.Stopped)
    {
        throw new InvalidOperationException(
            "Can't begin monitoring while we are in this state: " + recordingState.ToString());
    }

    waveIn = new WaveIn();
    waveIn.DeviceNumber = recordingDevice;
    waveIn.DataAvailable += waveIn_DataAvailable;
    waveIn.RecordingStopped += new EventHandler(waveIn_RecordingStopped);
    waveIn.WaveFormat = recordingFormat;
    waveIn.StartRecording();

    // Best-effort volume control acquisition for the input line.
    TryGetVolumeControl();
    recordingState = RecordingState.Monitoring;
}
// Routes the selected capture device straight to the speakers for live
// monitoring in the editor (48 kHz, device's native channel count).
public void editorSesDinlemeBaslat()
{
    int deviceNumber = sesCihazComboBox.SelectedIndex;

    sourceStream = new NAudio.Wave.WaveIn();
    sourceStream.DeviceNumber = deviceNumber;
    sourceStream.WaveFormat = new NAudio.Wave.WaveFormat(48000, NAudio.Wave.WaveIn.GetCapabilities(deviceNumber).Channels);

    // Bridge capture -> playback.
    NAudio.Wave.WaveInProvider waveIn = new NAudio.Wave.WaveInProvider(sourceStream);
    waveOut = new NAudio.Wave.DirectSoundOut();
    waveOut.Init(waveIn);

    sourceStream.StartRecording();
    waveOut.Play();

    // Shows the buffer count in use (user-facing Turkish label text).
    onbellekSayiLabel.Text = "Kullanılacak Önbellek Sayısı: " + sourceStream.NumberOfBuffers;
}
// Starts recording the default device into demo.wav; a no-op when a
// recording is already in progress.
public void StartRecording()
{
    if (isRecording)
        return;

    waveIn = new WaveIn();
    waveIn.DeviceNumber = 0;
    waveIn.DataAvailable += waveIn_DataAvailable;
    waveIn.RecordingStopped += waveIn_RecordingStopped;
    waveIn.WaveFormat = recordingFormat;

    // Fix: the writer used to be created *after* StartRecording, so the
    // first DataAvailable callback could race a still-null writer. Create
    // it before any audio can arrive.
    writer = new WaveFileWriter("demo.wav", recordingFormat);

    waveIn.StartRecording();
    isRecording = true;
}
// Starts recording to a timestamped clip under outputFolder.
private void recordButton_Click(object sender, EventArgs e)
{
    state = "record";
    recordButton.Enabled = false;

    // Fix: the format specifier was "yyy" (3-digit-minimum year); "yyyy" is
    // the intended four-digit year. Output is identical for years >= 1000.
    outputFilename = String.Format("Clip {0:yyyy-MM-dd HH-mm-ss}.wav", DateTime.Now);
    outputFilePath = Path.Combine(outputFolder, outputFilename);
    Debug.Print(outputFilePath);

    sourceStream = new NAudio.Wave.WaveIn();
    sourceStream.DeviceNumber = deviceNumber;
    sourceStream.WaveFormat = new NAudio.Wave.WaveFormat(sampleRate, inChannels);
    sourceStream.DataAvailable += new EventHandler<NAudio.Wave.WaveInEventArgs>(sourceStream_DataAvailable);

    waveWriter = new NAudio.Wave.WaveFileWriter(outputFilePath, sourceStream.WaveFormat);
    sourceStream.StartRecording();
}
// Hidden recorder form: starts capturing to a timestamped clip as soon as
// it is constructed (window minimised and kept out of the taskbar).
public frmRecording()
{
    InitializeComponent();
    this.WindowState = FormWindowState.Minimized;
    this.ShowInTaskbar = false;

    // Fix: "yyy" -> "yyyy" (four-digit year; identical output for years >= 1000).
    outputFilename = String.Format("Clip {0:yyyy-MM-dd HH-mm-ss}.wav", DateTime.Now);
    outputFilePath = Path.Combine(outputFolder, outputFilename);
    Debug.Print(outputFilePath);

    sourceStream = new NAudio.Wave.WaveIn();
    sourceStream.DeviceNumber = deviceNumber;
    sourceStream.WaveFormat = new NAudio.Wave.WaveFormat(sampleRate, inChannels);
    sourceStream.DataAvailable += new EventHandler<NAudio.Wave.WaveInEventArgs>(sourceStream_DataAvailable);

    waveWriter = new NAudio.Wave.WaveFileWriter(outputFilePath, sourceStream.WaveFormat);
    sourceStream.StartRecording();
}
// Full-duplex monitor: capture from deviceNumber and immediately replay
// the audio through the default output device.
private void StartRecording(int deviceNumber)
{
    // Incoming side — the small 50 ms buffer is critical for latency.
    m_WaveIn = new WaveIn();
    m_WaveIn.BufferMilliseconds = 50; // This is very very important.
    m_WaveIn.DeviceNumber = deviceNumber;
    m_WaveIn.DataAvailable += WaveIn_DataAvailable;
    m_WaveIn.WaveFormat = m_waveFormat;
    m_WaveIn.StartRecording();

    // Outgoing side, fed by the DataAvailable handler.
    waveOut = new WaveOut();
    waveProvider = new BufferedWaveProvider(m_waveFormat);
    waveOut.Init(waveProvider);
    waveOut.Play();
}
// Loops the default microphone straight back to the speakers (echo test).
private void button1_Click(object sender, EventArgs e)
{
    // Plays my own voice back from the default microphone.
    int defaultDevice = 0;

    source = new NAudio.Wave.WaveIn();
    source.DeviceNumber = defaultDevice;
    source.WaveFormat = new NAudio.Wave.WaveFormat(44100, NAudio.Wave.WaveIn.GetCapabilities(defaultDevice).Channels);

    // Bridge capture into the DirectSound output.
    NAudio.Wave.WaveInProvider monitorFeed = new NAudio.Wave.WaveInProvider(source);
    waveout = new NAudio.Wave.DirectSoundOut();
    waveout.Init(monitorFeed);

    source.StartRecording();
    waveout.Play();
}
// Starts the camera preview plus a microphone loopback through a
// buffered provider that drops data on overflow.
public Form1()
{
    InitializeComponent();

    cameraControl1.Size = new System.Drawing.Size(640, 480);
    cameraControl1.Start(0, true);

    wo = new WaveOut();
    wi = new WaveIn();
    wi.DataAvailable += new EventHandler<WaveInEventArgs>(wi_DataAvailable);

    bwp = new BufferedWaveProvider(wi.WaveFormat);
    bwp.DiscardOnBufferOverflow = true; // never stall capture on a full buffer

    wo.Init(bwp);
    wi.StartRecording();
    wo.Play();
}
// Configures and starts a timed microphone capture.
// male: speaker flag stored in isMale; duration: recording length in
// seconds; path: output location stored in outputPath.
public void run(bool male, int duration, string path)
{
    // Fix: the old code rebuilt sourceStream even while a recording was in
    // progress, orphaning the live WaveIn (its events kept firing while
    // the field no longer referenced it, so it could never be stopped).
    if (recording)
    {
        return;
    }

    fileDuration = duration;
    outputPath = path;
    // One buffer spans the whole recording.
    bufferMilliseconds = fileDuration * 1000;
    isMale = male;
    waveFormat = new WaveFormat(fs, bits, nChannels);

    sourceStream = new WaveIn();
    sourceStream.BufferMilliseconds = bufferMilliseconds;
    sourceStream.WaveFormat = waveFormat;
    sourceStream.DataAvailable += sourceStreamDataAvailable;

    sourceStream.StartRecording();
    recording = true;
}
// Records from the capture device selected in listview_sources into a
// user-chosen .wav file. Audio chain: WaveIn(rec) -> callback -> waveWriter.
private void button_Start_Click(object sender, EventArgs e)
{
    // No microphone source selected yet.
    if (listview_sources.SelectedItems.Count == 0)
    {
        return;
    }

    // Ask where the .wav should be written.
    SaveFileDialog save = new SaveFileDialog();
    save.Filter = "Wave File (*.wav)|*.wav;";
    if (save.ShowDialog() != DialogResult.OK)
    {
        return;
    }

    // Index of the chosen recording device.
    int deviceNumber = listview_sources.SelectedItems[0].Index;

    sourceStream = new WaveIn();
    sourceStream.DeviceNumber = deviceNumber;
    // 16 kHz mono (a per-device channel count was used here previously).
    sourceStream.WaveFormat = new WaveFormat(16000, 1);

    // The recording callback feeds the writer.
    sourceStream.DataAvailable += new EventHandler<WaveInEventArgs>(sourceStream_DataAvailable);
    waveWriter = new WaveFileWriter(save.FileName, sourceStream.WaveFormat);

    // Status label (user-facing Japanese text preserved verbatim).
    this.Label_Status.Text = "録音中" + "\r\n" + "開始時間:" + DateTime.Now;

    sourceStream.StartRecording();
}
// Wires the FFT aggregator and immediately begins streaming the default
// microphone into OnDataAvailable.
public Form1()
{
    InitializeComponent();

    sampleAggregator.FftCalculated += new EventHandler<FftEventArgs>(FftCalculated);
    sampleAggregator.PerformFFT = true;

    sourceStream = new NAudio.Wave.WaveIn();
    sourceStream.DeviceNumber = 0;
    sourceStream.WaveFormat = new NAudio.Wave.WaveFormat(44100, NAudio.Wave.WaveIn.GetCapabilities(0).Channels);
    sourceStream.DataAvailable += OnDataAvailable;

    // (A direct monitoring path via WaveInProvider/DirectSoundOut was
    // sketched here previously but is intentionally disabled.)
    sourceStream.StartRecording();
}
// Starts capture at SAMPLE_FREQ (16-bit mono), forwarding buffers to both
// the internal handler and the caller-supplied one; optionally archives
// the session as a time-named file under waves/.
public void StartRecording(EventHandler<WaveInEventArgs> OnDataAvailable)
{
    waveIn = new WaveIn();
    waveIn.DataAvailable += waveIn_OnDataAvailable;
    waveIn.DataAvailable += OnDataAvailable;
    waveIn.RecordingStopped += OnDataStopped;
    waveIn.WaveFormat = new WaveFormat(SAMPLE_FREQ, 16, 1);
    waveIn.StartRecording();

    if (!SaveFile)
    {
        Logger.Add("Sound File isn't recording");
        return;
    }

    // File name derived from the time of day, e.g. "12.34.56.wav".
    string fileName = DateTime.Now.TimeOfDay.ToString().Remove(8, 8).Replace(':', '.') + ".wav";
    Directory.CreateDirectory("waves");
    WFile = new WaveFileWriter("waves/" + fileName, waveIn.WaveFormat);
    Logger.Add("Sound File recorded in " + fileName);
}
// Resets buffering state, reopens the selected capture device (16-bit
// stereo at `rate`), and rebuilds the three band-filter chains.
public override void Init()
{
    base.Init();

    writeIndex = 0;
    Buffering = 0;

    waveInDispose();
    waveIn = new WaveIn()
    {
        DeviceNumber = master.form.SoundList.SelectedIndex,
    };
    waveIn.DataAvailable += WaveIn_DataAvailable;
    waveIn.BufferMilliseconds = 14;
    waveIn.WaveFormat = new WaveFormat(rate, 16, 2);
    waveIn.StartRecording();

    // Low / mid / high filter chains.
    Buffer_Low = CreateFilterBuffer();
    Buffer_Mid = CreateFilterBuffer();
    Buffer_High = CreateFilterBuffer();
    Param_Low = CreateFilterParam(FilterType.LowPass, 800, 3f);
    Param_Mid = CreateFilterParam(FilterType.BandPass, 2000, 0.1f);
    Param_High = CreateFilterParam(FilterType.HighPass, 8000, 2f);
}
/// <summary>
/// Constructor: hooks the FFT-complete event, enables FFT computation, and
/// immediately starts recording from device 0 at 44.1 kHz.
/// </summary>
public Form1()
{
    InitializeComponent();

    // Subscribe to FFT results and turn the transform on.
    sampleAggregator.FftCalculated += FftCalculated;
    sampleAggregator.PerformFFT = true;

    // Open recording device 0 with its native channel count at 44.1 kHz.
    sourceStream = new NAudio.Wave.WaveIn();
    sourceStream.DeviceNumber = 0;
    int deviceChannels = NAudio.Wave.WaveIn.GetCapabilities(0).Channels;
    sourceStream.WaveFormat = new NAudio.Wave.WaveFormat(44100, deviceChannels);
    sourceStream.DataAvailable += OnDataAvailable;

    // Disabled monitoring path (mic -> DirectSound playback), kept for reference:
    //NAudio.Wave.WaveInProvider waveIn = new NAudio.Wave.WaveInProvider(sourceStream);
    //waveOut = new NAudio.Wave.DirectSoundOut();
    // waveOut.Init(waveIn);

    sourceStream.StartRecording();
    // waveOut.Play();
}
// Routes the selected recording device straight to the speakers for live
// monitoring: WaveIn -> WaveInProvider -> DirectSoundOut.
private void button2_Click(object sender, EventArgs e)
{
    // No source selected — nothing to monitor.
    if (SourceList.SelectedItems.Count == 0)
    {
        return;
    }

    int deviceNumber = SourceList.SelectedItems[0].Index;

    // Dispose any session left over from a previous click so repeated clicks
    // don't leak the old WaveIn/DirectSoundOut instances.
    if (waveOut != null)
    {
        waveOut.Dispose();
    }
    if (sourceStream != null)
    {
        sourceStream.Dispose();
    }

    // Capture at 44.1 kHz with the device's native channel count.
    sourceStream = new NAudio.Wave.WaveIn();
    sourceStream.DeviceNumber = deviceNumber;
    sourceStream.WaveFormat = new NAudio.Wave.WaveFormat(44100, NAudio.Wave.WaveIn.GetCapabilities(deviceNumber).Channels);

    // Wrap the capture stream so it can feed the playback device.
    NAudio.Wave.WaveInProvider waveIn = new NAudio.Wave.WaveInProvider(sourceStream);
    waveOut = new NAudio.Wave.DirectSoundOut();
    waveOut.Init(waveIn);

    sourceStream.StartRecording();
    waveOut.Play();
}
// Starts recording the selected microphone (16 kHz mono) into
// C:\PEDGRAVACAO\Audios\<year>_<month>_<day>.wav and flips the UI into the
// "recording" state. Shows a warning when no microphone is selected.
private void btnIniciar_Click(object sender, EventArgs e)
{
    string pasta = @"C:\PEDGRAVACAO\Audios";

    // Make sure the target folder exists.
    if (!Directory.Exists(pasta))
    {
        Directory.CreateDirectory(pasta);
    }

    if (micList.SelectedItem == null)
    {
        // User-facing Portuguese: "Please select the microphone to use!"
        MessageBox.Show("Por favor selecionar o Microfone a ser usado!");
    }
    else
    {
        // Read the clock ONCE so day/month/year cannot straddle midnight
        // between separate DateTime.Now accesses (the original read it
        // three times). Int-to-string concatenation keeps the original
        // unpadded "yyyy_m_d" naming.
        DateTime agora = DateTime.Now;
        string nmAudio = agora.Year + "_" + agora.Month + "_" + agora.Day + ".wav";
        String nvCaminho = Path.Combine(pasta, nmAudio);

        var deviceNumber = micList.SelectedIndex;

        // 16 kHz mono capture from the selected device.
        sourceStream = new NAudio.Wave.WaveIn();
        sourceStream.DeviceNumber = deviceNumber;
        sourceStream.WaveFormat = new NAudio.Wave.WaveFormat(16000, 1);
        sourceStream.DataAvailable += new EventHandler<NAudio.Wave.WaveInEventArgs>(sourceStream_DataAvailable);

        // Writer that persists the recorded buffers to the dated file.
        waveWriter = new NAudio.Wave.WaveFileWriter(nvCaminho, sourceStream.WaveFormat);
        sourceStream.StartRecording();

        // Toggle the UI into "recording" state.
        btnParar.Enabled = true;
        btnIniciar.Enabled = false;
    }
}
/// <summary>
/// Starts recording the device chosen in devices_cbx to the configured wave
/// file path, at the sample rate selected in sampleRate_cbx (mono). Does
/// nothing when either combo box lacks a valid selection.
/// </summary>
private void startRecordSound()
{
    int deviceNumber = devices_cbx.SelectedIndex;
    int sampleRate = sampleRate_cbx.SelectedIndex;

    // Guard: both combo boxes must have a selection and the sample-rate index
    // must map to a known table entry.
    bool validSelection = deviceNumber >= 0
        && sampleRate >= 0
        && sampleRate < Constant.TextSampleRate.Count<string>();
    if (!validSelection)
    {
        return;
    }

    // Mono capture at the chosen sample rate.
    _sourceStream = new NAudio.Wave.WaveIn();
    _sourceStream.DeviceNumber = deviceNumber;
    _sourceStream.WaveFormat = new NAudio.Wave.WaveFormat(Constant.SampleRate[sampleRate], 1);

    // Record destination and change flag.
    _yourPath = VCDir.Instance.PathWaveFile;
    _yourChanged = true;

    _sourceStream.DataAvailable += new EventHandler<NAudio.Wave.WaveInEventArgs>(sourceStream_DataAvailable);
    _waveWriter = new NAudio.Wave.WaveFileWriter(_yourPath, _sourceStream.WaveFormat);
    _sourceStream.StartRecording();
}
/// <summary>
/// Starts capturing audio from the given device at 44.1 kHz / 16-bit with
/// the requested channel count, using three 100 ms buffers. Errors are
/// logged rather than thrown.
/// </summary>
/// <param name="AudioDeviceIndex">WaveIn device number to record from.</param>
/// <param name="Channel">Number of channels (1 = mono, 2 = stereo).</param>
void StartAudioRecord(int AudioDeviceIndex, int Channel)
{
    try
    {
        // FIX: the original constructed a first WaveIn (with only WaveFormat
        // set) and immediately overwrote the field with a second instance,
        // leaking the first. Construct it exactly once.
        waveSource = new WaveIn
        {
            DeviceNumber = AudioDeviceIndex,
            // (int)Math.Ceiling((double)(1000 / 10)) was integer division
            // before the ceiling — it always evaluated to exactly 100.
            BufferMilliseconds = 100,
            NumberOfBuffers = 3,
            WaveFormat = new WaveFormat(44100, 16, Channel)
        };
        waveSource.DataAvailable += new EventHandler<WaveInEventArgs>(waveSource_DataAvailable);
        waveSource.RecordingStopped += new EventHandler<StoppedEventArgs>(waveSource_RecordingStopped);

        // waveFile = new NAudio.Wave.WaveFileWriter(Program.Localpath+"\\raw.wav", waveSource.WaveFormat);

        waveSource.StartRecording();
    }
    catch (Exception ex)
    {
        ClsCommon.WriteLog(ex.Message + " Method :-Start Audio Recording.");
    }
}
bool Recording = false; // by default, the string of notes is not being recorded

/// <summary>
/// Handler for the Start button: begins audio capture, initialises pitch
/// tracking, opens the piano-roll window and puts the form into recording
/// mode so notes played from now on are appended to RecordString.
/// </summary>
private void startBtn_Click(object sender, EventArgs e)
{
    InitializeSound(); // sets up sourcestream, data-in event, buffer and waveout for data to be sent

    // The block below saved the .wav file as recording proceeded — kept for
    // later reinstatement (TODO).
    //try
    //{
    //    waveWriter = new NAudio.Wave.WaveFileWriter(SFD.FileName, sourceStream.WaveFormat);
    //    // attempts to create a filewriter using the openfiledialogue path and the sourcestream waveformat
    //}
    //catch (NullReferenceException) // both these errors occur when no file has been selected
    //{
    //    System.Windows.MessageBox.Show("Please select save file name and location");
    //    return;
    //}
    //catch (ArgumentException)
    //{
    //    System.Windows.MessageBox.Show("Please select save file name and location");
    //    return;
    //}

    try
    {
        sourceStream.StartRecording();
    }
    catch (NAudio.MmException) // thrown when no input device is connected
    {
        // NOTE: execution deliberately continues after this warning,
        // matching the original behaviour.
        System.Windows.MessageBox.Show("No Input Device available");
    }
    // waveOut.Play();

    m_pitchTracker = new PitchTracker();      // initialises class for detecting pitch
    m_pitchTracker.SampleRate = m_sampleRate; // sample rate is set to 44100.0f as standard
    // m_pitchTracker.PitchDetected += OnPitchDetected;
    // m_audioBuffer = new float[(int)Math.Round(m_sampleRate * m_timeInterval / 1000.0)];

    StartTimer();
    // UpdateDisplay();
    // TODO: initialise the customsheetmusicstaff item here once it's done,
    // and also call the "fittoscreen" method.
    /*StartTimer(); */ // was meant as part of the sheet-music timing indications

    title = titleBox.Text; // set the given title at the top of the sheet-music form

    //SheetMusicForm frm = new SheetMusicForm(); // initialises the sheet music form
    //frm.Show(); // shows the form
    // ControlPanels.PianoRoll pRoll = new ControlPanels.PianoRoll(); // experiment: piano-roll style layout

    // XAML content cannot be displayed like a WinForms form; it needs a host Window.
    ControlPanels.MyPianoRollCreator pRoll = new ControlPanels.MyPianoRollCreator();
    var host = new Window();
    host.Content = pRoll; // fill the host with the piano-roll XAML control
    host.Show();

    ControlPanels.RollCreaterControls pRollControlls = new ControlPanels.RollCreaterControls();
    //var host2 = new Window(); // second host window for the roll controls, currently disabled
    //host2.Content = pRollControlls;
    //host2.MaxHeight = 92;
    //host2.MaxWidth = 592;
    //host2.WindowStyle = WindowStyle.ToolWindow ;
    //host2.Show();

    RecordString = ""; // the string of notes is cleared from last time
    Recording = true;  // the notes are now being recorded
    stopBtn.Enabled = true;

    // Change colour of button.
    composerBtn.BackColor = Color.PowderBlue;
}
// Translates the text in textBox1 into Morse code (shown in textBox2), plays
// it as console beeps, and records the whole session into a user-chosen .wav
// file. Supports Latin letters, Polish diacritics and digits; characters with
// no Morse mapping are skipped. (async void is acceptable here — this is a
// top-level WinForms event handler.)
private async void tlumaczenienamorsapluszapis(object sender, EventArgs e)
{
    textBox2.Text = null;

    if (sourceList.SelectedItems.Count == 0)
    {
        // User-facing Polish: "Choose a recording source".
        MessageBox.Show("Wybierz źródło nagrywania");
    }
    else
    {
        // Ask where to save the recording.
        SaveFileDialog save = new SaveFileDialog();
        save.Filter = "Wave File (*.wav)|*.wav;";
        if (save.ShowDialog() != System.Windows.Forms.DialogResult.OK)
        {
            return;
        }

        int deviceNumber = sourceList.SelectedItems[0].Index;

        // Capture chain: WaveIn -> sourceStream_DataAvailable -> waveWriter.
        sourceStream = new NAudio.Wave.WaveIn();
        sourceStream.DeviceNumber = deviceNumber;
        sourceStream.WaveFormat = new NAudio.Wave.WaveFormat(44100, NAudio.Wave.WaveIn.GetCapabilities(deviceNumber).Channels);
        sourceStream.DataAvailable += new EventHandler<NAudio.Wave.WaveInEventArgs>(sourceStream_DataAvailable);
        waveWriter = new NAudio.Wave.WaveFileWriter(save.FileName, sourceStream.WaveFormat);
        sourceStream.StartRecording();

        char[] tablica = null;
        String input = textBox1.Text;
        input = input.ToUpper();

        // Character -> Morse lookup (Latin letters, Polish diacritics, digits).
        Dictionary<char, String> morse = new Dictionary<char, String>()
        {
            { 'A', ".-" }, { 'Ą', ".-.-" }, { 'B', "-..." }, { 'C', "-.-." },
            { 'Ć', "-.-.." }, { 'D', "-.." }, { 'E', "." }, { 'Ę', "..-.." },
            { 'F', "..-." }, { 'G', "--." }, { 'H', "...." }, { 'I', ".." },
            { 'J', ".---" }, { 'K', "-.-" }, { 'L', ".-.." }, { 'Ł', ".-..-" },
            { 'M', "--" }, { 'N', "-." }, { 'Ń', "--.--" }, { 'O', "---" },
            { 'Ó', "---." }, { 'P', ".--." }, { 'Q', "--.-" }, { 'R', ".-." },
            { 'S', "..." }, { 'Ś', "...-..." }, { 'T', "-" }, { 'U', "..-" },
            { 'V', "...-" }, { 'W', ".--" }, { 'X', "-..-" }, { 'Y', "-.--" },
            { 'Z', "--.." }, { 'Ż', "--..-." }, { 'Ź', "--..-" },
            { '0', "-----" }, { '1', ".----" }, { '2', "..---" }, { '3', "...--" },
            { '4', "....-" }, { '5', "....." }, { '6', "-...." }, { '7', "--..." },
            { '8', "---.." }, { '9', "----." },
        };

        for (int i = 0; i < input.Length; i++)
        {
            if (i > 0)
            {
                textBox2.Text += "/";       // inter-character separator
                await Task.Delay(2000);
            }

            if (input[i] == ' ')
            {
                await Task.Delay(5000);     // word gap
            }
            else
            {
                char c = input[i];
                // BUG FIX: the playback code used morse[c] OUTSIDE this key
                // check, so any unmapped character (punctuation, etc.) threw
                // KeyNotFoundException. Unknown characters are now skipped.
                if (morse.ContainsKey(c))
                {
                    textBox2.Text += morse[c];
                    tablica = morse[c].ToCharArray();
                    for (int i2 = 0; i2 < tablica.Length; i2++)
                    {
                        if (tablica[i2] == '.')
                        {
                            // Short beep = dot.
                            Action beep = () => Console.Beep(1000, 600);
                            await Task.Delay(2000);
                            beep.BeginInvoke(null, null);
                        }
                        if (tablica[i2] == '-')
                        {
                            // Long beep = dash.
                            Action beep2 = () => Console.Beep(1000, 1000);
                            await Task.Delay(2000);
                            beep2.BeginInvoke(null, null);
                        }
                    }
                }
            }
        }

        // Tear down the audio chain in order: playback, capture, writer.
        if (waveOut != null)
        {
            await Task.Delay(1000);
            waveOut.Stop();
            waveOut.Dispose();
            waveOut = null;
        }
        if (sourceStream != null)
        {
            await Task.Delay(1000);
            sourceStream.StopRecording();
            sourceStream.Dispose();
            sourceStream = null;
        }
        if (waveWriter != null)
        {
            await Task.Delay(1000);
            waveWriter.Dispose();
            waveWriter = null;
        }
    }
}