/// <summary>
/// Creates a level meter that immediately starts capturing from the input device.
/// </summary>
/// <param name="samples">Number of audio frames each capture buffer holds.</param>
public WaveInMeter(int samples)
{
    _args = new WaveLevelEventArgs();

    // 44.1 kHz, 16-bit, mono PCM; buffer size is a whole number of frames.
    var pcmFormat = new WaveFormatPcm(44100, 16, 1);
    int bufferBytes = pcmFormat.FrameSize * samples;

    _waveIn = new WaveIn(new MeterStream(this), pcmFormat, bufferBytes);
    _waveIn.Start();
}
/// <summary>
/// Construct a new voice loopback session.
/// </summary>
/// <param name="codec">An audio codec to encode and decode with.</param>
/// <param name="quality">The encoding quality (usually the sample rate).</param>
public VoiceLoopback(VoiceCodec codec, int quality)
{
    // Resolve buffer sizes and formats for this codec at the requested quality.
    var codecInfo = new CodecInfo(codec, quality);

    // Capture raw (decoded) audio, play back the encoded stream,
    // and convert between the two formats in between.
    _waveIn = new WaveIn(this, codecInfo.DecodedFormat, codecInfo.DecodedBufferSize);
    _waveOut = new WaveOut(this, codecInfo.EncodedFormat, codecInfo.EncodedBufferSize);
    _encoder = new AudioConverter(codecInfo.DecodedBufferSize, codecInfo.DecodedFormat, codecInfo.EncodedFormat);
}
/// <summary>
/// (Re)creates the capture device and encoder for the current codec,
/// closing any previously opened input first.
/// </summary>
private void InitAudio()
{
    // Release the old device before opening a new one for the current codec.
    _waveIn?.Close();

    _waveIn = new WaveIn(this, _codec.DecodedFormat, _codec.DecodedBufferSize);
    _encoder = new AudioConverter(_codec.DecodedBufferSize, _codec.DecodedFormat, _codec.EncodedFormat);
}
/// <summary>
/// Default constructor.
/// </summary>
/// <param name="device">Audio input device.</param>
/// <param name="samplesPerSec">Sample rate, in samples per second (hertz).</param>
/// <param name="bitsPerSample">Bits per sample. For PCM 8 or 16 are the only valid values.</param>
/// <param name="channels">Number of channels.</param>
/// <exception cref="ArgumentNullException">Is raised when <b>device</b> is null reference.</exception>
/// <exception cref="ArgumentException">Is raised when any of the arguments has invalid value.</exception>
public AudioIn(AudioInDevice device, int samplesPerSec, int bitsPerSample, int channels)
{
    // nameof keeps the exception argument names in sync if a parameter is renamed.
    if (device == null) {
        throw new ArgumentNullException(nameof(device));
    }
    if (samplesPerSec < 1) {
        throw new ArgumentException("Argument 'samplesPerSec' value must be >= 1.", nameof(samplesPerSec));
    }
    if (bitsPerSample < 8) {
        throw new ArgumentException("Argument 'bitsPerSample' value must be >= 8.", nameof(bitsPerSample));
    }
    if (channels < 1) {
        throw new ArgumentException("Argument 'channels' value must be >= 1.", nameof(channels));
    }

    m_pDevice = device;
    m_SamplesPerSec = samplesPerSec;
    m_BitsPerSample = bitsPerSample;
    m_Channels = channels;

    // 320-byte buffer = 20 ms of 8 kHz / 16-bit / mono audio (a common VoIP frame).
    // NOTE(review): for higher-rate formats this yields very frequent callbacks — confirm intended.
    m_pWaveIn = new WaveIn(device, samplesPerSec, bitsPerSample, channels, 320);
    m_pWaveIn.Start();
}
/// <summary>
/// Stops recording, sends the captured audio request, and renders the
/// pronunciation-score response (tables plus per-word colouring).
/// </summary>
/// <param name="sender">Event source (the Stop button).</param>
/// <param name="e">Routed event data.</param>
private void Button_Stop_Click(object sender, RoutedEventArgs e)
{
    try
    {
        // BUGFIX: the original called waveSource.StopRecording() BEFORE the null
        // check, throwing NullReferenceException if recording never started or
        // the handler fired twice. Guard the whole stop/dispose sequence.
        if (waveSource != null)
        {
            waveSource.StopRecording();
            // Close Wave (not needed under synchronous situation)
            waveSource.Dispose();
            waveSource = null;
        }

        // WebResponse is IDisposable — dispose it along with its stream.
        using (var response = request.GetResponse())
        using (var responseStream = response.GetResponseStream())
        using (var streamReader = new StreamReader(responseStream))
        {
            var responseJsonText = streamReader.ReadToEnd();

            // The result in JSON format, with pronunciation score
            ScoreResult result = JsonConvert.DeserializeObject<ScoreResult>(responseJsonText);
            if (null != result && "Success" == result.RecognitionStatus)
            {
                NBestItem nBestItem = result.NBest[0];
                var pronScore = JsonConvert.SerializeObject(nBestItem);
                PronScoreWebBrowser.InvokeScript("generatePronScoreTable", pronScore);
                PronScoreWebBrowser.InvokeScript("generatePhoneScoreTable", pronScore);
                PronScoreWebBrowser.Visibility = Visibility.Visible;

                // Colour each word by accuracy: <=60 red, <=70 orange, otherwise green.
                List<WordsItem> witems = result.NBest[0].Words;
                for (int i = 0; i < witems.Count; i++)
                {
                    WordsItem w = witems[i];
                    if (w.AccuracyScore <= 60.0)
                    {
                        ChangeColor(Colors.Red, ReferenceText, w.Word.ToLower(), w.AccuracyScore);
                    }
                    else if (w.AccuracyScore <= 70.0)
                    {
                        ChangeColor(Colors.Orange, ReferenceText, w.Word.ToLower(), w.AccuracyScore);
                    }
                    else
                    {
                        ChangeColor(Colors.Green, ReferenceText, w.Word.ToLower(), w.AccuracyScore);
                    }
                }
            }
            else
            {
                if (null != result)
                {
                    MessageBox.Show($"Recognition status: {result.RecognitionStatus}");
                }
            }
        }
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
    }
    finally
    {
        // Always restore the UI, even when the service call failed.
        progressRing.IsActive = false;
        StartBut.Visibility = Visibility.Visible;
        StopBut.Visibility = Visibility.Collapsed;
    }
}
/// <summary>
/// Initialises the per-channel FFT aggregators (with staggered pre-fill so the
/// FFT windows are phase-offset from each other), selects a recording device by
/// name — preferring an exact <c>DeviceName</c> match, falling back to a
/// "Stereo Mix" device, else device 0 — and starts 48 kHz stereo capture.
/// </summary>
public void setup()
{
    sampleAggregatorL = new SampleAggregator[fftCount];
    sampleAggregatorR = new SampleAggregator[fftCount];
    for (int i = 0; i < fftCount; i++)
    {
        sampleAggregatorL[i] = new SampleAggregator(fftLength);
        sampleAggregatorL[i].FftCalculated += FftCalculatedL;
        sampleAggregatorL[i].PerformFFT = true;
        // Pre-fill aggregator i with i/fftCount of a window of zeros so the
        // fftCount windows complete at evenly staggered times.
        for (int j = 0; j < fftLength * i / fftCount; j++) { sampleAggregatorL[i].Add(0f); }
        sampleAggregatorR[i] = new SampleAggregator(fftLength);
        sampleAggregatorR[i].FftCalculated += FftCalculatedR;
        sampleAggregatorR[i].PerformFFT = true;
        for (int j = 0; j < fftLength * i / fftCount; j++) { sampleAggregatorR[i].Add(0f); }
    }
    //wo = new WaveOut();
    wi = new WaveIn();
    var caps = new List <WaveInCapabilities>();
    int sm_devnum = 0;
    // Scan devices: remember any "Stereo Mix" device as a fallback (sm_devnum);
    // an exact DeviceName match wins and disables the fallback (sm_devnum = -1).
    for (int i = 0; i < WaveIn.DeviceCount; i++)
    {
        var cap = WaveIn.GetCapabilities(i);
        caps.Add(cap);
        if (cap.ProductName.Contains("Stereo Mix"))
        {
            wi.DeviceNumber = i;
            sm_devnum = i;
        }
        if (cap.ProductName == DeviceName)
        {
            wi.DeviceNumber = i;
            sm_devnum = -1;
            break;
        }
    }
    if (sm_devnum != -1)
    {
        wi.DeviceNumber = sm_devnum;
    }
    // NOTE(review): if no capture devices exist, caps is empty and this indexer throws.
    _deviceName = caps[wi.DeviceNumber].ProductName;
    _deviceNameChanged = false;
    /*
     * for (int i = 0; i < WaveOut.DeviceCount; i++)
     * {
     *  var cap = WaveOut.GetCapabilities(i);
     *  if (cap.ProductName.Contains("Logitech"))
     *      wo.DeviceNumber = i;
     * }
     */
    // 48 kHz, 16-bit, stereo capture in 25 ms buffers.
    wi.WaveFormat = new WaveFormat(48000, 16, 2);
    wi.DataAvailable += new EventHandler <WaveInEventArgs>(wi_DataAvailable);
    wi.BufferMilliseconds = 25;
    //bwp = new BufferedWaveProvider(wi.WaveFormat);
    //bwp.DiscardOnBufferOverflow = true;
    //wo.Init(bwp);
    wi.StartRecording();
    //wo.Play();
}
/// <summary>
/// Starts the full voice pipeline: speaker output (WASAPI), an optional
/// mic-monitor output, Opus encoder/decoder state, the Speex preprocessor,
/// and — when a microphone is available — audio capture plus the UDP voice
/// handler thread. Exits the process on any audio-device initialisation failure.
/// </summary>
/// <param name="guid">Client identifier passed to the UDP voice handler.</param>
/// <param name="inputManager">Input device manager passed to the UDP voice handler.</param>
/// <param name="ipAddress">Voice server address.</param>
/// <param name="port">Voice server port.</param>
public void StartEncoding(string guid, InputDeviceManager inputManager, IPAddress ipAddress, int port)
{
    // Resolve the speaker device: fall back to the system default endpoint.
    MMDevice speakers = null;
    if (_audioOutputSingleton.SelectedAudioOutput.Value == null)
    {
        speakers = WasapiOut.GetDefaultAudioEndpoint();
    }
    else
    {
        speakers = (MMDevice)_audioOutputSingleton.SelectedAudioOutput.Value;
    }

    // Optional second output used to monitor the local microphone.
    MMDevice micOutput = null;
    if (_audioOutputSingleton.SelectedMicAudioOutput.Value != null)
    {
        micOutput = (MMDevice)_audioOutputSingleton.SelectedMicAudioOutput.Value;
    }

    try
    {
        _micInputQueue.Clear();
        InitMixers();
        InitAudioBuffers();

        //Audio manager should start / stop and cleanup based on connection successful and disconnect
        //Should use listeners to synchronise all the state
        _waveOut = new WasapiOut(speakers, AudioClientShareMode.Shared, true, 40, windowsN);

        //add final volume boost to all mixed audio
        _volumeSampleProvider = new VolumeSampleProviderWithPeak(_clientAudioMixer,
            (peak => SpeakerMax = (float)VolumeConversionHelper.ConvertFloatToDB(peak)));
        _volumeSampleProvider.Volume = SpeakerBoost;

        // Match the provider's channel count to the endpoint's mix format.
        if (speakers.AudioClient.MixFormat.Channels == 1)
        {
            if (_volumeSampleProvider.WaveFormat.Channels == 2)
            {
                _waveOut.Init(_volumeSampleProvider.ToMono());
            }
            else
            {
                //already mono
                _waveOut.Init(_volumeSampleProvider);
            }
        }
        else
        {
            if (_volumeSampleProvider.WaveFormat.Channels == 1)
            {
                _waveOut.Init(_volumeSampleProvider.ToStereo());
            }
            else
            {
                //already stereo
                _waveOut.Init(_volumeSampleProvider);
            }
        }
        _waveOut.Play();

        //opus
        _encoder = OpusEncoder.Create(INPUT_SAMPLE_RATE, 1, Application.Voip);
        _encoder.ForwardErrorCorrection = false;
        _decoder = OpusDecoder.Create(INPUT_SAMPLE_RATE, 1);
        _decoder.ForwardErrorCorrection = false;

        //speex
        _speex = new Preprocessor(AudioManager.SEGMENT_FRAMES, AudioManager.INPUT_SAMPLE_RATE);
    }
    catch (Exception ex)
    {
        Logger.Error(ex, "Error starting audio Output - Quitting! 
" + ex.Message);
        ShowOutputError("Problem Initialising Audio Output!");
        // Fatal: no speaker output means the client cannot function.
        Environment.Exit(1);
    }

    if (micOutput != null) // && micOutput !=speakers
    {
        //TODO handle case when they're the same?
        try
        {
            _micWaveOut = new WasapiOut(micOutput, AudioClientShareMode.Shared, true, 40, windowsN);

            _micWaveOutBuffer = new BufferedWaveProvider(new WaveFormat(AudioManager.INPUT_SAMPLE_RATE, 16, 1));
            _micWaveOutBuffer.ReadFully = true;
            _micWaveOutBuffer.DiscardOnBufferOverflow = true;

            var sampleProvider = _micWaveOutBuffer.ToSampleProvider();

            // Same channel-matching dance as the main output, with the radio
            // effect applied on top of the monitored mic audio.
            if (micOutput.AudioClient.MixFormat.Channels == 1)
            {
                if (sampleProvider.WaveFormat.Channels == 2)
                {
                    _micWaveOut.Init(new RadioFilter(sampleProvider.ToMono()));
                }
                else
                {
                    //already mono
                    _micWaveOut.Init(new RadioFilter(sampleProvider));
                }
            }
            else
            {
                if (sampleProvider.WaveFormat.Channels == 1)
                {
                    _micWaveOut.Init(new RadioFilter(sampleProvider.ToStereo()));
                }
                else
                {
                    //already stereo
                    _micWaveOut.Init(new RadioFilter(sampleProvider));
                }
            }
            _micWaveOut.Play();
        }
        catch (Exception ex)
        {
            Logger.Error(ex, "Error starting mic audio Output - Quitting! " + ex.Message);
            ShowOutputError("Problem Initialising Mic Audio Output!");
            Environment.Exit(1);
        }
    }

    if (_audioInputSingleton.MicrophoneAvailable)
    {
        try
        {
            _waveIn = new WaveIn(WaveCallbackInfo.FunctionCallback())
            {
                BufferMilliseconds = INPUT_AUDIO_LENGTH_MS,
                DeviceNumber = _audioInputSingleton.SelectedAudioInputDeviceNumber()
            };
            _waveIn.NumberOfBuffers = 2;
            _waveIn.DataAvailable += _waveIn_DataAvailable;
            _waveIn.WaveFormat = new WaveFormat(INPUT_SAMPLE_RATE, 16, 1);

            // The voice handler must exist before capture starts feeding it.
            _udpVoiceHandler = new UdpVoiceHandler(guid, ipAddress, port, _decoder, this, inputManager);
            var voiceSenderThread = new Thread(_udpVoiceHandler.Listen);
            voiceSenderThread.Start();

            _waveIn.StartRecording();

            MessageHub.Instance.Subscribe <SRClient>(RemoveClientBuffer);
        }
        catch (Exception ex)
        {
            Logger.Error(ex, "Error starting audio Input - Quitting! 
" + ex.Message);
            ShowInputError("Problem initialising Audio Input!");
            Environment.Exit(1);
        }
    }
    else
    {
        //no mic.... receive-only mode: still start the voice handler thread.
        _udpVoiceHandler = new UdpVoiceHandler(guid, ipAddress, port, _decoder, this, inputManager);
        MessageHub.Instance.Subscribe <SRClient>(RemoveClientBuffer);
        var voiceSenderThread = new Thread(_udpVoiceHandler.Listen);
        voiceSenderThread.Start();
    }
}
/// <summary>
/// Releases the capture device once recording has fully stopped
/// (disposing is only safe after the driver has finished its callbacks).
/// </summary>
/// <param name="sender">Event source.</param>
/// <param name="e">Event data.</param>
void waveIn_RecordingStopped(object sender, EventArgs e)
{
    // Null-conditional: the event can be raised after the field was already
    // cleared (or before it was set), which previously threw NullReferenceException.
    waveIn?.Dispose();
    waveIn = null;
}
/// <summary>
/// Creates the shared microphone capture device (not yet started).
/// </summary>
public static void InitMicrophone() => waveIn = new WaveIn();
/// <summary>
/// Form-load handler: enforces the Settings feature permission (prompting for a
/// password if needed), then populates every control on the settings form from
/// <c>MainForm.Conf</c> — general options, colours, language, talk microphone,
/// joystick bindings, SMTP/FTP and logging. Sets <c>_loaded</c> last so change
/// handlers can ignore events fired during population.
/// </summary>
/// <param name="sender">Event source.</param>
/// <param name="e">Event data.</param>
private void SettingsLoad(object sender, EventArgs e)
{
    // Gate access: first attempt may prompt for a password; re-check afterwards.
    if (!Helper.HasFeature(Enums.Features.Settings))
    {
        using (var cp = new CheckPassword())
        {
            cp.ShowDialog(this);
        }
    }
    if (!Helper.HasFeature(Enums.Features.Settings))
    {
        MessageBox.Show(this, LocRm.GetString("AccessDenied"));
        Close();
        return;
    }
    UISync.Init(this);
    tcTabs.SelectedIndex = InitialTab;
    chkErrorReporting.Checked = MainForm.Conf.Enable_Error_Reporting;
    chkCheckForUpdates.Checked = MainForm.Conf.Enable_Update_Check;
    chkShowGettingStarted.Checked = MainForm.Conf.Enabled_ShowGettingStarted;
    // "Run at startup" is reflected by the presence of an "iSpy" value in the Run key.
    _rkApp = Registry.CurrentUser.OpenSubKey(@"Software\Microsoft\Windows\CurrentVersion\Run", false);
    chkStartup.Checked = (_rkApp != null && _rkApp.GetValue("iSpy") != null);
    mediaDirectoryEditor1.Init(MainForm.Conf.MediaDirectories);
    // Colour pickers display the configured colour as the button background.
    btnDetectColor.BackColor = MainForm.Conf.ActivityColor.ToColor();
    btnNoDetectColor.BackColor = MainForm.Conf.NoActivityColor.ToColor();
    btnColorTracking.BackColor = MainForm.Conf.TrackingColor.ToColor();
    btnColorVolume.BackColor = MainForm.Conf.VolumeLevelColor.ToColor();
    btnColorMain.BackColor = MainForm.Conf.MainColor.ToColor();
    btnColorArea.BackColor = MainForm.Conf.AreaColor.ToColor();
    btnColorBack.BackColor = MainForm.Conf.BackColor.ToColor();
    btnBorderHighlight.BackColor = MainForm.Conf.BorderHighlightColor.ToColor();
    btnBorderDefault.BackColor = MainForm.Conf.BorderDefaultColor.ToColor();
    chkAutoSchedule.Checked = MainForm.Conf.AutoSchedule;
    numMaxCPU.Value = MainForm.Conf.CPUMax;
    numMaxRecordingThreads.Value = MainForm.Conf.MaxRecordingThreads;
    numRedraw.Value = MainForm.Conf.MaxRedrawRate;
    numMediaPanelItems.Value = MainForm.Conf.PreviewItems;
    txtTrayIcon.Text = MainForm.Conf.TrayIconText;
    chkMinimise.Checked = MainForm.Conf.MinimiseOnClose;
    chkSpeechRecognition.Checked = MainForm.Conf.SpeechRecognition;
    chkMinimiseToTray.Checked = MainForm.Conf.TrayOnMinimise;
    // Launch the external monitor process when the box is ticked but the
    // monitor wasn't previously enabled in config.
    if (chkMonitor.Checked && !MainForm.Conf.Monitor)
    {
        Process.Start(Program.AppPath + "iSpyMonitor.exe");
    }
    chkMonitor.Checked = MainForm.Conf.Monitor;
    tbOpacity.Value = MainForm.Conf.Opacity;
    SetColors();
    chkBalloon.Checked = MainForm.Conf.BalloonTips;
    txtIPCameraTimeout.Value = MainForm.Conf.IPCameraTimeout;
    txtServerReceiveTimeout.Value = MainForm.Conf.ServerReceiveTimeout;
    txtServerName.Text = MainForm.Conf.ServerName;
    rtbAccessList.Text = MainForm.Conf.AllowedIPList;
    // Populate languages, remembering the index of the configured culture.
    int i = 0, selind = 0;
    foreach (TranslationsTranslationSet set in LocRm.TranslationSets.OrderBy(p => p.Name))
    {
        ddlLanguage.Items.Add(new ListItem(set.Name, new[] { set.CultureCode }));
        if (set.CultureCode == MainForm.Conf.Language)
        {
            selind = i;
        }
        i++;
    }
    ddlLanguage.SelectedIndex = selind;
    chkAlertWindows.Checked = MainForm.Conf.CreateAlertWindows;
    chkOverlay.Checked = MainForm.Conf.ShowOverlayControls;
    chkInterrupt.Checked = MainForm.Conf.ScreensaverWakeup;
    chkEnableIPv6.Checked = !MainForm.Conf.IPv6Disabled;
    chkRecycle.Checked = MainForm.Conf.DeleteToRecycleBin;
    txtAppendLinkText.Text = MainForm.Conf.AppendLinkText;
    numMJPEGStreamInterval.Value = MainForm.Conf.MJPEGStreamInterval;
    txtAlertOnDisconnect.Text = MainForm.Conf.AlertOnDisconnect;
    txtAlertOnReconnect.Text = MainForm.Conf.AlertOnReconnect;
    txtArchive.Text = MainForm.Conf.Archive;
    SetSSLText();
    txtAlertSubject.Text = MainForm.Conf.MailAlertSubject;
    txtAlertBody.Text = MainForm.Conf.MailAlertBody;
    txtSMSBody.Text = MainForm.Conf.SMSAlert;
    foreach (string s in StartupModes)
    {
        ddlStartupMode.Items.Add(LocRm.GetString(s));
    }
    foreach (string s in Priorities)
    {
        ddlPriority.Items.Add(LocRm.GetString(s));
    }
    ddlStartupMode.SelectedIndex = MainForm.Conf.StartupMode;
    foreach (var grid in MainForm.Conf.GridViews)
    {
        ddlStartUpForm.Items.Add(grid.name);
    }
    // Priority is stored 1-based; the combo box is 0-based.
    ddlPriority.SelectedIndex = MainForm.Conf.Priority - 1;
    ddlStartUpForm.SelectedItem = MainForm.Conf.StartupForm;
    if (ddlStartUpForm.SelectedItem == null)
    {
        ddlStartUpForm.SelectedIndex = 0;
    }
    ddlPlayback.Items.AddRange(PlaybackModes);
    // Clamp an out-of-range stored playback mode before applying it.
    if (MainForm.Conf.PlaybackMode < 0)
    {
        MainForm.Conf.PlaybackMode = 0;
    }
    if (MainForm.Conf.PlaybackMode < ddlPlayback.Items.Count)
    {
        ddlPlayback.SelectedIndex = MainForm.Conf.PlaybackMode;
    }
    // Best-effort: stored JPEG quality may be outside the control's min/max.
    try
    {
        numJPEGQuality.Value = MainForm.Conf.JPEGQuality;
    }
    catch (Exception)
    {
    }
    chkBigButtons.Checked = MainForm.Conf.BigButtons;
    // Talk microphone list: index 0 is "None"; device indices start at 1.
    selind = -1;
    i = 1;
    try
    {
        ddlTalkMic.Items.Add(LocRm.GetString("None"));
        for (int n = 0; n < WaveIn.DeviceCount; n++)
        {
            ddlTalkMic.Items.Add(WaveIn.GetCapabilities(n).ProductName);
            if (WaveIn.GetCapabilities(n).ProductName == MainForm.Conf.TalkMic)
            {
                selind = i;
            }
            i++;
        }
        ddlTalkMic.Enabled = true;
        if (selind > -1)
        {
            ddlTalkMic.SelectedIndex = selind;
        }
        else
        {
            if (ddlTalkMic.Items.Count == 1)
            {
                // Only "None" present: no capture devices at all.
                ddlTalkMic.Items.Add(_noDevices);
                ddlTalkMic.Enabled = false;
                ddlTalkMic.SelectedIndex = 1;
            }
            else
            {
                ddlTalkMic.SelectedIndex = 0;
            }
        }
    }
    catch (ApplicationException ex)
    {
        Logger.LogExceptionToFile(ex);
        ddlTalkMic.Items.Add(_noDevices);
        ddlTalkMic.Enabled = false;
    }
    // Joysticks: entries are "name|id" strings; match on the stored id.
    ddlJoystick.Items.Add(LocRm.GetString("None"));
    _jst = new JoystickDevice();
    var ij = 0;
    _sticks = _jst.FindJoysticks();
    i = 1;
    foreach (string js in _sticks)
    {
        var nameid = js.Split('|');
        ddlJoystick.Items.Add(nameid[0]);
        if (nameid[1] == MainForm.Conf.Joystick.id)
        {
            ij = i;
        }
        i++;
    }
    ddlJoystick.SelectedIndex = ij;
    // Axis/button bindings from config.
    jaxis1.ID = MainForm.Conf.Joystick.XAxis;
    jaxis1.SupportDPad = true;
    jaxis1.Invert = MainForm.Conf.Joystick.InvertXAxis;
    jaxis2.ID = MainForm.Conf.Joystick.YAxis;
    jaxis2.Invert = MainForm.Conf.Joystick.InvertYAxis;
    jaxis3.ID = MainForm.Conf.Joystick.ZAxis;
    jaxis3.Invert = MainForm.Conf.Joystick.InvertZAxis;
    jbutton1.ID = MainForm.Conf.Joystick.Record;
    jbutton2.ID = MainForm.Conf.Joystick.Snapshot;
    jbutton3.ID = MainForm.Conf.Joystick.Talk;
    jbutton4.ID = MainForm.Conf.Joystick.Listen;
    jbutton5.ID = MainForm.Conf.Joystick.Play;
    jbutton6.ID = MainForm.Conf.Joystick.Next;
    jbutton7.ID = MainForm.Conf.Joystick.Previous;
    jbutton8.ID = MainForm.Conf.Joystick.Stop;
    jbutton9.ID = MainForm.Conf.Joystick.MaxMin;
    jbutton1.GetInput += JbuttonGetInput;
    jbutton2.GetInput += JbuttonGetInput;
    jbutton3.GetInput += JbuttonGetInput;
    jbutton4.GetInput += JbuttonGetInput;
    jbutton5.GetInput += JbuttonGetInput;
    jbutton6.GetInput += JbuttonGetInput;
    jbutton7.GetInput += JbuttonGetInput;
    jbutton8.GetInput += JbuttonGetInput;
    jbutton9.GetInput += JbuttonGetInput;
    jaxis1.GetInput += JaxisGetInput;
    jaxis2.GetInput += JaxisGetInput;
    jaxis3.GetInput += JaxisGetInput;
    chkGZip.Checked = MainForm.Conf.EnableGZip;
    numDisconnectNotification.Value = MainForm.Conf.DisconnectNotificationDelay;
    mediaDirectoryEditor1.Enabled = Helper.HasFeature(Enums.Features.Storage);
    HideTab(tabPage11, Helper.HasFeature(Enums.Features.Plugins));
    //important leave here:
    chkPasswordProtect.Checked = MainForm.Conf.Enable_Password_Protect;
    if (Helper.HasFeature(Enums.Features.Plugins))
    {
        ListPlugins();
    }
    chkUseiSpy.Checked = !MainForm.Conf.UseSMTP;
    txtSMTPFromAddress.Text = MainForm.Conf.SMTPFromAddress;
    txtSMTPUsername.Text = MainForm.Conf.SMTPUsername;
    txtSMTPPassword.Text = MainForm.Conf.SMTPPassword;
    txtSMTPServer.Text = MainForm.Conf.SMTPServer;
    chkSMTPUseSSL.Checked = MainForm.Conf.SMTPSSL;
    numSMTPPort.Value = MainForm.Conf.SMTPPort;
    ftpEditor1.Init(MainForm.Conf.FTPServers);
    chkOpenGrabs.Checked = MainForm.Conf.OpenGrabs;
    numFileCache.Value = MainForm.Conf.VLCFileCache;
    rtbReferrers.Text = MainForm.Conf.Referers;
    chkPasswordProtectOnStart.Checked = MainForm.Conf.Password_Protect_Startup;
    chkEnableLogging.Checked = MainForm.Conf.Logging.Enabled;
    numMaxLogSize.Value = MainForm.Conf.Logging.FileSize;
    numKeepLogs.Value = MainForm.Conf.Logging.KeepDays;
    // Mark load complete so change handlers start persisting edits.
    _loaded = true;
}
/// <summary>
/// Tears down an active session: stops and disposes the recorder, cancels any
/// file-streaming task, stops the player, closes audio log files, then
/// disconnects and disposes the speech client asynchronously, returning the UI
/// to the ready-to-connect state when done. No-op unless currently connected.
/// </summary>
private void Disconnect()
{
    if (this.currentState != UiState.Connected)
    {
        return;
    }
    UpdateUiState(UiState.Disconnecting);
    miniwindow.Hide();
    if (recorder != null)
    {
        // Unhook before dispose so no DataAvailable callback races the teardown.
        recorder.StopRecording();
        recorder.DataAvailable -= OnRecorderDataAvailable;
        recorder.Dispose();
        recorder = null;
    }
    if (streamAudioFromFileInterrupt != null)
    {
        streamAudioFromFileInterrupt.Cancel();
        streamAudioFromFileInterrupt = null;
    }
    if (player != null)
    {
        player.Stop();
        player.Dispose();
        player = null;
    }
    // Close the audio file if logging
    if (audioSent != null)
    {
        audioSent.Flush();
        audioSent.Dispose();
        audioSent = null;
    }
    if (this.audioReceived != null)
    {
        this.audioReceived.Dispose();
        this.audioReceived = null;
    }
    // First continuation disposes the client regardless of the disconnect
    // outcome; the second logs the overall result and restores the UI on the
    // dispatcher thread (SafeInvoke).
    var task = s2smtClient.Disconnect()
        .ContinueWith((t) =>
        {
            if (t.IsFaulted)
            {
                this.Log(t.Exception, "E: Disconnect call to client failed.");
            }
            s2smtClient.Dispose();
            s2smtClient = null;
        })
        .ContinueWith((t) =>
        {
            if (t.IsFaulted)
            {
                this.Log(t.Exception, "E: Disconnected but there were errors.");
            }
            else
            {
                this.Log("I: Disconnected.");
            }
            this.SafeInvoke(() =>
            {
                this.AutoSaveLogs();
                this.UpdateUiState(UiState.ReadyToConnect);
            });
        });
}
/// <summary>
/// Starts an audio preview loop: plays buffered mic audio through the given
/// speaker device with the radio effect and volume boost applied, and starts
/// capturing from the selected microphone with Opus codec state initialised.
/// Exits the process if either the output or input device fails to initialise.
/// </summary>
/// <param name="mic">WaveIn device number to capture from.</param>
/// <param name="speakers">Output endpoint to play the preview on.</param>
public void StartPreview(int mic, MMDevice speakers)
{
    try
    {
        _waveOut = new WasapiOut(speakers, AudioClientShareMode.Shared, true, 40);

        _buffBufferedWaveProvider = new BufferedWaveProvider(new WaveFormat(AudioManager.INPUT_SAMPLE_RATE, 16, 1));
        _buffBufferedWaveProvider.ReadFully = true;
        _buffBufferedWaveProvider.DiscardOnBufferOverflow = true;

        RadioFilter filter = new RadioFilter(_buffBufferedWaveProvider.ToSampleProvider());

        //add final volume boost to all mixed audio
        _volumeSampleProvider = new VolumeSampleProviderWithPeak(filter, (peak => SpeakerMax = peak));
        _volumeSampleProvider.Volume = SpeakerBoost;

        // Match the provider's channel count to the endpoint's mix format.
        if (speakers.AudioClient.MixFormat.Channels == 1)
        {
            if (_volumeSampleProvider.WaveFormat.Channels == 2)
            {
                _waveOut.Init(_volumeSampleProvider.ToMono());
            }
            else
            {
                //already mono
                _waveOut.Init(_volumeSampleProvider);
            }
        }
        else
        {
            if (_volumeSampleProvider.WaveFormat.Channels == 1)
            {
                _waveOut.Init(_volumeSampleProvider.ToStereo());
            }
            else
            {
                //already stereo
                _waveOut.Init(_volumeSampleProvider);
            }
        }
        _waveOut.Play();
    }
    catch (Exception ex)
    {
        Logger.Error(ex, "Error starting audio Output - Quitting! " + ex.Message);
        MessageBox.Show($"Problem Initialising Audio Output! 
Try a different Output device and please post your client log on the forums", "Audio Output Error", MessageBoxButton.OK, MessageBoxImage.Error);
        // Fatal: preview cannot run without an output device.
        Environment.Exit(1);
    }

    try
    {
        //opus
        _encoder = OpusEncoder.Create(AudioManager.INPUT_SAMPLE_RATE, 1, FragLabs.Audio.Codecs.Opus.Application.Voip);
        _encoder.ForwardErrorCorrection = false;
        _decoder = OpusDecoder.Create(AudioManager.INPUT_SAMPLE_RATE, 1);
        _decoder.ForwardErrorCorrection = false;

        _waveIn = new WaveIn(WaveCallbackInfo.FunctionCallback())
        {
            BufferMilliseconds = AudioManager.INPUT_AUDIO_LENGTH_MS,
            DeviceNumber = mic
        };
        _waveIn.NumberOfBuffers = 2;
        _waveIn.DataAvailable += _waveIn_DataAvailable;
        _waveIn.WaveFormat = new WaveFormat(AudioManager.INPUT_SAMPLE_RATE, 16, 1);

        _waveIn.StartRecording();
    }
    catch (Exception ex)
    {
        Logger.Error(ex, "Error starting audio Input - Quitting! " + ex.Message);
        MessageBox.Show($"Problem Initialising Audio Input! Try a different Input device and please post your client log on the forums", "Audio Input Error", MessageBoxButton.OK, MessageBoxImage.Error);
        Environment.Exit(1);
    }
}
/// <summary>
/// Builds the main window: enumerates recording and playback devices into the
/// device pickers (plus a "play from file" pseudo-device), restores persisted
/// selections and feature toggles, and initialises the mini (subtitle) window.
/// </summary>
public MainWindow()
{
    InitializeComponent();
    Debug.Print("This is a debug message");
    miniwindow = new MiniWindow();
    this.Closing += MainWindow_Closing;

    // Recording devices -> Mic combo box; Tag carries the device number.
    int waveInDevices = WaveIn.DeviceCount;
    for (int waveInDevice = 0; waveInDevice < waveInDevices; waveInDevice++)
    {
        WaveInCapabilities deviceInfo = WaveIn.GetCapabilities(waveInDevice);
        Mic.Items.Add(new ComboBoxItem() { Content = deviceInfo.ProductName, Tag = waveInDevice });
    }
    // Special case: audio source is a file
    Mic.Items.Add(new ComboBoxItem() { Content = "Play audio from file", Tag = "File" });
    Mic.SelectedIndex = Properties.Settings.Default.MicIndex;

    // Playback devices -> Speaker combo box.
    int waveOutDevices = WaveOut.DeviceCount;
    for (int waveOutDevice = 0; waveOutDevice < waveOutDevices; waveOutDevice++)
    {
        WaveOutCapabilities deviceInfo = WaveOut.GetCapabilities(waveOutDevice);
        Speaker.Items.Add(new ComboBoxItem() { Content = deviceInfo.ProductName, Tag = waveOutDevice });
    }
    Speaker.SelectedIndex = Properties.Settings.Default.SpeakerIndex;

    // Mini-window line-count choices "1".."9" (replaces nine copy-pasted Add calls).
    for (int lineCount = 1; lineCount <= 9; lineCount++)
    {
        MiniWindow_Lines.Items.Add(new ComboBoxItem() { Content = lineCount.ToString() });
    }
    MiniWindow_Lines.SelectedIndex = Properties.Settings.Default.MiniWindow_Lines;
    miniwindow.SetFontSize(Properties.Settings.Default.MiniWindow_Lines);

    // Restore persisted feature toggles.
    ShowMiniWindow.IsChecked = Properties.Settings.Default.ShowMiniWindow;
    FeatureTTS.IsChecked = Properties.Settings.Default.TTS;
    CutInputAudioCheckBox.IsChecked = Properties.Settings.Default.CutInputDuringTTS;
    FeaturePartials.IsChecked = Properties.Settings.Default.PartialResults;
    Voice.SelectedIndex = Properties.Settings.Default.VoiceIndex;
    UpdateLanguageSettings();
}
/// <summary>
/// Validates the UI selections, builds the speech-translation client options,
/// sets up the NAudio player/recorder (or file-streaming mixer when the audio
/// source is a file), optionally opens audio log files, then connects
/// asynchronously and — on success — starts playback and audio transmission.
/// No-op unless the UI is in the ready-to-connect state.
/// </summary>
private void Connect()
{
    if (this.currentState != UiState.ReadyToConnect)
    {
        return;
    }
    Stopwatch watch = Stopwatch.StartNew();
    UpdateUiState(UiState.Connecting);
    if (ShowMiniWindow.IsChecked.Value)
    {
        miniwindow.Show();
    }

    //This section is putting default values in case there are missing values in the UI
    // Minimal validation
    if (this.IsMissingInput(this.FromLanguage.SelectedItem, "source language"))
    {
        return;
    }
    if (this.IsMissingInput(this.ToLanguage.SelectedItem, "target language"))
    {
        return;
    }
    //if (this.IsMissingInput(this.Voice.SelectedItem, "voice")) return;
    if (this.IsMissingInput(this.Profanity.SelectedItem, "profanity filter"))
    {
        return;
    }
    if (this.IsMissingInput(this.Mic.SelectedItem, "microphone"))
    {
        return;
    }
    if (this.IsMissingInput(this.Speaker.SelectedItem, "speaker"))
    {
        return;
    }

    if (this.LogAutoSave.IsChecked.Value)
    {
        this.autoSaveFrom = this.Logs.Items.Count;
    }

    // A string tag means the pseudo-device "Play audio from file" was chosen;
    // real devices carry an int tag (device number).
    string tag = ((ComboBoxItem)Mic.SelectedItem).Tag as string;
    string audioFileInputPath = null;
    if (tag == "File")
    {
        audioFileInputPath = this.AudioFileInput.Text;
        if (!File.Exists(audioFileInputPath))
        {
            SetMessage(String.Format("Invalid audio source: selected file does not exist."), "", MessageKind.Error);
            UpdateUiState(UiState.ReadyToConnect);
            return;
        }
    }
    bool shouldSuspendInputAudioDuringTTS = this.CutInputAudioCheckBox.IsChecked.HasValue ?
                                            this.CutInputAudioCheckBox.IsChecked.Value : false;

    // Short correlation id (first segment of a GUID) used to tag logs and files.
    this.correlationId = Guid.NewGuid().ToString("D").Split('-')[0].ToUpperInvariant();

    // Setup speech translation client options
    SpeechClientOptions options;
    string voicename = "";
    if (this.Voice.SelectedItem != null)
    {
        voicename = ((ComboBoxItem)this.Voice.SelectedItem).Tag.ToString();
    }
    options = new SpeechTranslateClientOptions()
    {
        TranslateFrom = ((ComboBoxItem)this.FromLanguage.SelectedItem).Tag.ToString(),
        TranslateTo = ((ComboBoxItem)this.ToLanguage.SelectedItem).Tag.ToString(),
        Voice = voicename,
    };
    options.Hostname = baseUrl;
    options.AuthHeaderKey = "Authorization";
    options.AuthHeaderValue = ""; // set later in ConnectAsync.
    options.ClientAppId = new Guid("EA66703D-90A8-436B-9BD6-7A2707A2AD99");
    options.CorrelationId = this.correlationId;
    options.Features = GetFeatures().ToString().Replace(" ", "");
    options.Profanity = ((SpeechClient.ProfanityFilter)Enum.Parse(typeof(SpeechClient.ProfanityFilter), ((ComboBoxItem)this.Profanity.SelectedItem).Tag.ToString(), true)).ToString();

    // Setup player and recorder but don't start them yet.
    WaveFormat waveFormat = new WaveFormat(16000, 16, 1);

    // WaveProvider for incoming TTS.
    // We use a rather large BufferDuration because we need to be able to hold an entire utterance.
    // TTS audio is received in bursts (faster than real-time).
    textToSpeechBytes = 0;
    playerTextToSpeechWaveProvider = new BufferedWaveProvider(waveFormat);
    playerTextToSpeechWaveProvider.BufferDuration = TimeSpan.FromMinutes(5);

    ISampleProvider sampleProvider = null;
    if (audioFileInputPath != null)
    {
        // Setup mixing of audio from input file and from TTS
        playerAudioInputWaveProvider = new BufferedWaveProvider(waveFormat);
        var srce1 = new Pcm16BitToSampleProvider(playerTextToSpeechWaveProvider);
        var srce2 = new Pcm16BitToSampleProvider(playerAudioInputWaveProvider);
        var mixer = new MixingSampleProvider(srce1.WaveFormat);
        mixer.AddMixerInput(srce1);
        mixer.AddMixerInput(srce2);
        sampleProvider = mixer;
    }
    else
    {
        recorder = new WaveIn();
        recorder.DeviceNumber = (int)((ComboBoxItem)Mic.SelectedItem).Tag;
        recorder.WaveFormat = waveFormat;
        recorder.DataAvailable += OnRecorderDataAvailable;
        sampleProvider = playerTextToSpeechWaveProvider.ToSampleProvider();
    }

    player = new WaveOut();
    player.DeviceNumber = (int)((ComboBoxItem)Speaker.SelectedItem).Tag;
    player.Init(sampleProvider);

    this.audioBytesSent = 0;

    // Optional audio logging: sent audio to a WAV file, received TTS via decoder.
    string logAudioFileName = null;
    if (LogSentAudio.IsChecked.Value || LogReceivedAudio.IsChecked.Value)
    {
        string logAudioPath = System.IO.Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData), Properties.Settings.Default.OutputDirectory);
        try
        {
            Directory.CreateDirectory(logAudioPath);
        }
        catch
        {
            this.AddItemToLog(string.Format("Could not create folder {0}", logAudioPath));
        }
        if (LogSentAudio.IsChecked.Value)
        {
            logAudioFileName = System.IO.Path.Combine(logAudioPath, string.Format("audiosent_{0}.wav", this.correlationId));
        }
        if (LogReceivedAudio.IsChecked.Value)
        {
            string fmt = System.IO.Path.Combine(logAudioPath, string.Format("audiotts_{0}_{{0}}.wav", this.correlationId));
            this.audioReceived = new BinaryMessageDecoder(fmt);
        }
    }

    ConnectAsync(options, shouldSuspendInputAudioDuringTTS).ContinueWith((t) =>
    {
        // Failure path: faulted, cancelled, or the client never actually connected.
        if (t.IsFaulted || t.IsCanceled || !s2smtClient.IsConnected())
        {
            this.Log(t.Exception, "E: Unable to connect: cid='{0}', elapsedMs='{1}'.", this.correlationId, watch.ElapsedMilliseconds);
            this.SafeInvoke(() =>
            {
                this.AutoSaveLogs();
                this.UpdateUiState(UiState.ReadyToConnect);
            });
        }
        else
        {
            // Start playing incoming audio
            player.Play();
            // Start recording and sending
            if (logAudioFileName != null)
            {
                audioSent = new WaveFileWriter(logAudioFileName, waveFormat);
                this.Log("I: Recording outgoing audio in {0}", logAudioFileName);
            }
            // Send the WAVE header
            s2smtClient.SendBinaryMessage(new ArraySegment <byte>(GetWaveHeader(waveFormat)));
            if (audioFileInputPath != null)
            {
                streamAudioFromFileInterrupt = new CancellationTokenSource();
                Task.Run(() => this.StreamFile(audioFileInputPath, streamAudioFromFileInterrupt.Token))
                .ContinueWith((x) =>
                {
                    if (x.IsFaulted)
                    {
                        this.Log(x.Exception, "E: Error while playing audio from input file.");
                    }
                    else
                    {
                        this.Log("I: Done playing audio from input file.");
                    }
                });
            }
            else
            {
                // Start sending audio from the recorder.
                recorder.StartRecording();
            }
            this.Log("I: Connected: cid='{0}', elapsedMs='{1}'.", this.correlationId, watch.ElapsedMilliseconds);
            this.SafeInvoke(() => this.UpdateUiState(UiState.Connected));
        }
    }).ContinueWith((t) =>
    {
        if (t.IsFaulted)
        {
            Log(t.Exception, "E: Failed to start sending audio.");
            this.SafeInvoke(() =>
            {
                this.AutoSaveLogs();
                this.UpdateUiState(UiState.ReadyToConnect);
            });
        }
    });
}
/// <summary>
/// "Connect" button handler: resolves the selected remote endpoint, sets up
/// Opus encoding (48 kHz mono) and the matching decoder, wires microphone
/// capture and mixed playback, starts the RTP session and the status timer,
/// and locks the device/remote selectors while connected.
/// </summary>
/// <param name="sender">Event source (the Connect button).</param>
/// <param name="e">Event data.</param>
private void button1_Click(object sender, EventArgs e)
{
    playersForm.StopAll();
    Remote remote = settings.Remotes[comboBox1.SelectedIndex];
    // Bail out early if the configured remote does not parse to an endpoint.
    try
    {
        farEnd = remote.GetIPEndPoint();
    }
    catch
    {
        MessageBox.Show("This is not a valid IP Address");
        return;
    }
    // Toggle the UI into "connected" mode.
    button2.Enabled = true;
    button1.Enabled = false;
    label5.Visible = true;
    // 960 frames per segment at 48 kHz = 20 ms Opus frames.
    segmentFrames = 960;
    encoder = OpusEncoder.Create(48000, 1, FragLabs.Audio.Codecs.Opus.Application.Audio);
    Decoder = new DGDecoder(remote.CodecType, 48000, 1);
    encoder.Bitrate = 64000;
    bytesPerSegment = encoder.FrameByteCount(segmentFrames);
    // Capture: selected input device, 48 kHz / 16-bit / mono.
    waveIn = new WaveIn(WaveCallbackInfo.FunctionCallback());
    waveIn.DeviceNumber = inputList.SelectedIndex;
    waveIn.DataAvailable += waveIn_DataAvailable;
    waveIn.WaveFormat = new WaveFormat(48000, 16, 1);
    liveInput = new BufferedWaveProvider(waveIn.WaveFormat);
    // Mix the live mic with the players-form mixer output.
    List <ISampleProvider> sampleProviders = new List <ISampleProvider>();
    sampleProviders.Add(liveInput.ToSampleProvider());
    sampleProviders.Add(playersForm.Mixer);
    MixingSampleProvider = new MixingSampleProvider(sampleProviders);
    // Playback buffer: 44.1 kHz stereo, dropping audio on overflow.
    playBuffer = new BufferedWaveProvider(new WaveFormat(44100, 16, 2));
    playBuffer.DiscardOnBufferOverflow = true;
    waveOut = new WaveOut(WaveCallbackInfo.FunctionCallback());
    waveOut.DeviceNumber = outputList.SelectedIndex;
    playersForm.PlaybackDeviceNum = outputList.SelectedIndex;
    // RTP session to the remote endpoint; received packets feed playBuffer.
    session = new ComrexSession();
    session.SetDestination(SDPMediaTypesEnum.audio, farEnd, farEnd);
    session.OnRtpPacketReceived += Session_OnRtpPacketReceived;
    session.Start();
    killsession = false;
    waveOut.Init(playBuffer);
    waveIn.StartRecording();
    // Lazily create the 100 ms UI/status timer, then (re)start it.
    if (timer == null)
    {
        timer = new Timer();
        timer.Interval = 100;
        timer.Tick += timer_Tick;
    }
    timer.Start();
    // Selections are fixed while a session is running.
    comboBox1.Enabled = false;
    outputList.Enabled = false;
    inputList.Enabled = false;
}
/// <summary>
/// Wraps a capture device as the head <see cref="Input"/> element of a processing chain.
/// </summary>
/// <param name="waveIn">The recording device to adapt.</param>
/// <returns>A new <see cref="Input"/> backed by <paramref name="waveIn"/>.</returns>
public static Input ToChainElement(this WaveIn waveIn) => new Input(waveIn);
/// <summary>
/// Connects to the voice server and starts everything: the four long-running
/// pub/sub worker tasks, the Opus codec pair, microphone capture, the
/// four-slot network-audio mixer, playback and the one-second status timer.
/// </summary>
/// <param name="serverIpAddress">Server host used to build the tcp:// endpoints (ports 60000-60003).</param>
/// <param name="clientID">Callsign reported by this client.</param>
/// <param name="inputDevice">Capture device name, resolved via MapInputDevice.</param>
/// <param name="outputDevice">Playback device name, resolved via MapOutputDevice.</param>
public void Start(string serverIpAddress, string clientID, string inputDevice, string outputDevice)
{
    _startTime = DateTime.Now;
    callsign = clientID;
    // One dedicated long-running task per socket: data pub/sub, audio pub/sub.
    taskDataPub = new Task(() => TaskDataPub(cancelTokenSource.Token, dataPublishInputQueue, "tcp://" + serverIpAddress + ":60001"), TaskCreationOptions.LongRunning);
    taskDataPub.Start();
    taskDataSub = new Task(() => TaskDataSub(cancelTokenSource.Token, "tcp://" + serverIpAddress + ":60000"), TaskCreationOptions.LongRunning);
    taskDataSub.Start();
    taskAudioPub = new Task(() => TaskAudioPub(cancelTokenSource.Token, audioPublishInputQueue, "tcp://" + serverIpAddress + ":60003"), TaskCreationOptions.LongRunning);
    taskAudioPub.Start();
    taskAudioSub = new Task(() => TaskAudioSub(cancelTokenSource.Token, audioPlaybackQueue, "tcp://" + serverIpAddress + ":60002"), TaskCreationOptions.LongRunning);
    taskAudioSub.Start();
    // Opus voice codec: 48 kHz mono, 64 kbit/s, 960-sample (20 ms) frames.
    _segmentFrames = 960;
    _encoder = OpusEncoder.Create(48000, 1, FragLabs.Audio.Codecs.Opus.Application.Voip);
    _encoder.Bitrate = 65536;
    _decoder = OpusDecoder.Create(48000, 1);
    _bytesPerSegment = _encoder.FrameByteCount(_segmentFrames);
    // Microphone capture: 16-bit 48 kHz mono, 50 ms buffers.
    _waveIn = new WaveIn(WaveCallbackInfo.FunctionCallback());
    _waveIn.BufferMilliseconds = 50;
    // NOTE(review): this logs device 0's name, but the device actually used is
    // MapInputDevice(inputDevice) below — the log can be misleading.
    Console.WriteLine("Input device: " + WaveIn.GetCapabilities(0).ProductName);
    _waveIn.DeviceNumber = MapInputDevice(inputDevice);
    _waveIn.DataAvailable += _waveIn_DataAvailable;
    _waveIn.WaveFormat = new WaveFormat(48000, 16, 1);
    // Four fixed mixer slots for incoming network audio streams.
    networkAudioBuffers = new List<ClientMixerInput>
    {
        new ClientMixerInput() { InUse = false, Provider = new BufferedWaveProvider(new WaveFormat(48000, 16, 1)) },
        new ClientMixerInput() { InUse = false, Provider = new BufferedWaveProvider(new WaveFormat(48000, 16, 1)) },
        new ClientMixerInput() { InUse = false, Provider = new BufferedWaveProvider(new WaveFormat(48000, 16, 1)) },
        new ClientMixerInput() { InUse = false, Provider = new BufferedWaveProvider(new WaveFormat(48000, 16, 1)) }
    };
    // Each slot: 16-bit -> float -> equalizer -> back to wave, into the 32-bit mixer.
    mixer = new MixingWaveProvider32();
    foreach (var buffer in networkAudioBuffers)
    {
        mixer.AddInputStream(new SampleToWaveProvider(new Equalizer(new WaveToSampleProvider(new Wave16ToFloatProvider(buffer.Provider)), bands)));
    }
    taskAudioPlayback = new Task(() => TaskAudioPlayback(cancelTokenSource.Token, audioPlaybackQueue), TaskCreationOptions.LongRunning);
    taskAudioPlayback.Start();
    _waveOut = new WaveOut();
    // NOTE(review): same caveat as the input log — device 0 vs MapOutputDevice.
    Console.WriteLine("Output device: " + WaveOut.GetCapabilities(0).ProductName);
    _waveOut.DeviceNumber = MapOutputDevice(outputDevice);
    _waveOut.DesiredLatency = 200; // default is 300 ms
    _waveOut.Init(mixer);
    _waveOut.Play();
    _waveIn.StartRecording();
    // One-second housekeeping timer, created once and restarted on every Start().
    if (_timer == null)
    {
        _timer = new System.Timers.Timer();
        _timer.Interval = 1000;
        _timer.Elapsed += _timer_Elapsed;
    }
    _timer.Start();
    started = true;
}
/// <summary>
/// Settings dialog constructor: restores the saved audio configuration and
/// enumerates WaveOut, WaveIn, ASIO and WASAPI devices into the combo boxes.
/// </summary>
public Settings()
{
    Logger.Log("Initializing Settings");
    InitializeComponent();
    linkLabel1.Links.Add(new LinkLabel.Link() { LinkData = "https://github.com/Mnaukal/virtual-loop-pedal" });
    linkLabel2.Links.Add(new LinkLabel.Link() { LinkData = "https://github.com/naudio/NAudio" });
    Properties.Settings settings = Properties.Settings.Default;
    // load saved values
    numericUpDown_sampleRate.Value = settings.SampleRate;
    numericUpDown_latency.Value = settings.DesiredLatency;
    numericUpDown_bufferSize.Value = settings.BufferSize;
    radioButton_waveOutEvent.Checked = (settings.Driver == "WaveEvent");
    radioButton_asio.Checked = (settings.Driver == "ASIO");
    radioButton_wasapi.Checked = (settings.Driver == "Wasapi");
    // Enumerate WaveOut devices. The loop starts at -1 (the Windows wave
    // mapper), so the saved device number is offset by +1 in the combo box.
    Logger.Log("Loading WaveOut devices");
    if (WaveOut.DeviceCount > 0)
    {
        for (var deviceId = -1; deviceId < WaveOut.DeviceCount; deviceId++)
        {
            var capabilities = WaveOut.GetCapabilities(deviceId);
            comboBox_outputWave.Items.Add($"Device {deviceId} ({capabilities.ProductName})");
        }
        comboBox_outputWave.SelectedIndex = (Owner as Pedal) != null ? (Owner as Pedal).settings.WaveOutDeviceNumber + 1 : 0;
    }
    Logger.Log("WaveOut devices loaded");
    // Enumerate WaveIn devices (same -1 wave-mapper offset as above).
    Logger.Log("Loading WaveIn devices");
    if (WaveIn.DeviceCount > 0)
    {
        for (var deviceId = -1; deviceId < WaveIn.DeviceCount; deviceId++)
        {
            var capabilities = WaveIn.GetCapabilities(deviceId);
            comboBox_inputWave.Items.Add($"Device {deviceId} ({capabilities.ProductName})");
        }
        comboBox_inputWave.SelectedIndex = (Owner as Pedal) != null ? (Owner as Pedal).settings.WaveInDeviceNumber + 1 : 0;
    }
    Logger.Log("WaveIn devices loaded");
    // Enumerate ASIO drivers; GetDriverNames throws when no driver is installed.
    Logger.Log("Loading ASIO devices");
    try
    {
        var asioDriverNames = AsioOut.GetDriverNames();
        foreach (string driverName in asioDriverNames)
        {
            comboBox_asioDriver.Items.Add(driverName);
        }
        comboBox_asioDriver.SelectedIndex = 0;
        Logger.Log("ASIO devices loaded");
    }
    catch
    {
        // ASIO driver not available
        label_noAsio.Visible = true;
        Logger.Log("ASIO driver not available, disabling ASIO");
    }
    // Enumerate active WASAPI render and capture endpoints.
    Logger.Log("Loading WASAPI devices");
    MMDeviceEnumerator enumerator = new MMDeviceEnumerator();
    MMDeviceCollection endPoints = enumerator.EnumerateAudioEndPoints(DataFlow.Render, DeviceState.Active);
    List<WasapiDeviceComboItem> outComboItems = new List<WasapiDeviceComboItem>();
    foreach (MMDevice endPoint in endPoints)
    {
        outComboItems.Add(new WasapiDeviceComboItem() { Description = endPoint.FriendlyName + " (" + endPoint.DeviceFriendlyName + ")", Device = endPoint });
    }
    comboBox_outputWasapi.DisplayMember = "Description";
    comboBox_outputWasapi.ValueMember = "Device";
    comboBox_outputWasapi.DataSource = outComboItems;
    endPoints = enumerator.EnumerateAudioEndPoints(DataFlow.Capture, DeviceState.Active);
    List<WasapiDeviceComboItem> inComboItems = new List<WasapiDeviceComboItem>();
    foreach (MMDevice endPoint in endPoints)
    {
        inComboItems.Add(new WasapiDeviceComboItem() { Description = endPoint.FriendlyName + " (" + endPoint.DeviceFriendlyName + ")", Device = endPoint });
    }
    comboBox_inputWasapi.DisplayMember = "Description";
    comboBox_inputWasapi.ValueMember = "Device";
    comboBox_inputWasapi.DataSource = inComboItems;
    Logger.Log("WASAPI devices loaded");
    Logger.Log("Settings initialized successfully");
}
/// <summary>
/// Load handler for the microphone-source dialog: populates the device,
/// sample-rate and clone lists, then restores the controls for whichever
/// source type (typeindex) this microphone is configured with.
/// </summary>
private void MicrophoneSourceLoad(object sender, EventArgs e)
{
    UISync.Init(this);
    // VLC-based sources are only available when VLC is installed.
    tableLayoutPanel2.Enabled = VlcHelper.VlcInstalled;
    linkLabel3.Visible = lblInstallVLC.Visible = !tableLayoutPanel2.Enabled;
    cmbVLCURL.Text = MainForm.Conf.VLCURL;
    cmbVLCURL.Items.AddRange(ObjectList(MainForm.Conf.RecentVLCList));
    cmbFFMPEGURL.Items.AddRange(ObjectList(MainForm.Conf.RecentVLCList));
    ddlSampleRate.Items.AddRange(SampleRates);
    // Fill the local-device dropdown and pre-select the configured device.
    try
    {
        int selind = -1;
        for (int n = 0; n < WaveIn.DeviceCount; n++)
        {
            ddlDevice.Items.Add(WaveIn.GetCapabilities(n).ProductName);
            if (WaveIn.GetCapabilities(n).ProductName == Mic.settings.sourcename)
            {
                selind = n;
            }
        }
        ddlDevice.Enabled = true;
        if (selind > -1)
        {
            ddlDevice.SelectedIndex = selind;
        }
        else
        {
            if (ddlDevice.Items.Count == 0)
            {
                ddlDevice.Items.Add(_noDevices);
                ddlDevice.Enabled = false;
            }
            else
            {
                ddlDevice.SelectedIndex = 0;
            }
        }
    }
    catch (ApplicationException ex)
    {
        Logger.LogExceptionToFile(ex);
        ddlDevice.Items.Add(_noDevices);
        ddlDevice.Enabled = false;
    }
    ddlSampleRate.SelectedIndex = 0;
    foreach (var mic in MainForm.Microphones)
    {
        if (mic.id != Mic.id && mic.settings.typeindex != 5) // dont allow a clone of a clone as the events get too complicated (and also it's pointless)
        {
            ddlCloneMicrophone.Items.Add(new MainForm.ListItem(mic.name, mic.id));
        }
    }
    SetSourceIndex(Mic.settings.typeindex);
    // Restore the source-specific control for the configured type:
    // 0=local device, 1=network, 2=VLC, 3=FFMPEG, 4=camera, 5=clone, 6=WAV stream.
    switch (Mic.settings.typeindex)
    {
        case 0:
            if (ddlDevice.Items.Count > 0)
            {
                tcAudioSource.SelectedIndex = 0;
                int j = 0;
                foreach (int s in ddlSampleRate.Items)
                {
                    if (s == Mic.settings.samples)
                    {
                        ddlSampleRate.SelectedIndex = j;
                    }
                    j++;
                }
            }
            break;
        case 1:
            txtNetwork.Text = Mic.settings.sourcename;
            break;
        case 2:
            cmbVLCURL.Text = Mic.settings.sourcename;
            break;
        case 3:
            cmbFFMPEGURL.Text = Mic.settings.sourcename;
            break;
        case 4:
            // sourcename holds the linked camera's id.
            int i;
            Int32.TryParse(Mic.settings.sourcename, out i);
            var c = MainForm.Cameras.SingleOrDefault(p => p.id == i);
            lblCamera.Text = c == null ? LocRm.GetString("Removed") : c.name;
            break;
        case 5:
            // sourcename holds the cloned microphone's id.
            int id;
            if (Int32.TryParse(Mic.settings.sourcename, out id))
            {
                foreach (MainForm.ListItem li in ddlCloneMicrophone.Items)
                {
                    if ((int)li.Value == id)
                    {
                        ddlCloneMicrophone.SelectedItem = li;
                        break;
                    }
                }
            }
            break;
        case 6:
            txtWavStreamURL.Text = Mic.settings.sourcename;
            break;
    }
    // Normalize line endings in the stored VLC arguments for display.
    txtVLCArgs.Text = Mic.settings.vlcargs.Replace("\r\n", "\n").Replace("\n\n", "\n").Replace("\n", Environment.NewLine);
    numAnalyseDuration.Value = Mic.settings.analyzeduration;
}
/// <summary>
/// Starts a local audio preview: plays the microphone back through the radio
/// filter + NATO-tone effect chain so the user can hear their own transmit
/// audio, while also running it through the Speex preprocessor and Opus codec.
/// Exits the process if either the output or input side fails to start.
/// </summary>
/// <param name="mic">WaveIn device number to capture from.</param>
/// <param name="speakers">WASAPI render device for playback.</param>
/// <param name="windowsN">Passed through to WasapiOut (Windows N build handling).</param>
public void StartPreview(int mic, MMDevice speakers, bool windowsN)
{
    try
    {
        _globalSettings = GlobalSettingsStore.Instance;
        _waveOut = new WasapiOut(speakers, AudioClientShareMode.Shared, true, 40, windowsN);
        // Mic samples are queued here by the DataAvailable handler.
        _buffBufferedWaveProvider = new BufferedWaveProvider(new WaveFormat(AudioManager.INPUT_SAMPLE_RATE, 16, 1));
        _buffBufferedWaveProvider.ReadFully = true;
        _buffBufferedWaveProvider.DiscardOnBufferOverflow = true;
        // Effect chain: radio filter, then looping NATO tone overlay.
        RadioFilter filter = new RadioFilter(_buffBufferedWaveProvider.ToSampleProvider());
        CachedLoopingAudioProvider natoEffect = new CachedLoopingAudioProvider(filter.ToWaveProvider16(), new WaveFormat(AudioManager.INPUT_SAMPLE_RATE, 16, 1), CachedAudioEffect.AudioEffectTypes.NATO_TONE);
        // add final volume boost to all mixed audio; the peak callback drives
        // the speaker level meter (SpeakerMax, in dB).
        _volumeSampleProvider = new VolumeSampleProviderWithPeak(natoEffect.ToSampleProvider(), (peak => SpeakerMax = (float)VolumeConversionHelper.ConvertFloatToDB(peak)));
        _volumeSampleProvider.Volume = SpeakerBoost;
        // Match the provider's channel count to the output device's mix format.
        if (speakers.AudioClient.MixFormat.Channels == 1)
        {
            if (_volumeSampleProvider.WaveFormat.Channels == 2)
            {
                _waveOut.Init(_volumeSampleProvider.ToMono());
            }
            else
            {
                // already mono
                _waveOut.Init(_volumeSampleProvider);
            }
        }
        else
        {
            if (_volumeSampleProvider.WaveFormat.Channels == 1)
            {
                _waveOut.Init(_volumeSampleProvider.ToStereo());
            }
            else
            {
                // already stereo
                _waveOut.Init(_volumeSampleProvider);
            }
        }
        _waveOut.Play();
    }
    catch (Exception ex)
    {
        // NOTE(review): hard process exit on audio failure — intentional here,
        // but brutal; confirm this is the desired UX.
        Logger.Error(ex, "Error starting audio Output - Quitting! " + ex.Message);
        ShowOutputError("Problem Initialising Audio Output!");
        Environment.Exit(1);
    }
    try
    {
        _speex = new Preprocessor(AudioManager.SEGMENT_FRAMES, AudioManager.INPUT_SAMPLE_RATE);
        // opus
        _encoder = OpusEncoder.Create(AudioManager.INPUT_SAMPLE_RATE, 1, FragLabs.Audio.Codecs.Opus.Application.Voip);
        _encoder.ForwardErrorCorrection = false;
        _decoder = OpusDecoder.Create(AudioManager.INPUT_SAMPLE_RATE, 1);
        _decoder.ForwardErrorCorrection = false;
        _waveIn = new WaveIn(WaveCallbackInfo.FunctionCallback())
        {
            BufferMilliseconds = AudioManager.INPUT_AUDIO_LENGTH_MS,
            DeviceNumber = mic
        };
        _waveIn.NumberOfBuffers = 2;
        _waveIn.DataAvailable += _waveIn_DataAvailable;
        _waveIn.WaveFormat = new WaveFormat(AudioManager.INPUT_SAMPLE_RATE, 16, 1);
        // debug wave file
        //_waveFile = new WaveFileWriter(@"C:\Temp\Test-Preview.wav", _waveIn.WaveFormat);
        _waveIn.StartRecording();
    }
    catch (Exception ex)
    {
        Logger.Error(ex, "Error starting audio Input - Quitting! " + ex.Message);
        ShowInputError();
        Environment.Exit(1);
    }
}
/// <summary>
/// Validates the selected source, persists it into Mic.settings and closes the
/// dialog with DialogResult.OK. Returns without closing when validation fails.
/// </summary>
private void Finish()
{
    int sourceIndex = GetSourceIndex();
    switch (sourceIndex)
    {
        case 0: // local capture device
            if (!ddlDevice.Enabled)
            {
                Close();
                return;
            }
            Mic.settings.sourcename = ddlDevice.SelectedItem.ToString();
            // Resolve the device number for the chosen product name.
            // (Fixed: the previous loop also re-added every device name to
            // ddlDevice — copied from the Load handler — which duplicated the
            // dropdown's items each time Finish ran.)
            int selind = -1;
            for (int n = 0; n < WaveIn.DeviceCount; n++)
            {
                if (WaveIn.GetCapabilities(n).ProductName == Mic.settings.sourcename)
                {
                    selind = n;
                }
            }
            int channels = WaveIn.GetCapabilities(selind).Channels;
            Mic.settings.channels = channels;
            Mic.settings.samples = Convert.ToInt32(ddlSampleRate.SelectedItem);
            Mic.settings.bits = 16;
            break;
        case 1: // network stream URL
            try
            {
                var url = new Uri(txtNetwork.Text);
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
                return;
            }
            Mic.settings.sourcename = txtNetwork.Text;
            // set format
            Mic.settings.channels = 1;
            Mic.settings.samples = 22050;
            Mic.settings.bits = 16;
            break;
        case 2: // VLC source
        {
            string t = cmbVLCURL.Text.Trim();
            if (t == String.Empty)
            {
                MessageBox.Show(LocRm.GetString("Validate_Microphone_SelectSource"), LocRm.GetString("Error"));
                return;
            }
            Mic.settings.sourcename = t;
        }
        break;
        case 3: // FFMPEG URL
            try
            {
                var url = new Uri(cmbFFMPEGURL.Text);
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
                return;
            }
            Mic.settings.sourcename = cmbFFMPEGURL.Text;
            break;
        case 5: // clone of another microphone (sourcename = cloned mic's id)
            if (ddlCloneMicrophone.SelectedIndex > -1)
            {
                int micid = (int)((MainForm.ListItem)ddlCloneMicrophone.SelectedItem).Value;
                Mic.settings.sourcename = micid.ToString(CultureInfo.InvariantCulture);
                var mic = MainForm.Microphones.First(p => p.id == micid);
                Mic.name = "Clone: " + mic.name;
            }
            else
            {
                MessageBox.Show(this, LocRm.GetString("SelectMicrophoneToClone"));
                return;
            }
            break;
        case 6: // WAV stream URL
            try
            {
                var url = new Uri(txtWavStreamURL.Text);
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
                return;
            }
            Mic.settings.sourcename = txtWavStreamURL.Text;
            // set default format
            Mic.settings.channels = 1;
            Mic.settings.samples = 16000;
            Mic.settings.bits = 16;
            break;
    }
    // Remember the VLC URL in the recent list (pipe-separated).
    MainForm.Conf.VLCURL = cmbVLCURL.Text.Trim();
    if (!MainForm.Conf.RecentVLCList.Contains(MainForm.Conf.VLCURL) && MainForm.Conf.VLCURL != "")
    {
        MainForm.Conf.RecentVLCList = (MainForm.Conf.RecentVLCList + "|" + MainForm.Conf.VLCURL).Trim('|');
    }
    Mic.settings.typeindex = sourceIndex;
    Mic.settings.decompress = true; // chkDecompress.Checked;
    Mic.settings.vlcargs = txtVLCArgs.Text.Trim();
    Mic.settings.analyzeduration = (int)numAnalyseDuration.Value;
    DialogResult = DialogResult.OK;
    Close();
}
/// <summary>
/// Replaces the current capture device with one created for the device number
/// selected in the WaveIn devices list, disposing the previous one first.
/// </summary>
private void SetSelectedWaveIn()
{
    if (waveIn != null)
    {
        waveIn.Dispose();
    }
    var deviceNumber = Int32.Parse(WaveInDevicesListBox.SelectedValue.ToString());
    waveIn = deviceService.CreateWaveIn(deviceNumber);
}
/// <summary>
/// Validates the capture settings, builds the audio source, image provider and
/// video/gif/audio encoder for the selected mode, then starts the Recorder and
/// registers the output file in the recent list.
/// </summary>
void StartRecording()
{
    var SelectedAudioSourceId = AudioSettings.SelectedAudioSourceId;
    var SelectedVideoSourceKind = VideoSettings.SelectedVideoSourceKind;
    var SelectedVideoSource = VideoSettings.SelectedVideoSource;
    var Encoder = VideoSettings.Encoder;
    Duration = OtherSettings.CaptureDuration;
    Delay = OtherSettings.StartDelay;
    // NOTE(review): Delay is multiplied by 1000 here, which implies Delay is in
    // seconds while Duration is in milliseconds — confirm the units.
    if (Duration != 0 && (Delay * 1000 > Duration))
    {
        Status.Content = "Delay cannot be greater than Duration";
        SystemSounds.Asterisk.Play();
        return;
    }
    if (OtherSettings.MinimizeOnStart)
    {
        WindowState = WindowState.Minimized;
    }
    VideoSettings.Instance.VideoSourceKindBox.IsEnabled = false;
    VideoSettings.Instance.VideoSourceBox.IsEnabled = SelectedVideoSourceKind == VideoSourceKind.Window;
    // UI Buttons
    RecordButton.ToolTip = "Stop";
    RecordButton.IconData = (RectangleGeometry)FindResource("StopIcon");
    ReadyToRecord = false;
    // Choose the output extension: audio-only -> .mp3/.wav, video -> .gif/.avi.
    int temp;
    string Extension = SelectedVideoSourceKind == VideoSourceKind.NoVideo
        ? (AudioSettings.EncodeAudio && int.TryParse(SelectedAudioSourceId, out temp) ? ".mp3" : ".wav")
        : (Encoder.Name == "Gif" ? ".gif" : ".avi");
    lastFileName = Path.Combine(OutPath.Text, DateTime.Now.ToString("yyyy-MM-dd-HH-mm-ss") + Extension);
    Status.Content = Delay > 0 ? string.Format("Recording from t={0}ms...", Delay) : "Recording...";
    // Reset and restart the on-screen elapsed-time display.
    DTimer.Stop();
    Seconds = Minutes = 0;
    TimeManager.Content = "00:00";
    DTimer.Start();
    int AudioBitRate = App.IsLamePresent ? Mp3EncoderLame.SupportedBitRates[AudioSettings.AudioQuality] : 0;
    IAudioProvider AudioSource = null;
    WaveFormat wf = new WaveFormat(44100, 16, AudioSettings.Stereo ? 2 : 1);
    // "-1" means no audio; a numeric id selects a WaveIn device, any other id
    // selects a WASAPI loopback device (which dictates its own wave format).
    if (SelectedAudioSourceId != "-1")
    {
        int i;
        if (int.TryParse(SelectedAudioSourceId, out i))
        {
            AudioSource = new WaveIn(i, VideoSettings.FrameRate, wf);
        }
        else
        {
            AudioSource = new WasapiLoopbackCapture(WasapiAudioDevice.Get(SelectedAudioSourceId), true);
            wf = AudioSource.WaveFormat;
        }
    }
    #region ImageProvider
    IImageProvider ImgProvider = null;
    Func<System.Windows.Media.Color, System.Drawing.Color> ConvertColor = (C) => System.Drawing.Color.FromArgb(C.A, C.R, C.G, C.B);
    var mouseKeyHook = new MouseKeyHook(OtherSettings.CaptureClicks, OtherSettings.CaptureKeystrokes);
    if (SelectedVideoSourceKind == VideoSourceKind.Window)
    {
        var Src = SelectedVideoSource as WindowVSLI;
        // Capturing the region-selector window itself means "capture that region".
        if (Src.Handle == RegionSelector.Instance.Handle && OtherSettings.StaticRegionCapture)
        {
            ImgProvider = new StaticRegionProvider(RegionSelector.Instance, cursor, mouseKeyHook);
            VideoSettings.Instance.VideoSourceBox.IsEnabled = false;
        }
        else
        {
            ImgProvider = new WindowProvider(() => (VideoSettings.SelectedVideoSource as WindowVSLI).Handle, ConvertColor(VideoSettings.BackgroundColor), cursor, mouseKeyHook);
        }
    }
    else if (SelectedVideoSourceKind == VideoSourceKind.Screen)
    {
        ImgProvider = new ScreenProvider((SelectedVideoSource as ScreenVSLI).Screen, cursor, mouseKeyHook);
    }
    #endregion
    #region VideoEncoder
    IVideoFileWriter VideoEncoder = null;
    if (Encoder.Name == "Gif")
    {
        // Unconstrained gif capture builds its own Recorder directly.
        if (GifSettings.UnconstrainedGif)
        {
            Recorder = new UnconstrainedFrameRateGifRecorder(
                new GifWriter(lastFileName, Repeat: GifSettings.GifRepeat ? GifSettings.GifRepeatCount : -1),
                ImgProvider);
        }
        else
        {
            VideoEncoder = new GifWriter(lastFileName, 1000 / VideoSettings.FrameRate, GifSettings.GifRepeat ? GifSettings.GifRepeatCount : -1);
        }
    }
    else if (SelectedVideoSourceKind != VideoSourceKind.NoVideo)
    {
        VideoEncoder = new AviWriter(lastFileName, ImgProvider, Encoder, VideoSettings.VideoQuality, VideoSettings.FrameRate, AudioSource, AudioBitRate == 0 ? null : new Mp3EncoderLame(wf.Channels, wf.SampleRate, AudioBitRate));
    }
    #endregion
    // Recorder is still null unless the unconstrained-gif branch ran above.
    if (Recorder == null)
    {
        if (SelectedVideoSourceKind == VideoSourceKind.NoVideo)
        {
            if (AudioSettings.EncodeAudio)
            {
                Recorder = new AudioRecorder(AudioSource, new EncodedAudioFileWriter(lastFileName, new Mp3EncoderLame(wf.Channels, wf.SampleRate, AudioBitRate)));
            }
            else
            {
                Recorder = new AudioRecorder(AudioSource, new WaveFileWriter(lastFileName, wf));
            }
        }
        else
        {
            Recorder = new Recorder(VideoEncoder, ImgProvider, AudioSource);
        }
    }
    // Marshal stop/error notifications back onto the UI thread.
    Recorder.RecordingStopped += (E) => Dispatcher.Invoke(() =>
    {
        OnStopped();
        if (E != null)
        {
            Status.Content = "Error";
            MessageBox.Show(E.ToString());
        }
    });
    Recorder.Start(Delay);
    Recent.Add(lastFileName, VideoEncoder == null ? RecentItemType.Audio : RecentItemType.Video);
}
/// <summary>
/// Initializes the window and probes the capabilities of every wave input
/// device, including device -1 (the Windows wave mapper).
/// </summary>
public MainWindow()
{
    InitializeComponent();
    var devices = (from deviceNumber in Enumerable.Range(-1, WaveIn.DeviceCount + 1)
                   select WaveIn.GetCapabilities(deviceNumber)).ToArray();
}
/// <summary>
/// Begins capturing audio on the underlying wave-in device.
/// </summary>
public virtual void Start()
{
    WaveIn.StartRecording();
}
/// <summary>
/// Creates the capture/playback device pair and points the temporary
/// recording file at the application's base directory.
/// </summary>
public Naudio()
{
    waveIn = new WaveIn();
    waveOut = new WaveOut();
    // Path.Combine instead of string concatenation: BaseDirectory already ends
    // with a separator, so appending @"\Temp.wav" produced a doubled backslash.
    fileName = System.IO.Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "Temp.wav");
}
/// <summary>
/// Sets up the window, starts microphone capture at 44.1 kHz, and initializes
/// the sample/history buffers, Kalman filter and Weka classifier used by the
/// audio-tracking pipeline. Assumes a single-speaker configuration to start.
/// </summary>
public MainWindow()
{
    InitializeComponent();
    this.WindowStartupLocation = System.Windows.WindowStartupLocation.CenterScreen;
    this.KeyDown += new KeyEventHandler(MainWindow_KeyDown);
    // Print out all the possible input devices to console. Mostly for debugging.
    int waveInDevices = WaveIn.DeviceCount;
    for (int waveInDevice = 0; waveInDevice < waveInDevices; waveInDevice++)
    {
        WaveInCapabilities deviceInfo = WaveIn.GetCapabilities(waveInDevice);
        Console.WriteLine("Device {0}: {1}, {2} channels", waveInDevice, deviceInfo.ProductName, deviceInfo.Channels);
    }
    // Instantiate a waveIn device and start recording.
    waveIn = new WaveIn();
    // Buffer length in ms, scaled by buffersize (47 ms when buffersize == 2048).
    waveIn.BufferMilliseconds = 47 * buffersize / 2048;
    waveIn.DeviceNumber = 0;
    // NOTE(review): 32-bit mono samples — confirm the DataAvailable handler
    // decodes samples at this width, not 16-bit.
    waveIn.WaveFormat = new WaveFormat(44100, 32, 1);
    waveIn.DataAvailable += waveIn_DataAvailable;
    try
    {
        waveIn.StartRecording();
    }
    catch (NAudio.MmException e)
    {
        // No capture device present; the app continues without audio input.
        Console.WriteLine(e.ToString() + "\nPlug in a microphone!");
    }
    history = new List<List<int>>();
    inverse_history = new List<List<int>>();
    pointHist = new PointCollection();
    bin = new int[buffersize * 2];
    sampledata = new float[buffersize * 2];
    priori = new double[buffersize * 2];
    // Initializing all the global variables to base values for 1 speaker configuration.
    channelLabel = new int[1];
    channelLabel[0] = 1;
    velocity = new int[1];
    velocity[0] = 0;
    prev_displacement = new int[1];
    prev_displacement[0] = 0;
    instant_displacement = new int[1];
    instant_displacement[0] = 0;
    towards_displacement = new int[1];
    towards_displacement[0] = 1;
    displacement = new int[1];
    displacement[0] = 0;
    for (int i = 0; i < buffersize * 2; i++)
    {
        bin[i] = i;
        sampledata[i] = 0;
        priori[i] = 0;
    }
    // Kalman filter related stuff.
    filter = new VDKalman(2);
    filter.initialize(1, .1, 1, 0);
    // To prevent problems with empty lists, we assume 1 channel to start.
    history.Add(new List<int> { 0 });
    inverse_history.Add(new List<int> { 0 });
    // Load up the classifier model file.
    WekaHelper.initialize();
}
/// <summary>
/// Monitor page: loads the database, draws the UI, then runs a background loop
/// that records 2-second WAV clips, uploads each one to the classification
/// server and pushes the per-actuator probabilities back into the UI.
/// </summary>
public Page_Monitor()
{
    InitializeComponent();
    // Load the database.
    databaseSystem.Load();
    Draw();
    // Start recording (endless background loop).
    Task.Run(() =>
    {
        WaveIn waveSource = null;
        WaveFileWriter waveFile = null;
        int a = 0;
        while (true)
        {
            // One timestamped .wav file per iteration under .\Record\.
            String FileName = DateTime.Now.ToString().Replace(":", "") + ".wav";
            String FilePath = System.Environment.CurrentDirectory + @"\Record\";
            String FileFullName = FilePath + FileName;
            DirectoryInfo directoryInfo = new DirectoryInfo(FilePath);
            if (directoryInfo.Exists == false)
            {
                directoryInfo.Create();
            }
            // NOTE(review): capture objects are created via the UI dispatcher —
            // presumably because WaveIn's default callback needs a UI thread; confirm.
            this.Dispatcher.Invoke(() =>
            {
                waveSource = new WaveIn();
                waveSource.WaveFormat = new WaveFormat(44100, 1);
                waveSource.DataAvailable += new EventHandler<WaveInEventArgs>((send, ee) =>
                {
                    if (waveFile != null)
                    {
                        waveFile.Write(ee.Buffer, 0, ee.BytesRecorded);
                        waveFile.Flush();
                    }
                });
                waveSource.RecordingStopped += new EventHandler<StoppedEventArgs>((send, ee) =>
                {
                    // Dispose both the device and the writer once capture stops.
                    if (waveSource != null)
                    {
                        waveSource.Dispose();
                        waveSource = null;
                    }
                    if (waveFile != null)
                    {
                        waveFile.Dispose();
                        waveFile = null;
                    }
                });
                waveFile = new WaveFileWriter(FileFullName, waveSource.WaveFormat);
            });
            // Record for two seconds, then give the writer a second to close.
            waveSource.StartRecording();
            Thread.Sleep(2000);
            waveSource.StopRecording();
            Thread.Sleep(1000);
            // Request classification from the server.
            WebClient myWebClient = new WebClient();
            String uriString = "http://10.10.97.210/upload";
            byte[] responseArray = myWebClient.UploadFile(uriString, FileFullName);
            String result = Encoding.ASCII.GetString(responseArray);
            JObject jo;
            try
            {
                jo = JObject.Parse(result);
            }
            catch
            {
                // Unparseable response: skip this clip and record the next one.
                continue;
            }
            // Push each actuator's probability (percentage) into the UI list.
            this.Dispatcher.Invoke(() =>
            {
                for (int i = 0; i < xml_ActuatorList.Children.Count - 1; i++)
                {
                    ((Page_Monitor_Actuator)xml_ActuatorList.Children[i]).Probability = float.Parse(jo["prob"][i].ToString().Substring(0, 4)) * 100;
                }
            });
            // After classification, move the clip from Temp to Record.
            // NOTE(review): FileInfo is constructed from the directory path, so
            // Exists is always false and this move never executes — verify intent.
            FileInfo fileMove = new FileInfo(FilePath);
            if (fileMove.Exists)
            {
                fileMove.MoveTo((FilePath + FileName));
            }
        }
    });
    // Expose the redraw callback globally.
    ReDraw = Draw;
}
/// <summary>
/// Settings dialog: enumerates the available input/output audio devices into
/// the combo boxes and restores the saved server address and device choices.
/// The server address field is locked while the user is logged in.
/// </summary>
public SettingsForm()
{
    InitializeComponent();
    /*if(!Program.setServ.checkIOHash())
    {
        this.inputDevices_Combo.BackColor = Color.Red;
        this.outputDevices_Combo.BackColor = Color.Red;
    }*/
    // NOTE(review): this MMDevice pass only counts capture endpoints (j) and
    // adds nothing to the combo box — the WaveIn loop below populates it.
    MMDeviceEnumerator enumerator = new MMDeviceEnumerator();
    int j = 0;
    foreach (MMDevice device in enumerator.EnumerateAudioEndPoints(DataFlow.Capture, DeviceState.Active))
    {
        //Console.WriteLine("{0}, {1}", device.FriendlyName, device.State);
        //this.inputDevices_Combo.Items.Add(String.Format("{0}: {1}", j, device.FriendlyName));
        j++;
    }
    int availableInputDevicesCount = WaveIn.DeviceCount;
    for (int i = 0; i < availableInputDevicesCount; i++)
    {
        WaveInCapabilities deviceInfo = WaveIn.GetCapabilities(i);
        this.inputDevices_Combo.Items.Add(String.Format("{0}: {1}", i, deviceInfo.ProductName));
    }
    // Same pattern for the render side.
    MMDeviceEnumerator enumerator2 = new MMDeviceEnumerator();
    int j2 = 0;
    foreach (MMDevice device in enumerator2.EnumerateAudioEndPoints(DataFlow.Render, DeviceState.Active))
    {
        //this.outputDevices_Combo.Items.Add(String.Format("{0}: {1}", j2, device.FriendlyName));
        j2++;
    }
    int availableOutputDevicesCount = WaveOut.DeviceCount;
    for (int i = 0; i < availableOutputDevicesCount; i++)
    {
        WaveOutCapabilities deviceInfo = WaveOut.GetCapabilities(i);
        this.outputDevices_Combo.Items.Add(String.Format("{0}: {1}", i, deviceInfo.ProductName));
    }
    // Default to the first device, then restore the saved selections below.
    this.inputDevices_Combo.SelectedIndex = 0;
    this.outputDevices_Combo.SelectedIndex = 0;
    //this.serverAddress_Input.Text = Shared.IP.serverIp;
    this.serverAddress_Input.Text = Program.setServ.getServerIP();
    this.actualIP = Program.setServ.getServerIP();
    this.inputDevices_Combo.SelectedIndex = Program.setServ.getIOInputDevice();
    this.actualIOInputDev = Program.setServ.getIOInputDevice();
    this.outputDevices_Combo.SelectedIndex = Program.setServ.getIOOutputDevice();
    this.actualIOOutputDev = Program.setServ.getIOOutputDevice();
    // The server address can only be edited while logged out.
    this.serverAddress_Input.Enabled = !Program.isLoggedIn;
    if (this.serverAddress_Input.Enabled)
    {
        this.serverAddress_label.Text = "Adres IP Serwera";
    }
    else
    {
        this.serverAddress_label.Text = "Wyloguj się aby zmienić IP";
    }
}
/// <summary>
/// Releases the wave-in device held by this instance. Subsequent calls are
/// no-ops.
/// </summary>
public new void Dispose()
{
    if (!m_IsDisposed)
    {
        m_IsDisposed = true;
        m_pWaveIn.Dispose();
        m_pWaveIn = null;
    }
}
/// <summary>
/// Enumerates the available audio input devices, pairing each WaveIn device
/// number with its full endpoint name where one can be matched.
/// </summary>
/// <returns>
/// One <see cref="AudioDeviceInfo"/> per WaveIn device (lazily evaluated).
/// </returns>
public static IEnumerable<AudioDeviceInfo> GetInputDevices()
{
    var fullNames = GetFullDeviceNames(DataFlow.Capture);
    return Enumerable.Range(0, WaveIn.DeviceCount).Select(id =>
    {
        // WaveIn product names are truncated, so match them as an ordinal
        // prefix of the full endpoint names (fixes culture-sensitive StartsWith).
        var productName = WaveIn.GetCapabilities(id).ProductName;
        var fullName = fullNames.FirstOrDefault(n => n.StartsWith(productName, StringComparison.Ordinal));
        // Fall back to the truncated product name instead of a null name when
        // no full endpoint name matches.
        return new AudioDeviceInfo(fullName ?? productName, id);
    });
}
/// <summary>
/// Stops capturing audio on the underlying wave-in device.
/// </summary>
public virtual void Stop()
{
    WaveIn.StopRecording();
}
/// <summary>
/// Activates this microphone: resolves the configured capture device by
/// product name, wires the capture -> sample-channel -> metering chain and
/// starts recording. Sets NoSource and deactivates the mic when the device is
/// no longer attached or recording fails to start.
/// </summary>
public void Enable()
{
    _processing = true;
    _sampleRate = Micobject.settings.samples;
    _bitsPerSample = Micobject.settings.bits;
    _channels = Micobject.settings.channels;
    RecordingFormat = new WaveFormat(_sampleRate, _bitsPerSample, _channels);
    // local device: find the device number matching the saved product name.
    int i = 0, selind = -1;
    for (int n = 0; n < WaveIn.DeviceCount; n++)
    {
        if (WaveIn.GetCapabilities(n).ProductName == Micobject.settings.sourcename)
        {
            selind = i;
        }
        i++;
    }
    if (selind == -1)
    {
        // device no longer connected
        Micobject.settings.active = false;
        NoSource = true;
        _processing = false;
        return;
    }
    _waveIn = new WaveIn
    {
        BufferMilliseconds = 40,
        DeviceNumber = selind,
        WaveFormat = RecordingFormat
    };
    _waveIn.DataAvailable += WaveInDataAvailable;
    _waveIn.RecordingStopped += WaveInRecordingStopped;
    // Capture -> sample channel -> metering provider (drives the level display).
    _waveProvider = new WaveInProvider(_waveIn);
    _sampleChannel = new SampleChannel(_waveProvider);
    _meteringProvider = new MeteringSampleProvider(_sampleChannel);
    _meteringProvider.StreamVolume += _meteringProvider_StreamVolume;
    try
    {
        _waveIn.StartRecording();
    }
    catch (Exception ex)
    {
        MainForm.LogExceptionToFile(ex);
        MessageBox.Show(LocRM.GetString("AudioMonitoringError") + ": " + ex.Message, LocRM.GetString("Error"));
        _processing = false;
        return;
    }
    NoSource = false;
    Micobject.settings.active = true;
    MainForm.NeedsSync = true;
    Invalidate();
    _processing = false;
}
/// <summary>
/// Runs once the control socket is up: reveals the media controls and opens
/// the camera (9682), incoming-audio (9676) and outgoing-audio/"speaker"
/// (9679) websocket channels to the remote device at the given IP.
/// </summary>
private void Sock_Opened(string ip, EventArgs e)
{
    WelcomeMessageSnackbar.Visibility = Visibility.Hidden;
    PopupBox.Visibility = Visibility.Hidden;
    cam.Visibility = Visibility.Visible;
    mic.Visibility = Visibility.Visible;
    bze.Visibility = Visibility.Visible;
    Flash.Visibility = Visibility.Visible;
    // Camera stream: each binary message is decoded as a bitmap frame and
    // rotated before being painted as the window background.
    camera = new WebSocket4Net.WebSocket("ws://" + ip + ":9682");
    camera.DataReceived += (ui, ty) =>
    {
        Gr.Dispatcher.Invoke(new Action(() =>
        {
            MemoryStream strm = new MemoryStream(ty.Data);
            var imageSource = new BitmapImage();
            imageSource.BeginInit();
            imageSource.StreamSource = strm;
            imageSource.EndInit();
            // cam.Tag selects the rotation direction: "0" -> -90°, else +90°.
            if (cam.Tag.ToString() == "0")
            {
                Image rotated90 = new Image();
                TransformedBitmap tb = new TransformedBitmap();
                tb.BeginInit();
                tb.Source = imageSource;
                // Set image rotation.
                RotateTransform transform = new RotateTransform(-90);
                tb.Transform = transform;
                tb.EndInit();
                ImageBrush gh = new ImageBrush(tb);
                gh.Stretch = Stretch.Fill;
                Gr.Background = gh;
            }
            else
            {
                Image rotated90 = new Image();
                TransformedBitmap tb = new TransformedBitmap();
                tb.BeginInit();
                tb.Source = imageSource;
                // Set image rotation.
                RotateTransform transform = new RotateTransform(90);
                tb.Transform = transform;
                tb.EndInit();
                ImageBrush gh = new ImageBrush(tb);
                gh.Stretch = Stretch.Fill;
                Gr.Background = gh;
            }
        }
        ));
    };
    camera.Closed += (g, j) => { cam.Dispatcher.Invoke(new Action(() => Sock_Closed(g, j))); };
    camera.Open();
    // Incoming audio: 8 kHz 16-bit mono pushed straight to the first output device.
    audio = new WebSocket4Net.WebSocket("ws://" + ip + ":9676");
    var ou = new WaveOut(WaveOut.Devices[0], 8000, 16, 1);
    audio.DataReceived += (gh, data) => { ou.Play(data.Data, 0, data.Data.Length); };
    audio.Closed += (h, d) => { ou.Dispose(); };
    audio.Open();
    // Outgoing microphone audio: captured locally and sent on the speaker
    // socket whenever a capture buffer fills (Oi_BufferFull).
    oi = new WaveIn(WaveIn.Devices[0], 8000, 16, 1, 400);
    speaker = new WebSocket4Net.WebSocket("ws://" + ip + ":9679");
    oi.BufferFull += Oi_BufferFull;
    speaker.Open();
}
/// <summary>
/// Creates a recorder around an existing capture device and recording sink.
/// </summary>
/// <param name="waveInput">The wave-in device to capture from.</param>
/// <param name="audioRecording">The recording this recorder writes into.</param>
public NAudioRecorder(WaveIn waveInput, IAudioRecording audioRecording)
{
    AudioRecording = audioRecording;
    _waveInput = waveInput;
}