/// <summary>
/// Creates a call session bound to <paramref name="Address"/>: opens a connected
/// UDP sender, a wide-band Speex encoder, and immediately starts the output
/// device on a jitter-buffered wave provider so received audio can play.
/// </summary>
public Call(IPEndPoint Address)
{
    this.Address = Address;

    // Outbound leg: connected UDP socket toward the remote party.
    UdpSender = new UdpClient();
    UdpSender.Connect(Address);

    _encoder = new SpeexEncoder(BandMode.Wide);

    // Inbound leg: jitter buffer feeding the sound card, started right away.
    SpeexProvider = new JitterBufferWaveProvider();
    SoundOut = new WaveOutEvent();
    SoundOut.Init(SpeexProvider);
    SoundOut.Play();
}
/// <summary>
/// Starts (or restarts) playback of <paramref name="musicFile"/>, lazily
/// creating the output device and the file reader the first time through.
/// Subsequent calls reuse both and simply call Play() again.
/// </summary>
public void play(string musicFile)
{
    if (this.outputDevice is null)
    {
        var device = new WaveOutEvent();
        device.PlaybackStopped += OnPlaybackStopped;
        this.outputDevice = device;
    }

    if (this.fileReader is null)
    {
        var reader = this.GetReader(musicFile);
        this.fileReader = reader;
        this.SetReaderVolume(reader);
        this.outputDevice.Init(reader);
    }

    this.outputDevice.Play();
}
/// <summary>
/// (Re)creates the output device on <paramref name="deviceNumber"/>, wires it
/// to the shared mixer, and starts it. Any previous device is disposed first.
/// </summary>
public void Init(int deviceNumber)
{
    outputDevice?.Dispose();

    var device = new WaveOutEvent { DeviceNumber = deviceNumber };
    device.Init(mixer);
    device.Play();

    outputDevice = device;
}
/// <summary>
/// Prepares <c>waveOut</c> for <paramref name="song"/>, choosing the reader
/// by the file's extension (.mp3 or .wav). Files with any other extension
/// leave the device uninitialized, same as before.
/// </summary>
private void CheckSong(string song)
{
    // BUG FIX: dispose any device created by a previous call so the driver
    // handle is not leaked each time a new song is checked.
    waveOut?.Dispose();
    waveOut = new WaveOutEvent();
    // BUG FIX: match on the actual extension instead of Contains(), which
    // matched ".mp3" anywhere in the path (e.g. "track.mp3.wav" picked the
    // MP3 reader) and was case-sensitive.
    if (song.EndsWith(".mp3", StringComparison.OrdinalIgnoreCase))
    {
        mp3Reader = new Mp3FileReader(song);
        waveOut.Init(mp3Reader);
    }
    else if (song.EndsWith(".wav", StringComparison.OrdinalIgnoreCase))
    {
        waveReader = new WaveFileReader(song);
        waveOut.Init(waveReader);
    }
}
// Applies a pitch-shift effect to the currently opened wav track and plays the
// shifted version on a fresh low-latency output device, pausing the original.
private void button1_Click(object sender, EventArgs e)
{
    TrackEffectPitch TEP;
    // NOTE(review): the meaning of the 4096 / 2L / 0.5f arguments is not
    // visible from here — presumably FFT size, overlap, and pitch factor;
    // confirm against TrackEffectPitch's constructor.
    TEP = new TrackEffectPitch(new AudioFileReader(openWavTrack.inputpath), 4096, 2L, 0.5f);
    WaveOutEvent wo = new WaveOutEvent { DesiredLatency = 150, NumberOfBuffers = 3 };
    // Pause the unshifted track so only the effected audio is audible.
    openWavTrack.pause(openWavTrack.output);
    wo.Init(new SampleToWaveProvider16(TEP));
    wo.Play();
    // NOTE(review): 'wo' is a local that is never stored or disposed; playback
    // relies on the device object staying alive on its own — consider keeping
    // it in a field so it can be stopped/disposed later.
}
/// <summary>
/// Stops playback and releases both the output device and the vorbis reader,
/// nulling the fields so the next playback starts from a clean slate.
/// </summary>
private void CleanUp()
{
    var device = outputDevice;
    if (device != null)
    {
        device.Stop();
        device.Dispose();
        outputDevice = null;
    }

    var reader = vorbis;
    if (reader != null)
    {
        reader.Dispose();
        vorbis = null;
    }
}
/// <summary>
/// Begins streaming MP3 audio pulled from <paramref name="queue"/>. Does
/// nothing when a playback session is already active.
/// </summary>
public void StartPlayback(FixedSizedQueue<byte[]> queue)
{
    // Guard first so we don't allocate a device we immediately abandon.
    if (_isPlayback)
    {
        return;
    }
    var waveOut = new WaveOutEvent();
    // BUG FIX: WaveOutEvent.Play() returns immediately, so wrapping the
    // reader in a 'using' disposed it right after Play() and killed the
    // stream. Keep both alive and dispose them when playback actually stops.
    var mp3 = new Mp3FileReader(new QueueStream(queue));
    waveOut.PlaybackStopped += (s, e) =>
    {
        waveOut.Dispose();
        mp3.Dispose();
    };
    waveOut.Init(mp3);
    waveOut.Play();
}
/// <summary>
/// Replaces the current output device (stopping and disposing any existing
/// one) with a fresh device on the configured device number, bound to
/// <paramref name="sampleProvider"/>. Thread-safe via <c>lockObj</c>.
/// </summary>
public void Init(ISampleProvider sampleProvider)
{
    lock (lockObj)
    {
        var previous = waveOutEvent;
        if (previous != null)
        {
            previous.Stop();
            previous.Dispose();
        }
        waveOutEvent = new WaveOutEvent { DeviceNumber = deviceNumber };
        waveOutEvent.Init(sampleProvider);
    }
}
// Play button: resumes when paused; otherwise builds the full playback chain
// (reader -> delay -> fade-in -> volume -> WaveOut) from the selected file,
// starts it, and primes the time labels and the position slider.
private void btnReproducir_Click(object sender, RoutedEventArgs e)
{
    if (output != null && output.PlaybackState == PlaybackState.Paused)
    {
        // Resume the paused session without rebuilding the chain.
        output.Play();
        btnDetener.IsEnabled = true;
        btnPausa.IsEnabled = true;
        btnReproducir.IsEnabled = false;
    }
    else
    {
        reader = new AudioFileReader(txtRutaArchivo.Text);
        delay = new Delay(reader);
        fades = new FadeInOutSampleProvider(delay, true);
        // Fade-in duration comes from the UI in seconds; convert to ms.
        double milisegundosFadein = Double.Parse(txtFadeIn.Text) * 1000.0;
        fades.BeginFadeIn(milisegundosFadein);
        output = new WaveOutEvent();
        fadingOut = false;
        output.DeviceNumber = cbSalida.SelectedIndex;
        output.PlaybackStopped += Output_PlaybackStopped;
        // Volume effect sits last in the chain, driven by the slider.
        volume = new EfectoVolumen(fades);
        volume.Volume = (float)sldVolumen.Value;
        output.Init(volume);
        output.Play();
        btnDetener.IsEnabled = true;
        btnPausa.IsEnabled = true;
        btnReproducir.IsEnabled = false;
        // TotalTime renders like "00:00:15.4654..."; keep only "hh:mm:ss".
        lblTiempoTotal.Text = reader.TotalTime.ToString().Substring(0, 8);
        lblTiempoActual.Text = reader.CurrentTime.ToString().Substring(0, 8);
        sldRepro.Maximum = reader.TotalTime.TotalSeconds;
        sldRepro.Value = reader.CurrentTime.TotalSeconds;
        timer.Start();
    }
}
/// <summary>
/// Speaks <paramref name="textToSpeak"/> on a background task using the voice
/// selected in the combo box, invoking <paramref name="callback"/> once
/// playback finishes. Empty text (or no voice selected) short-circuits
/// straight to the callback.
/// </summary>
private void Speak(string textToSpeak, Action callback = null)
{
    if (string.IsNullOrEmpty(textToSpeak))
    {
        callback?.Invoke();
        return;
    }
    try
    {
        Voice voice = _comboBox.SelectedItem as Voice;
        // BUG FIX: with nothing selected, 'as' yields null and the background
        // task threw an unobserved NullReferenceException. Fail soft instead.
        if (voice == null)
        {
            callback?.Invoke();
            return;
        }
        Task.Run(
            () =>
            {
                using (var speechSynthesizer = new SpeechSynthesizer())
                // BUG FIX: the MemoryStream was never disposed.
                using (var stream = new MemoryStream())
                {
                    // Render the speech into the in-memory wave stream.
                    speechSynthesizer.SelectVoice(voice.Value.VoiceInfo.Name);
                    speechSynthesizer.SetOutputToWaveStream(stream);
                    speechSynthesizer.Speak(textToSpeak);
                    stream.Position = 0;
                    using (WaveStream blockAlignedStream = new BlockAlignReductionStream(
                        WaveFormatConversionStream.CreatePcmStream(new WaveFileReader(stream))))
                    {
                        var autoResetEvent = new AutoResetEvent(false);
                        var waveOut = new WaveOutEvent { DeviceNumber = GetWaveOutDeviceNumber() };
                        waveOut.Init(blockAlignedStream);
                        // Signal completion so the wait below can finish.
                        waveOut.PlaybackStopped += (e, o) =>
                        {
                            Debug.WriteLine("nAudio: Stopped Speaking");
                            autoResetEvent.Set();
                        };
                        Debug.WriteLine($"nAudio: Saying '{textToSpeak}'");
                        waveOut.Play();
                        // Block this worker (not the UI) until playback stops.
                        autoResetEvent.WaitOne();
                        callback?.Invoke();
                        Debug.WriteLine("nAudio: Exited");
                    }
                }
            });
    }
    catch
    {
        // Deliberate best-effort: speech failures must not crash the caller.
    }
}
/// <summary>
/// Stop button: resets the transport controls and level meters, then tears
/// down the playback chain (output device, stream, and effect providers).
/// No-op when nothing is playing.
/// </summary>
private void btn_stop_Click(object sender, EventArgs e)
{
    if (output == null)
    {
        return;
    }
    // Disable the transport UI until a new track starts.
    btn_stop.Enabled = false;
    btn_pause.Enabled = false;
    vsb_volumeL.Enabled = false;
    vsb_volumeR.Enabled = false;
    vsb_volumeM.Enabled = false;
    trb_progress.Enabled = false;
    cb_controle.Enabled = false;
    // Clear all level meters.
    progressBar1.Value = 0;
    progressBar2.Value = 0;
    progressBar3.Value = 0;
    progressBar4.Value = 0;
    progressBar6.Value = 0;
    progressBar7.Value = 0;
    progressBar8.Value = 0;
    btn_pause.Text = "Play";
    lb_name.Text = "";
    lb_length.Text = "";
    trb_progress.Value = 0;
    lb_decTime.Text = "";
    // Tear down the device. (The original re-checked 'output != null' inside
    // the same guarded branch — redundant on the single UI thread, removed.)
    if (output.PlaybackState == NAudio.Wave.PlaybackState.Playing)
    {
        output.Stop();
    }
    output.Dispose();
    output = null;
    if (stream != null)
    {
        stream.Dispose();
        stream = null;
    }
    // Drop effect-provider references so they can be collected.
    pitch = null;
    stereo = null;
}
/// <summary>
/// Handling packets received on the RTP socket. One of the simplest, if not the simplest, cases is
/// PCMU audio packets. The handling can get substantially more complicated if the RTP socket is being
/// used to multiplex different protocols. This is what WebRTC does with STUN, RTP and RTCP.
/// Decoded mu-law samples are pushed into a BufferedWaveProvider and played as they arrive.
/// </summary>
/// <param name="rtpSocket">The raw RTP socket.</param>
/// <param name="rtpRecvSession">The session info for the RTP packets being received.</param>
/// <param name="cts">Cancellation source checked between packets to end the receive loop.</param>
private static async void RecvRtp(Socket rtpSocket, RTPSession rtpRecvSession, CancellationTokenSource cts)
{
    try
    {
        DateTime lastRecvReportAt = DateTime.Now;
        uint packetReceivedCount = 0;
        uint bytesReceivedCount = 0;
        byte[] buffer = new byte[512];
        // Accept packets from any source; family must match the socket's.
        IPEndPoint anyEndPoint = new IPEndPoint((rtpSocket.AddressFamily == AddressFamily.InterNetworkV6) ? IPAddress.IPv6Any : IPAddress.Any, 0);
        Log.LogDebug($"Listening on RTP socket {rtpSocket.LocalEndPoint}.");
        using (var waveOutEvent = new WaveOutEvent())
        {
            // 8 kHz / 16-bit / mono matches the decoded PCMU sample format.
            var waveProvider = new BufferedWaveProvider(new WaveFormat(8000, 16, 1));
            // Drop audio rather than grow the buffer when playback falls behind.
            waveProvider.DiscardOnBufferOverflow = true;
            waveOutEvent.Init(waveProvider);
            waveOutEvent.Play();
            var recvResult = await rtpSocket.ReceiveFromAsync(buffer, SocketFlags.None, anyEndPoint);
            Log.LogDebug($"Initial RTP packet recieved from {recvResult.RemoteEndPoint}.");
            while (recvResult.ReceivedBytes > 0 && !cts.IsCancellationRequested)
            {
                var rtpPacket = new RTPPacket(buffer.Take(recvResult.ReceivedBytes).ToArray());
                packetReceivedCount++;
                bytesReceivedCount += (uint)rtpPacket.Payload.Length;
                // Decode each mu-law byte to a 16-bit PCM sample (little-endian).
                for (int index = 0; index < rtpPacket.Payload.Length; index++)
                {
                    short pcm = NAudio.Codecs.MuLawDecoder.MuLawToLinearSample(rtpPacket.Payload[index]);
                    byte[] pcmSample = new byte[] { (byte)(pcm & 0xFF), (byte)(pcm >> 8) };
                    waveProvider.AddSamples(pcmSample, 0, 2);
                }
                recvResult = await rtpSocket.ReceiveFromAsync(buffer, SocketFlags.None, anyEndPoint);
            }
        }
    }
    catch (ObjectDisposedException) { } // This is how .Net deals with an in use socket being closed. Safe to ignore.
    catch (Exception excp)
    {
        Log.LogError($"Exception processing RTP. {excp}");
    }
}
/// <summary>
/// Builds the audio-player tab for <paramref name="resource"/>: selects an
/// NAudio reader based on the sound's container format and adds a Play button,
/// or a full-tab label with the error message when NAudio rejects the stream.
/// </summary>
public AudioPlayer(Resource resource, TabPage tab)
{
    var soundData = (Sound)resource.Blocks[BlockType.DATA];
    var stream = soundData.GetSoundStream();
    waveOut = new WaveOutEvent();
    try
    {
        // Pick the reader that matches the container; unknown formats leave
        // the device uninitialized, exactly as before.
        switch (soundData.Type)
        {
            case Sound.AudioFileType.WAV:
                waveOut.Init(new WaveFileReader(stream));
                break;
            case Sound.AudioFileType.MP3:
                var builder = new Mp3FileReader.FrameDecompressorBuilder(wf => new Mp3FrameDecompressor(wf));
                waveOut.Init(new Mp3FileReader(stream, builder));
                break;
            case Sound.AudioFileType.AAC:
                waveOut.Init(new StreamMediaFoundationReader(stream));
                break;
        }
        playButton = new Button
        {
            Text = "Play",
            TabIndex = 1,
            Size = new Size(100, 25),
        };
        playButton.Click += PlayButton_Click;
        tab.Controls.Add(playButton);
    }
    catch (Exception e)
    {
        Console.Error.WriteLine(e);
        var msg = new Label
        {
            Text = $"NAudio Exception: {e.Message}",
            Dock = DockStyle.Fill,
        };
        tab.Controls.Add(msg);
    }
}
// Polls the PlayStation Direct PS5 product page once a second and sounds an
// alarm when the page's screenshot hash (via GetHash — implementation not
// visible here) differs from a previously saved reference screenshot,
// i.e. the page content changed.
static void Main(string[] args)
{
    IWebDriver driver = new ChromeDriver(@"C:\Users\shoha\source\repos\SeleniumDemo\test\bin\Debug"); // Shoham
    string savedImagePath = @"C:\Temp\Screenshot.png"; // Shoham
    //IWebDriver driver = new ChromeDriver(@"C:\Users\regl9\source\repos\SeleniumDemo\SeleniumDemo\bin\Debug\netcoreapp3.1"); // Regis
    //string imagePath = @"C:\Users\regl9\source\repos\SeleniumDemo\imgs\Screenshot.png"; // Regis
    string audioFile = @"C:\Windows\Media\Alarm01.wav";
    // Set up alarm
    var audio = new AudioFileReader(audioFile);
    var outputDevice = new WaveOutEvent();
    //****//For taking the screenshot the first time
    //driver.Navigate().GoToUrl(@"https://direct.playstation.com/en-us/consoles/console/playstation5-digital-edition-console.3005817");
    //Thread.Sleep(1000);
    //Screenshot imageToSave = ((ITakesScreenshot)driver).GetScreenshot();
    //Bitmap img = Image.FromStream(new MemoryStream(imageToSave.AsByteArray)) as Bitmap;
    //img.Save(savedImagePath);
    //****//
    // Load the reference screenshot and hash it once up front.
    Bitmap savedImage = (Bitmap)Bitmap.FromFile(savedImagePath);
    List<bool> savedImageList = GetHash(savedImage);
    byte[] savedImageBytes = File.ReadAllBytes(savedImagePath); // Read in saved screenshot
    bool run = true;
    while (run)
    {
        // Reload the page and hash a fresh screenshot each iteration.
        driver.Navigate().GoToUrl(@"https://direct.playstation.com/en-us/consoles/console/playstation5-digital-edition-console.3005817");
        Thread.Sleep(1000);
        Screenshot currentImage = ((ITakesScreenshot)driver).GetScreenshot();
        Bitmap currentBitmap = Image.FromStream(new MemoryStream(currentImage.AsByteArray)) as Bitmap;
        List<bool> currentImageList = GetHash(currentBitmap);
        if (!savedImageList.SequenceEqual(currentImageList))
        {
            // Page changed — sound the alarm and end the polling loop.
            outputDevice.Init(audio);
            outputDevice.Play();
            while (outputDevice.PlaybackState == PlaybackState.Playing)
            {
                Thread.Sleep(10000); // Alarm will play for 10 seconds
                outputDevice.Stop();
                run = false;
            }
        }
    }
}
// Play button: resumes when paused; otherwise builds a fresh WaveOut/MP3
// pipeline from the path in txtRuta and starts playback, priming the time
// labels and the position slider.
private void btnPlay_Click(object sender, RoutedEventArgs e)
{
    if (output != null && output.PlaybackState == PlaybackState.Paused)
    {
        // Resume the paused session without rebuilding the reader.
        output.Play();
        btnPlay.IsEnabled = false;
        btnPause.IsEnabled = true;
        btnStop.IsEnabled = true;
    }
    else
    {
        if (txtRuta.Text != null && txtRuta.Text != "")
        {
            output = new WaveOutEvent();
            output.PlaybackStopped += OnPlaybackStop;
            reader = new Mp3FileReader(txtRuta.Text);
            // WaveOut configuration
            output.DeviceNumber = cbDispositivos.SelectedIndex;
            output.NumberOfBuffers = 2;
            output.DesiredLatency = 150;
            output.Init(reader);
            output.Play();
            btnStop.IsEnabled = true;
            btnPause.IsEnabled = true;
            btnPlay.IsEnabled = false;
            // TotalTime renders like "00:00:15.465456456"; keep only "hh:mm:ss".
            lblDuration.Text = reader.TotalTime.ToString().Substring(0, 8);
            lblPosition.Text = reader.CurrentTime.ToString().Substring(0, 8);
            sldPosition.Maximum = reader.TotalTime.TotalSeconds;
            sldPosition.Value = 0;
            timer.Start();
        }
        else
        {
            // TODO: prompt the user to choose a file first.
        }
    }
}
/// <summary>
/// Initializes the sound output: creates a device on the configured output
/// device number and binds the sample provider to it. Any failure is wrapped
/// in a <c>RAudioNAudioException</c>.
/// </summary>
private void InicialitzarSo()
{
    try
    {
        var device = new WaveOutEvent();
        device.DeviceNumber = OutDev;
        ou = device;
        waveOut = ou;
        waveOut.Init(sampleProvider);
    }
    catch (Exception e)
    {
        throw new RAudioNAudioException("error al inicialitzar el so", e);
    }
}
/// <summary>
/// Handles the 'WaveOutEvent.PlaybackStopped'.
/// @note The wave-device's output buffer needs to be cleared or else
/// the buffer will *usually* just grow larger and larger; however
/// 'WaverF' uses the buffer's current position to draw the track-caret
/// in the wave-panel so the output-buffer needs to be reset (recreated)
/// whenever the 'PlaybackStopped' event fires.
/// </summary>
/// <param name="sender">the WaveOutEvent whose playback stopped</param>
/// <param name="args">stop details (unused here)</param>
void OnPlaybackStopped(object sender, StoppedEventArgs args)
{
    if (!_close)
    {
        // Recreate the device so its position counter resets (see note above);
        // the new instance needs a fresh subscription to this handler.
        _waveout.Dispose(); // ... gr
        _waveout = new WaveOutEvent(_wavereader);
        _waveout.PlaybackStopped += OnPlaybackStopped;
        EnableButtons();
        bu_play.Image = global::FXE_Generator.Properties.Resource.transport_play;
        _t1.Stop();
        // Repaint the wave panel so the playback caret disappears.
        pa_wave.Invalidate();
    }
}
/// <summary>
/// Plays this instance's file to completion, polling once a second until the
/// output device reports playback has ended. Both the device and the reader
/// are disposed when the method returns.
/// </summary>
public async Task Play()
{
    using (var device = new WaveOutEvent())
    using (var reader = new AudioFileReader(this.File.FullName))
    {
        device.Init(reader);
        device.Play();
        while (device.PlaybackState == PlaybackState.Playing)
        {
            await Task.Delay(1000);
        }
    }
}
// Receives raw 16-bit mono PCM over UDP (via 'newsock') and plays it as it
// arrives. NOTE: this blocks forever — Receive() is synchronous and the loop
// has no exit condition — so it must run on a dedicated thread.
private void Play()
{
    WaveOutEvent output = new WaveOutEvent();
    // 'sample_' is the sample rate; the stream is 16-bit mono.
    BufferedWaveProvider buffer = new BufferedWaveProvider(new WaveFormat(sample_, 16, 1));
    // "Sound comes through smoothly with this setting :)" (translated from Turkish)
    //buffer.BufferLength = 2560 * 16;
    //buffer.DiscardOnBufferOverflow = true;
    output.Init(buffer);
    output.Play();
    for (; ;)
    {
        IPEndPoint remoteEP = null;
        byte[] data = newsock.Receive(ref remoteEP);
        // Each datagram's payload is appended straight to the playback buffer.
        buffer.AddSamples(data, 0, data.Length);
    }
}
/// <summary>
/// Plays the shared audio file for <paramref name="entity"/>: rewinds it,
/// applies entity-derived volume and stereo panning, and starts a fresh
/// output device whose stop event is forwarded to the handler.
/// </summary>
public void Play(Entity entity)
{
    Stop();
    IsPlaying = true;
    currentEntity = entity;

    // Rewind and apply entity-dependent loudness.
    audioFile.Position = 0;
    audioFile.Volume = Utils.CalculateVolume(entity);

    // Pan left/right according to the entity's position.
    panner = new PanningSampleProvider(audioFile) { Pan = Utils.CalculateStereo(entity) };

    outputDevice = new WaveOutEvent();
    outputDevice.PlaybackStopped += OutputDevice_PlaybackStopped;
    outputDevice.Init(panner);
    outputDevice.Play();
}
/// <summary>
/// Plays the bundled "AccessDenied.mp3" (next to the working directory)
/// synchronously, blocking the calling thread until playback finishes.
/// </summary>
public void PlayAccessDenied()
{
    var file = Path.Combine(Directory.GetCurrentDirectory(), "AccessDenied.mp3");
    using (var reader = new AudioFileReader(file))
    using (var device = new WaveOutEvent())
    {
        device.Init(reader);
        device.Play();
        // Poll once a second until the device reports playback has ended.
        while (device.PlaybackState == PlaybackState.Playing)
        {
            Thread.Sleep(1000);
        }
    }
}
/// <summary>
/// Trackbar handler: restarts a 500→2000 Hz sweep tone on a fresh output
/// device, with volume taken from the first trackbar (0..10 mapped to 0..1).
/// </summary>
private void Five_trackbar_Scroll(object sender, EventArgs e)
{
    if (outPutDevice != null)
    {
        outPutDevice.Stop();
        // BUG FIX: the previous device was abandoned without Dispose(),
        // leaking the underlying waveOut handle on every scroll event.
        outPutDevice.Dispose();
    }
    outPutDevice = new WaveOutEvent();
    generator = new SignalGenerator()
    {
        Gain = 0.2,
        Frequency = 500,
        FrequencyEnd = 2000,
        Type = SignalGeneratorType.Sweep,
    };
    outPutDevice.Volume = One_trackbar.Value / 10f;
    outPutDevice.Init(generator);
    outPutDevice.Play();
}
/// <summary>
/// Plays the "(2).wav" sibling of <paramref name="path"/> on a background
/// task, disposing the device and reader when playback stops.
/// </summary>
public static void PlayAudioTrack(string path)
{
    var task = Task.Factory.StartNew(() =>
    {
        var player = new WaveOutEvent();
        var stream = new WaveFileReader(Path.GetFileNameWithoutExtension(path) + "(2).wav");
        // BUG FIX: subscribe BEFORE Play() so a very short track cannot stop
        // before the cleanup handler is attached (which leaked both objects).
        player.PlaybackStopped += (sender, args) =>
        {
            player.Dispose();
            stream.Dispose();
        };
        player.Init(new WaveChannel32(stream));
        player.Play();
    });
}
// Opens a file picker and plays the chosen audio file pitch-shifted by the
// 'Pitch' field on a fresh low-latency output device.
private void button2_Click(object sender, EventArgs e)
{
    OpenFileDialog OFD = new OpenFileDialog();
    if (OFD.ShowDialog() == DialogResult.OK)
    {
        // NOTE(review): 4096 / 8L are presumably FFT size and overlap —
        // confirm against SMBPitchShiftingSampleProvider's constructor.
        SMB = new SMBPitchShiftingSampleProvider(new AudioFileReader(OFD.FileName), 4096, 8L, Pitch);
        WaveOutEvent wo = new WaveOutEvent { DesiredLatency = 150, NumberOfBuffers = 3 };
        wo.Init(new SampleToWaveProvider16(SMB));
        wo.Play();
        // NOTE(review): 'wo' is a local that is never stored or disposed;
        // playback relies on the device staying alive on its own — consider
        // keeping it in a field so it can be stopped/disposed later.
    }
}
/// ------------------------------------------------------------------------------------
/// <summary>
/// Disposes the stopped device; when it is still the active one, moves the
/// cursor to the stream's current time, clears the playback state, and raises
/// the PlaybackStopped event with the original start/end times.
/// </summary>
protected virtual void OnPlaybackStopped(WaveOutEvent sender, TimeSpan startTime, TimeSpan endTime)
{
    sender.Dispose();
    // Ignore stale events from a device that has already been replaced.
    if (_waveOut != sender)
    {
        return;
    }
    SetCursor(_playbackStream.CurrentTime);
    _waveOut = null;
    _scrollCalculator = null;
    PlaybackStopped?.Invoke(this, startTime, endTime);
}
/// <summary>
/// Diagnostic action: synchronously plays a hard-coded MP3 file on the server,
/// blocking the request thread until playback ends, then returns empty.
/// </summary>
public IActionResult TestMp3()
{
    var file = @"D:\Music\mp3\Western\Popular\Gerry Rafferty\City to City\Gerry Rafferty - 01 The Ark.mp3";
    using (var reader = new AudioFileReader(file))
    using (var device = new WaveOutEvent())
    {
        device.Init(reader);
        device.Play();
        // Poll once a second until playback finishes.
        while (device.PlaybackState == PlaybackState.Playing)
        {
            Thread.Sleep(1000);
        }
    }
    return new EmptyResult();
}
/// <summary>
/// Diagnostic action: synchronously plays a hard-coded FLAC file through
/// MediaFoundation on the server, blocking the request thread until playback
/// ends, then returns empty.
/// </summary>
public IActionResult TestFlac2()
{
    var file = @"D:\Music\flac\Western\Popular\Gerry Rafferty\City to City\1978_077 Gerry Rafferty - Baker Street.flac";
    using (var reader = new MediaFoundationReader(file))
    using (var device = new WaveOutEvent())
    {
        device.Init(reader);
        device.Play();
        // Poll once a second until playback finishes.
        while (device.PlaybackState == PlaybackState.Playing)
        {
            Thread.Sleep(1000);
        }
    }
    return new EmptyResult();
}
/// <summary>
/// Opens <paramref name="fileName"/> from the configured sound folder, starts
/// playback immediately, and returns an Audio handle exposing both the reader
/// and the output device so the caller can manage their lifetime.
/// </summary>
public Audio GetMusic(string fileName)
{
    // Path.Combine handles trailing separators correctly, unlike the previous
    // manual "\\" concatenation.
    var audioFile = new AudioFileReader(Path.Combine(_appSettingsProvider.SoundFilesLocation, fileName));
    var outputDevice = new WaveOutEvent();
    outputDevice.Init(audioFile);
    outputDevice.Play();
    return new Audio()
    {
        Reader = audioFile,
        Output = outputDevice
    };
}
// Synchronously plays a local audio file (exercised with .m4a / .mp3 / .mp4
// style sources), polling once a second until playback completes.
void test_play_fileLocal(string url)
{
    using (var reader = new AudioFileReader(url))
    using (var device = new WaveOutEvent())
    {
        device.Init(reader);
        device.Play();
        while (device.PlaybackState == PlaybackState.Playing)
        {
            Thread.Sleep(1000);
        }
    }
}
/// <summary>
/// Initializes the audio subsystem: logs the system-default render device and
/// allocates one BGM channel plus four sound-effect output channels.
/// </summary>
public static void setup()
{
    ready = true;
    // Report the default output so misrouted audio is easy to diagnose.
    var enumerator = new MMDeviceEnumerator();
    MMDevice defaultDevice = enumerator.GetDefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);
    Console.WriteLine("Audio device: " + defaultDevice.FriendlyName);

    bgmChannel = new WaveOutEvent();
    seChannel1 = new WaveOutEvent();
    seChannel2 = new WaveOutEvent();
    seChannel3 = new WaveOutEvent();
    seChannel4 = new WaveOutEvent();
}