Exemplo n.º 1
0
        static void Test()
        {
            // Route the default capture device straight to the default render device.
            var soundIn = new WasapiCapture();
            soundIn.Initialize();

            var soundInSource = new RealTimeSoundInSource(soundIn);

            var soundOut = new WasapiOut();
            soundOut.Initialize(soundInSource);

            soundIn.Start();
            soundOut.Play();

            // Whenever playback stops, restart it from a worker thread.
            soundOut.Stopped += (s, e) =>
            {
                Console.WriteLine("I'm dead but not dead, P.P.A.P");
                Task.Run(() => soundOut.Play());
            };

            // Block forever; each ENTER simply loops again.
            while (true)
            {
                Console.ReadLine();
            }
        }
Exemplo n.º 2
0
        /// <summary>
        /// Captures loopback audio from <paramref name="sourceDevice"/> and plays it
        /// on <paramref name="targetDevice"/>, restarting playback if it ever stops.
        /// </summary>
        private static void StartCapture(MMDevice sourceDevice, MMDevice targetDevice)
        {
            var soundIn = new WasapiLoopbackCapture {
                Device = sourceDevice
            };

            soundIn.Initialize();

            var soundOut = new WasapiOut()
            {
                Latency = 100, Device = targetDevice
            };

            soundOut.Initialize(new SoundInSource(soundIn));

            soundIn.Start();
            soundOut.Play();
            while (true)
            {
                // Fix: previously Play() was called on every iteration and the loop
                // busy-spun (no sleep) whenever playback was not in the Playing state.
                // Only restart when actually stopped, and always pace the poll.
                if (soundOut.PlaybackState != PlaybackState.Playing)
                {
                    soundOut.Play();
                }
                Thread.Sleep(500); // poll twice per second instead of spinning
            }
        }
Exemplo n.º 3
0
 /// <summary>Starts playback, but only once a source has been attached.</summary>
 public void Play()
 {
     if (source == null)
     {
         return;
     }

     audioOut.Play();
 }
        static void Main(string[] args)
        {
            // Input buffer for an 8 kHz, 8-bit, mono source.
            // (A typical 44.1 kHz / 16-bit / stereo file such as sample.wav would use
            //  new WaveFormat(44100, 16, 2) instead.)
            var inputBuffer = new BufferedWaveProvider(new WaveFormat(8000, 8, 1));

            // Volume could be adjusted by decorating the buffer:
            //var wavProvider = new VolumeWaveProvider16(inputBuffer);
            //wavProvider.Volume = 0.1f;

            // Play back on the default render endpoint.
            var playbackDevice = new MMDeviceEnumerator()
                                 .GetDefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);

            // Start accepting audio from the external (dummy) source.
            Task feeder = StartDummySoundSource(inputBuffer);

            using (IWavePlayer player = new WasapiOut(playbackDevice, AudioClientShareMode.Shared, false, 200))
            {
                // Wire the input buffer to the output and begin playback.
                //player.Init(wavProvider);
                player.Init(inputBuffer);
                player.Play();

                Console.WriteLine("Press ENTER to exit...");
                Console.ReadLine();

                player.Stop();
            }
        }
        // Starts the live microphone -> effects -> speaker chain using the devices
        // currently selected in the mic/speaker combo boxes, then enables the UI timer.
        // NOTE(review): both endpoints run in exclusive mode at 5 ms latency — confirm
        // the selected devices support exclusive mode, or Initialize() will fail.
        private void button_start_Click(object sender, EventArgs e)
        {
            wavein        = null;
            wavein        = new WasapiCapture(false, AudioClientShareMode.Exclusive, 5);
            wavein.Device = inputDevices[comboBox_mic.SelectedIndex];
            wavein.Initialize();
            wavein.Start();

            // Wrap the capture in a source that emits silence when no data is queued.
            source = new SoundInSource(wavein)
            {
                FillWithZeros = true
            };
            //add my special effects in the chain
            efxProcs             = new EfxProcs(source.ToSampleSource().ToMono());
            efxProcs.gain        = linearGain; //keep track of this changing value
            efxProcs.pitchFactor = pitchShift; //keep track of pitch

            waveout        = null;
            waveout        = new WasapiOut(false, AudioClientShareMode.Exclusive, 5);
            waveout.Device = outputDevices[comboBox_speaker.SelectedIndex];
            waveout.Initialize(efxProcs.ToWaveSource()); //source.ToSampleSource().ToWaveSource());//
            waveout.Play();
            //CSCore.Streams.SampleConverter.SampleToIeeeFloat32 sampleToIeee = new CSCore.Streams.SampleConverter.SampleToIeeeFloat32(source.ToSampleSource());
            timer1.Enabled = true;
        }
Exemplo n.º 6
0
        /// <summary>
        /// Starts playback of <c>AudioFile</c> on a freshly created WASAPI output.
        /// No-op when audio is already playing. Failures are handled best-effort:
        /// state is reset so a later call can retry.
        /// </summary>
        public void PlayAudio()
        {
            if (_AudioPlaying)
            {
                return;
            }
            try
            {
                if (AdjustVolumeOnPlay)
                {
                    AdjustAudioVolume();
                }

                _AudioPlaying              = true;
                _AudioOut                  = new WasapiOut();
                _AudioOut.PlaybackStopped += OutputDevice_PlaybackStopped;

                _AudioReader = new AudioFileReader(AudioFile);
                _AudioOut.Init(_AudioReader);
                _AudioOut.Play();
            }
            catch (Exception)
            {
                // Fix: the caught exception variable was unused and the partially
                // constructed reader/output leaked. Release them and reset state so
                // the next PlayAudio() call starts clean (still best-effort, no rethrow).
                _AudioReader?.Dispose();
                _AudioReader = null;
                _AudioOut?.Dispose();
                _AudioOut     = null;
                _AudioPlaying = false;
            }
        }
Exemplo n.º 7
0
        public void TestAudio()
        {
            // Renders an SSML prompt (audio cue + IPA phoneme hints) to an in-memory
            // WAV and plays it back, blocking until the output reports Stopped.
            EventWaitHandle playbackDone = new AutoResetEvent(false);

            using (MemoryStream wavStream = new MemoryStream())
            using (SpeechSynthesizer synth = new SpeechSynthesizer())
            {
                synth.SetOutputToWaveStream(wavStream);

                synth.SpeakSsml("<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?><speak version = \"1.0\" xmlns = \"http://www.w3.org/2001/10/synthesis\" xml:lang=\"en-GB\"><s><audio src=\"C:\\Users\\jgm\\Desktop\\positive.wav\"/>You are travelling to the <phoneme alphabet=\"ipa\" ph=\"ˈlaʊ.təns\">Luyten's</phoneme> <phoneme alphabet=\"ipa\" ph=\"stɑː\">Star</phoneme> system.</s></speak>");
                wavStream.Seek(0, SeekOrigin.Begin);

                IWaveSource source = new WaveFileReader(wavStream);

                var soundOut = new WasapiOut();
                soundOut.Stopped += (s, e) => playbackDone.Set();

                soundOut.Initialize(source);
                soundOut.Play();

                playbackDone.WaitOne();
                soundOut.Dispose();
                source.Dispose();
            }
        }
Exemplo n.º 8
0
        public void TestExtendedSource()
        {
            // Speaks a short phrase, pads it with 2000 ms of extra duration
            // (presumably so the reverb tail is not cut off — see below), applies a
            // DMO reverb, and plays the result until the output reports Stopped.
            EventWaitHandle playbackDone = new AutoResetEvent(false);

            using (MemoryStream wavStream = new MemoryStream())
            using (SpeechSynthesizer synth = new SpeechSynthesizer())
            {
                synth.SetOutputToWaveStream(wavStream);
                synth.Speak("Test.");
                wavStream.Seek(0, SeekOrigin.Begin);

                IWaveSource source = new ExtendedDurationWaveSource(new WaveFileReader(wavStream), 2000).AppendSource(x => new DmoWavesReverbEffect(x)
                {
                    ReverbMix = -10
                });

                var soundOut = new WasapiOut();
                soundOut.Stopped += (s, e) => playbackDone.Set();

                soundOut.Initialize(source);
                soundOut.Play();

                playbackDone.WaitOne();
                soundOut.Dispose();
                source.Dispose();
            }
        }
Exemplo n.º 9
0
        /// <summary>
        /// Plays the notification sound on the newly-default playback device.
        /// Devices that are not playback devices are ignored; playback runs on a
        /// background task and blocks that task until the clip finishes.
        /// </summary>
        public void NotifyDefaultChanged(IAudioDevice audioDevice)
        {
            if (audioDevice.Type != AudioDeviceType.Playback)
            {
                return;
            }

            var task = new Task(() =>
            {
                using (var memoryStreamedSound = GetStreamCopy())
                {
                    var device = _deviceEnumerator.GetDevice(audioDevice.Id);
                    using (var output = new WasapiOut(device, AudioClientShareMode.Shared, true, 10))
                    using (var reader = new WaveFileReader(memoryStreamedSound))
                    {
                        // Fix: the WaveFileReader was previously never disposed.
                        output.Init(reader);
                        output.Play();
                        // Poll every 0.5 s until the clip has finished playing.
                        while (output.PlaybackState == PlaybackState.Playing)
                        {
                            Thread.Sleep(500);
                        }
                    }
                }
            });

            task.Start();
        }
        public void TestPhonemes()
        {
            // Speaks a destination announcement using an IPA phoneme hint for
            // "Procyon" and plays the rendered WAV until playback stops.
            EventWaitHandle playbackDone = new AutoResetEvent(false);

            using (MemoryStream wavStream = new MemoryStream())
            using (SpeechSynthesizer synth = new SpeechSynthesizer())
            {
                synth.SetOutputToWaveStream(wavStream);

                synth.SpeakSsml("<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?><speak version = \"1.0\" xmlns = \"http://www.w3.org/2001/10/synthesis\" xml:lang=\"en-GB\"><s>You are travelling to the <phoneme alphabet=\"ipa\" ph=\"ˈprəʊˌsɪən\">Procyon</phoneme> system.</s></speak>");
                wavStream.Seek(0, SeekOrigin.Begin);

                IWaveSource source = new WaveFileReader(wavStream);

                var soundOut = new WasapiOut();
                soundOut.Stopped += (s, e) => playbackDone.Set();

                soundOut.Initialize(source);
                soundOut.Play();

                playbackDone.WaitOne();
                soundOut.Dispose();
                source.Dispose();
            }
        }
Exemplo n.º 11
0
        // Re-creates the WASAPI output for the newly selected device, re-wires the
        // PFL buffer through a resampler matched to the device sample rate, and
        // starts playback.
        // NOTE(review): the previous output is only paused, never disposed — the old
        // device handle appears to leak on every selection change; confirm and Dispose().
        private void AudioOutput_SelectedIndexChanged(object sender, EventArgs e)
        {
            if (output != null && output.PlaybackState != PlaybackState.Stopped)
            {
                output.Pause();
            }

            output = new WasapiOut(outputs[audioOutputSelector.SelectedIndex], AudioClientShareMode.Shared, true, outputLatency);

            // Cache the device mix-format parameters for the log line below.
            bitsPrSample = output.OutputWaveFormat.BitsPerSample;
            sampleRate   = output.OutputWaveFormat.SampleRate;
            channels     = output.OutputWaveFormat.Channels;


            // Set the WaveFormat
            outputFormat = WaveFormat.CreateIeeeFloatWaveFormat(sampleRate, channels);

            // assumes internalFormatStereo matches the data written into pflBuffer —
            // TODO confirm; the resampler below converts it to the device rate.
            pflBuffer           = new BufferedWaveProvider(internalFormatStereo);
            pflBuffer.ReadFully = true;
            pflBuffer.DiscardOnBufferOverflow = true;

            WdlResamplingSampleProvider resampler = new WdlResamplingSampleProvider(pflBuffer.ToSampleProvider(), outputFormat.SampleRate);



            output.Init(resampler);
            output.Play();

            Logger.WriteLine("SET OUTPUT FORMAT: "
                             + "Sample Rate: " + sampleRate
                             + ", BitsPrSasmple: " + bitsPrSample
                             + ", Channels: " + channels);
        }
Exemplo n.º 12
0
        // Timer tick: pause the output when the buffer runs low (< 40 ms), resume it
        // once data is available again, and mirror the playback state on the three
        // indicator lamps (exactly one lamp is lit at a time).
        private void outputBufferTimerCheck(object sender, EventArgs e)
        {
            var lowWater = TimeSpan.FromMilliseconds(40);

            if (outputBuffer.BufferedDuration < lowWater && output.PlaybackState != PlaybackState.Paused)
            {
                output.Pause();
                Logger.WriteLine(DateTime.Now.TimeOfDay + ": OUTPUT PAUSED");
            }
            else if (outputBuffer.BufferedDuration > lowWater && output.PlaybackState != PlaybackState.Playing)
            {
                output.Play();
                Logger.WriteLine(DateTime.Now.TimeOfDay + ": OUTPUT PLAYING");
            }

            // UPDATE OUTPUT STATE INDICATORS
            var state = output.PlaybackState;

            if (state == PlaybackState.Playing)
            {
                outputPlayingIndicator.turnOnUpper();
            }
            else
            {
                outputPlayingIndicator.turnOffUpper();
            }

            if (state == PlaybackState.Paused)
            {
                outputPausedIndicator.turnOnUpper();
            }
            else
            {
                outputPausedIndicator.turnOffUpper();
            }

            if (state == PlaybackState.Stopped)
            {
                outputStoppedIndicator.turnOnUpper();
            }
            else
            {
                outputStoppedIndicator.turnOffUpper();
            }
        }
Exemplo n.º 13
0
        // Receives audio over the network (control on tcpPort, data on udpPort) and
        // plays it on the default WASAPI output until a key is pressed.
        // NOTE(review): soundIn/soundOut are fields declared elsewhere; neither is
        // disposed here — presumably handled by the caller or process exit. Confirm.
        static void ReceiverMain(int tcpPort, int udpPort)
        {
            //UdpReceiver soundIn;
            soundIn = new UdpReceiver(tcpPort, udpPort);
            soundIn.Initialize();

            // FillWithZeros keeps the output fed with silence when no packet arrived.
            IWaveSource source = new SoundInSource(soundIn)
            {
                FillWithZeros = true
            };

            soundIn.Start();

            //create a soundOut instance to play the data
            soundOut = new WasapiOut();
            //initialize the soundOut with the echoSource
            //the echoSource provides data from the "source" and applies the echo effect
            //the "source" provides data from the "soundIn" instance
            soundOut.Initialize(source);

            //play
            soundOut.Play();

            /*System.Timers.Timer aTimer = new System.Timers.Timer();
             * aTimer.Elapsed += new ElapsedEventHandler(OnTimedEvent);
             * aTimer.Interval = 2000;
             * aTimer.Enabled = true;*/

            Console.WriteLine("Receiving audio... Press any key to exit the program.");
            Console.ReadKey();
        }
Exemplo n.º 14
0
        static void Main(string[] args)
        {
            // Source format: 16 kHz, 16-bit, mono.
            var inputBuffer = new BufferedWaveProvider(new WaveFormat(16000, 16, 1));

            // Reserve 150 ms of buffer and discard data on overflow instead of throwing.
            inputBuffer.BufferDuration          = new TimeSpan(0, 0, 0, 0, 150);
            inputBuffer.DiscardOnBufferOverflow = true;

            // Report the resulting buffer size in bytes.
            Console.WriteLine("BufferSize= " + inputBuffer.BufferLength);

            // Play back on the default render endpoint.
            var playbackDevice = new MMDeviceEnumerator().GetDefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);

            // Start accepting UDP audio packets into the buffer.
            Task receiver = StartReceiveAudioPacketAsync(inputBuffer);

            using (IWavePlayer player = new WasapiOut(playbackDevice, AudioClientShareMode.Shared, false, 20))
            {
                // Connect the input buffer to the output and begin playback.
                player.Init(inputBuffer);
                player.Play();

                Console.WriteLine("Press ENTER to exit...");
                Console.ReadLine();

                player.Stop();
            }
        }
Exemplo n.º 15
0
        /// <summary>
        /// Sets up the Opus decoder, a zero-filled internal buffering source and a
        /// WASAPI output, optionally inserting a single-block-read notification tap,
        /// then starts playback.
        /// </summary>
        public void Initialize()
        {
            _wasapiOut   = new WasapiOut();
            _opusDecoder = OpusDecoder.Create(48000, 1);

            // 48 kHz, 16-bit, mono PCM.
            //var waveForm = new WaveFormatExtensible(48000, 16, 1, Guid.Parse("00000003-0000-0010-8000-00aa00389b71"));
            var format = new WaveFormat(48000, 16, 1);

            _writeableBufferingSource = new WriteableBufferingSource(format)
            {
                FillWithZeros = true
            };

            // Default: play the buffering source directly.
            IWaveSource waveSource = _writeableBufferingSource;

            if (_triggerSingleBlockRead)
            {
                // Tap every sample block so SingleBlockRead subscribers are notified.
                var notifyStream = new SingleBlockNotificationStream(_writeableBufferingSource.ToSampleSource());
                notifyStream.SingleBlockRead += SingleBlockNotificationStreamOnSingleBlockRead;
                waveSource = notifyStream.ToWaveSource();
            }

            _wasapiOut.Initialize(waveSource);
            _wasapiOut.Play();
        }
Exemplo n.º 16
0
        /// <summary>
        /// Plays the given sound file on the configured sound-command device at the
        /// given volume, blocking until playback has finished.
        /// </summary>
        /// <param name="filename">File name, resolved relative to the sound path.</param>
        /// <param name="volume">Playback volume; exactly 0 skips playback with a warning.</param>
        public void Play(string filename, float volume)
        {
            var configuration = configurationManager.LoadConfiguration <AudioConfiguration>();

            configuration.InitializeConfiguration();

            var pathToFile = Path.Combine(soundPathProvider.Path, filename);

            if (!File.Exists(pathToFile))
            {
                return;
            }

            if (volume == 0f)
            {
                // Fix: the interpolated message contained no placeholder, so the
                // warning never named the affected file.
                logger.Warning($"Die Lautstärke für '{filename}' liegt bei 0 und kann nicht gehört werden.");
                return;
            }

            using var reader       = new MediaFoundationReader(pathToFile);
            using var volumeStream = new WaveChannel32(reader);
            using var outputStream = new WasapiOut(configuration.SelectedSoundCommandDevice, NAudio.CoreAudioApi.AudioClientShareMode.Shared, false, 10);
            volumeStream.Volume    = NormalizeVolume(volume);
            outputStream.Init(volumeStream);
            outputStream.Play();

            // Block for the clip length plus a small grace period, then stop.
            Thread.Sleep(reader.TotalTime.Add(TimeSpan.FromMilliseconds(100)));

            outputStream.Stop();
        }
Exemplo n.º 17
0
        // Tears down the existing capture/playback pair (if any) and rebuilds the
        // input -> buffer -> mixer -> resampler -> output chain on the new devices.
        public void UpdateDevices(MMDevice input, MMDevice output)
        {
            // Release the previous output first, then the previous input.
            outp?.Stop();
            outp?.Dispose();

            inp?.StopRecording();
            inp?.Dispose();

            // Event-driven capture with 5 ms buffering; samples arrive via OnCapture.
            inp = new WasapiCapture(input, true, 5);
            inp.DataAvailable += OnCapture;

            buffer = new BufferedWaveProvider(inp.WaveFormat);

            mixer = new MixingWaveProvider32();
            mixer.AddInputStream(buffer);

            // Reuse the existing resampler across device changes when possible.
            if (resampler == null)
            {
                resampler = new AudioResampler(mixer);
            }
            else
            {
                resampler.Update(mixer);
            }

            outp = new WasapiOut(output, AudioClientShareMode.Shared, true, 5);
            outp.Init(resampler);

            inp.StartRecording();
            outp.Play();
        }
Exemplo n.º 18
0
        /// <summary>
        /// Plays the notification sound on the newly-default device. Devices that
        /// are not render endpoints are ignored; playback runs on a background task
        /// and blocks that task until the clip finishes.
        /// </summary>
        public void NotifyDefaultChanged(MMDevice audioDevice)
        {
            if (audioDevice.DataFlow != DataFlow.Render)
            {
                return;
            }

            var task = new Task(() =>
            {
                using (var memoryStreamedSound = GetStreamCopy())
                {
                    using (var output = new WasapiOut(audioDevice, AudioClientShareMode.Shared, true, 10))
                    using (var reader = new WaveFileReader(memoryStreamedSound))
                    {
                        // Fix: the WaveFileReader was previously never disposed.
                        output.Init(reader);
                        output.Play();
                        // Poll every 0.5 s until the clip has finished playing.
                        while (output.PlaybackState == PlaybackState.Playing)
                        {
                            Thread.Sleep(500);
                        }
                    }
                }
            });

            task.Start();
        }
Exemplo n.º 19
0
        /// <summary>
        /// Receives UDP audio packets and plays them back until the given
        /// CancellationTokenSource is cancelled.
        /// </summary>
        /// <param name="cancellTokenSource">Signals when reception should stop.</param>
        /// <returns>A task that completes once playback has been stopped.</returns>
        public async Task StartReceiveAsync(CancellationTokenSource cancellTokenSource)
        {
            // Source format: 16 kHz, 16-bit, mono.
            var bufferedWaveProvider = new BufferedWaveProvider(new WaveFormat(16000, 16, 1));

            // Reserve 150 ms of buffer and discard data on overflow instead of throwing.
            bufferedWaveProvider.BufferDuration          = new TimeSpan(0, 0, 0, 0, 150);
            bufferedWaveProvider.DiscardOnBufferOverflow = true;

            // Play back on the default WASAPI render endpoint.
            var mmDevice = new MMDeviceEnumerator().GetDefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);

            // Start accepting UDP packets into the buffer.
            Task t = StartReceiveAudioPacketAsync(bufferedWaveProvider);

            using (IWavePlayer wavPlayer = new WasapiOut(mmDevice, AudioClientShareMode.Shared, false, 20))
            {
                // Connect the input buffer to the output and begin playback.
                wavPlayer.Init(bufferedWaveProvider);
                wavPlayer.Play();

                // Poll asynchronously until cancellation is requested.
                while (!cancellTokenSource.IsCancellationRequested)
                {
                    await Task.Delay(100);
                }
                wavPlayer.Stop();
            }
        }
Exemplo n.º 20
0
        public void TestDistortion()
        {
            // Speaks a docking request, routes it through a DMO distortion effect,
            // and plays the result until the output reports Stopped.
            EventWaitHandle playbackDone = new AutoResetEvent(false);

            using (MemoryStream wavStream = new MemoryStream())
            using (SpeechSynthesizer synth = new SpeechSynthesizer())
            {
                // List the voices available on this machine.
                foreach (InstalledVoice voice in synth.GetInstalledVoices())
                {
                    Console.WriteLine(voice.VoiceInfo.Name);
                }

                synth.SetOutputToWaveStream(wavStream);
                synth.Speak("Anaconda golf foxtrot lima one niner six eight requesting docking.");
                wavStream.Seek(0, SeekOrigin.Begin);

                IWaveSource source = new WaveFileReader(wavStream);
                DmoDistortionEffect distortedSource = new DmoDistortionEffect(source)
                {
                    Edge             = 10,
                    PreLowpassCutoff = 4800
                };

                var soundOut = new WasapiOut();
                soundOut.Stopped += (s, e) => playbackDone.Set();

                soundOut.Initialize(distortedSource);
                soundOut.Play();

                playbackDone.WaitOne();

                soundOut.Dispose();
                distortedSource.Dispose();
                source.Dispose();
            }
        }
Exemplo n.º 21
0
        public Audio()
        {
            // Register the ogg-vorbis codec, build the mixer -> WASAPI chain and
            // start playback; audio failures are reported to the console, not fatal.
            try
            {
                CodecFactory.Instance.Register("ogg-vorbis",
                                               new CodecFactoryEntry(s => new OggSource(s).ToWaveSource(), ".ogg"));

                _mixer    = new SoundMixer();
                _soundOut = new WasapiOut();

                _soundOut.Initialize(_mixer.ToWaveSource());
                _soundOut.Play();
            }
            catch
            {
                Console.WriteLine("Could not load audio");
            }

            // Preload sound effects.
            _menu = LoadSound(AppContext.BaseDirectory + "assets/sfx/menu.wav");
            LoadSound(AppContext.BaseDirectory + "assets/sfx/fill.wav");

            _random          = new Random();
            _playlist        = new List <string>();
            _playingPlaylist = false;

            // Collect every .ogg file from the background-music playlist folder.
            foreach (var file in Directory.EnumerateFiles(AppContext.BaseDirectory + "/assets/bgm/playlist"))
            {
                if (file.EndsWith(".ogg"))
                {
                    _playlist.Add(file);
                }
            }
        }
Exemplo n.º 22
0
        // Measures end-to-end (key press -> metronome tick -> capture) system latency
        // by comparing key timestamps against audio timestamps over c_sampleCount
        // samples, and returns the average difference in raw ticks.
        public long RunMeasurementRoutine()
        {
            Console.WriteLine($"We will take {c_sampleCount} samples to calculate the end-to-end system latency.");
            Console.WriteLine("Setting up audio devices...");

            // Install the global key hook first, then give it a moment to settle.
            InterceptKeys.Instance.OnKey += OnKey;
            InterceptKeys.Start();
            Thread.Sleep(1000);
            _capture.Initialize();
            _capture.DataAvailable += OnData;
            _tracker.Start();
            _capture.Start();

            // Play the metronome ticks that the capture side listens for.
            _output.Initialize(new SampleToIeeeFloat32(new MetronomeGenerator()));
            _output.Play();

            // Blocks until the OnKey/OnData handlers have collected enough samples.
            _done.WaitOne();

            float delta = 0;

            // Average the per-sample (key - audio) differences. The /10000 below
            // implies ticks are 100 ns units — TODO confirm the tick source.
            for (int i = 0; i < c_sampleCount; i++)
            {
                delta += _keyTicks[i] - _audioTicks[i];
            }
            delta /= c_sampleCount;
            Console.WriteLine($"End-to-end latency: {delta / 10000}ms");
            Thread.Sleep(5000);
            return((long)(delta));
        }
Exemplo n.º 23
0
        // Speaks a destination announcement using IPA phoneme hints (the active SSML
        // pronounces "Luyten's Star") and plays the rendered WAV until the output
        // reports Stopped. The commented SpeakSsml lines are alternative test phrases.
        public void TestPhonemes()
        {
            EventWaitHandle waitHandle = new AutoResetEvent(false);

            using (MemoryStream stream = new MemoryStream())
                using (SpeechSynthesizer synth = new SpeechSynthesizer())
                {
                    synth.SetOutputToWaveStream(stream);

                    //synth.SpeakSsml("<?xml version=\"1.0\" encoding=\"UTF-8\"?><speak version=\"1.0\" xmlns=\"http://www.w3.org/2001/10/synthesis\" xml:lang=\"en-GB\"><s>This is your <phoneme alphabet=\"ipa\" ph=\"leɪkɒn\">Lakon</phoneme>.</s></speak>");

                    //synth.SpeakSsml("<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?><speak version = \"1.0\" xmlns = \"http://www.w3.org/2001/10/synthesis\" xml:lang=\"en-GB\"><s>You are travelling to the <phoneme alphabet=\"ipa\" ph=\"ˈdɛltə\">delta</phoneme> system.</s></speak>");
                    synth.SpeakSsml("<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?><speak version = \"1.0\" xmlns = \"http://www.w3.org/2001/10/synthesis\" xml:lang=\"en-GB\"><s>You are travelling to the <phoneme alphabet=\"ipa\" ph=\"ˈlaʊ.təns\">Luyten's</phoneme> <phoneme alphabet=\"ipa\" ph=\"stɑː\">Star</phoneme> system.</s></speak>");
                    //synth.SpeakSsml("<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?><speak version = \"1.0\" xmlns = \"http://www.w3.org/2001/10/synthesis\" xml:lang=\"en-GB\"><s>You are travelling to the <phoneme alphabet=\"ipa\" ph=\"bliːiː\">Bleae</phoneme> <phoneme alphabet=\"ipa\" ph=\"θuːə\">Thua</phoneme> system.</s></speak>");
                    //synth.SpeakSsml("<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?><speak version = \"1.0\" xmlns = \"http://www.w3.org/2001/10/synthesis\" xml:lang=\"en-GB\"><s>You are travelling to the Amnemoi system.</s></speak>");
                    //synth.Speak("You are travelling to the Barnard's Star system.");
                    stream.Seek(0, SeekOrigin.Begin);

                    IWaveSource source = new WaveFileReader(stream);

                    var soundOut = new WasapiOut();
                    // Release the wait below once playback has stopped.
                    soundOut.Stopped += (s, e) => waitHandle.Set();

                    soundOut.Initialize(source);
                    soundOut.Play();

                    waitHandle.WaitOne();
                    soundOut.Dispose();
                    source.Dispose();
                }
        }
        // Routes the device selected in comboBox to the output device selected in
        // comboBox_Copy. Render endpoints need loopback capture; real inputs use a
        // normal WASAPI capture.
        private void button_Click(object sender, RoutedEventArgs e)
        {
            MMDevice dev = (MMDevice)comboBox.SelectedItem;

            capture = mmdevicesOut.Contains(dev)
                ? new WasapiLoopbackCapture()
                : new WasapiCapture();
            capture.Device = dev;
            capture.Initialize();

            w = new WasapiOut();
            w.Device = (MMDevice)comboBox_Copy.SelectedItem;

            // Feed silence whenever the capture has no data queued.
            w.Initialize(new SoundInSource(capture)
            {
                FillWithZeros = true
            });

            capture.Start();
            w.Play();
        }
Exemplo n.º 25
0
        public void SoundInToSoundOutTest_Wasapi()
        {
            // Repeatedly wires a WASAPI capture to a WASAPI output and verifies that
            // playback is still running after two seconds, ten times in a row.
            const int iterations = 10;

            for (int attempt = 0; attempt < iterations; attempt++)
            {
                var waveIn = new WasapiCapture();
                waveIn.Initialize();
                waveIn.Start();

                var captureSource = new SoundInSource(waveIn)
                {
                    FillWithZeros = true
                };

                var soundOut = new WasapiOut();
                soundOut.Initialize(captureSource);
                soundOut.Play();

                Thread.Sleep(2000);

                Assert.AreEqual(PlaybackState.Playing, soundOut.PlaybackState);

                soundOut.Dispose();
                waveIn.Dispose();
            }
        }
Exemplo n.º 26
0
        /// <summary>
        /// Plays the given file (downmixed to mono) on the last active render device.
        /// With no <paramref name="ShouldStop"/> callback the call blocks for the
        /// track length; otherwise it polls the callback every five seconds.
        /// </summary>
        public static void Play(string filePath, Func <bool> ShouldStop = null)
        {
            using var enumerator = new MMDeviceEnumerator();
            using var device     = enumerator.EnumAudioEndpoints(DataFlow.Render, DeviceState.Active).Last();
            using var source     = CodecFactory.Instance.GetCodec(filePath)
                                   .ToSampleSource()
                                   .ToMono()
                                   .ToWaveSource();
            using var soundOut = new WasapiOut()
            {
                Latency = 100, Device = device
            };

            soundOut.Initialize(source);
            soundOut.Play();

            if (ShouldStop == null)
            {
                // Block for the full track length.
                Thread.Sleep(source.GetLength());
            }
            else
            {
                while (!ShouldStop())
                {
                    Thread.Sleep(5000);
                }
            }
            soundOut.Stop();
        }
Exemplo n.º 27
0
 /// <summary>Creates the WASAPI output, attaches the main mixer, begins playback
 /// and starts the timer.</summary>
 public void Start()
 {
     wasapiOut = new WasapiOut();
     wasapiOut.Init(mainMixer);
     wasapiOut.Play();

     timer.Start();
 }
Exemplo n.º 28
0
        /// <summary>
        /// Plays the given stream on a background thread at the current volume and
        /// raises PlaybackEnded when the stream finishes, unless a reset was requested.
        /// </summary>
        private void PlayStream(WaveStream waveStream)
        {
            new Thread(() =>
            {
                using (waveStream)
                {
                    volumeStream = new WaveChannel32(waveStream)
                    {
                        Volume = volumeControl.CurrentVolume, PadWithZeroes = true
                    };
                    Output = new WasapiOut(AudioClientShareMode.Shared, false, 300);
                    using (Output)
                    {
                        Output.Init(volumeStream);
                        Output.Play();

                        // Poll until the stream is exhausted or a reset is requested.
                        // Fix: use short-circuit && instead of the non-short-circuit
                        // bitwise & operator on booleans.
                        while (volumeStream.Position < volumeStream.Length && !reset)
                        {
                            Thread.Sleep(100);
                        }
                    }
                    Output = null;
                    if (!reset)
                    {
                        RaisePlaybackEnded();
                    }
                    reset = false;
                }
            }).Start();
        }
Exemplo n.º 29
0
        // Builds the WASAPI playback chain for the device chosen in ComboDevices,
        // optionally routing through dynamic range compression (DRC), and starts playback.
        private void InitialisePlayback()
        {
            selectedPlaybackDevice = (MMDevice)ComboDevices.SelectedItem;
            AudioClientShareMode shareMode = AudioClientShareMode.Shared; //set sound card to be shared between applications this is usually the default setting
            int  latency      = 20;                                       //wasapi can work at much lower latencie
            bool useEventSync = false;

            wasapiOut = new WasapiOut(selectedPlaybackDevice, shareMode, useEventSync, latency);
            /*device.AudioEndpointVolume.MasterVolumeLevelScalar = (float)sliderVolume.Value;*/ //set volume on physical device as opposed to the wasapi session , apparently this may be integrated into Naudio later
            drc = ApplyDRC.IsChecked.Value;                                                     // ascertain whether to apply drc
            wasapiOut.PlaybackStopped += OnPlaybackStopped;
            //  reader = new AudioFileReader("Paranoid.mp3");
            reader = new AudioFileReader(filePath);

            textBlockDuration.Text = reader.TotalTime.ToString();   //set slider display values
            textBlockPosition.Text = reader.CurrentTime.ToString();
            sliderPosition.Maximum = reader.TotalTime.TotalSeconds; //set max value of slider to track length

            if (drc)
            {
                // NOTE(review): with DRC on, `reader` (opened above for the sliders)
                // is not the playback source — a second CompressionReader re-opens the
                // file — and the timer/volume setup in the else branch is skipped.
                // Confirm this asymmetry is intended.
                CompressionReader drcaudio = new CompressionReader(filePath);
                wasapiOut.Init(drcaudio);
            }

            else
            {
                timer.Start();
                reader.Volume = (float)sliderVolume.Value;
                wasapiOut.Init(reader);
            }

            wasapiOut.Play();
        }
Exemplo n.º 30
0
 /// <summary>Starts playback, provided the sound output has been initialized.</summary>
 public override void TurnOn()
 {
     if (!_isInitialized)
     {
         return;
     }

     _soundOut.Play();
 }