Code example #1
        private static void StartCapture(MMDevice sourceDevice, MMDevice targetDevice)
        {
            var soundIn = new WasapiLoopbackCapture {
                Device = sourceDevice
            };

            soundIn.Initialize();

            var soundOut = new WasapiOut()
            {
                Latency = 100, Device = targetDevice
            };

            soundOut.Initialize(new SoundInSource(soundIn));

            soundIn.Start();
            soundOut.Play();
            // Poll every 500 ms and restart playback only if it has stopped;
            // the original called Play() on every iteration and busy-spun
            // whenever playback was not running.
            while (true)
            {
                if (soundOut.PlaybackState != PlaybackState.Playing)
                {
                    soundOut.Play();
                }
                Thread.Sleep(500);
            }
        }
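
A minimal teardown sketch (not in the original): since StartCapture loops forever, a caller needs some way to stop the chain; this assumes the CSCore soundIn/soundOut pair is handed back to the caller, and the method name is illustrative:

        private static void StopCapture(WasapiLoopbackCapture soundIn, WasapiOut soundOut)
        {
            soundOut.Stop();     //stop the render side before the capture feeding it
            soundOut.Dispose();
            soundIn.Stop();
            soundIn.Dispose();
        }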
Code example #2

        private void button_start_Click(object sender, EventArgs e)
        {
            wavein        = new WasapiCapture(false, AudioClientShareMode.Exclusive, 5);
            wavein.Device = inputDevices[comboBox_mic.SelectedIndex];
            wavein.Initialize();
            wavein.Start();

            source = new SoundInSource(wavein)
            {
                FillWithZeros = true
            };
            //add my special effects in the chain
            efxProcs             = new EfxProcs(source.ToSampleSource().ToMono());
            efxProcs.gain        = linearGain; //keep track of this changing value
            efxProcs.pitchFactor = pitchShift; //keep track of pitch

            waveout        = new WasapiOut(false, AudioClientShareMode.Exclusive, 5);
            waveout.Device = outputDevices[comboBox_speaker.SelectedIndex];
            waveout.Initialize(efxProcs.ToWaveSource());
            waveout.Play();
            timer1.Enabled = true;
        }
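
A hypothetical companion handler (not part of the original snippet) that tears down the CSCore chain built in button_start_Click before it is rebuilt:

        private void button_stop_Click(object sender, EventArgs e)
        {
            timer1.Enabled = false; //stop the UI polling timer first
            waveout?.Stop();
            waveout?.Dispose();
            waveout = null;
            wavein?.Stop();
            wavein?.Dispose();
            wavein = null;
        }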
Code example #3
        public WasapiGeneral()
        {
            output = new WasapiOut();

            isCaptureAvailable = false;
            isOutputAvailable  = false;
        }
Code example #4
        public void SoundInToSoundOutTest_Wasapi()
        {
            for (int i = 0; i < 10; i++)
            {
                var waveIn = new WasapiCapture();
                waveIn.Initialize();
                waveIn.Start();

                var waveInToSource = new SoundInSource(waveIn)
                {
                    FillWithZeros = true
                };

                var soundOut = new WasapiOut();
                soundOut.Initialize(waveInToSource);
                soundOut.Play();

                Thread.Sleep(2000);

                Assert.AreEqual(PlaybackState.Playing, soundOut.PlaybackState);

                soundOut.Dispose();
                waveIn.Dispose();
            }
        }
Code example #5
        public void PlayAudio()
        {
            if (_AudioPlaying)
            {
                return;
            }
            try
            {
                if (AdjustVolumeOnPlay)
                {
                    AdjustAudioVolume();
                }

                _AudioPlaying              = true;
                _AudioOut                  = new WasapiOut();
                _AudioOut.PlaybackStopped += OutputDevice_PlaybackStopped;

                _AudioReader = new AudioFileReader(AudioFile);
                _AudioOut.Init(_AudioReader);
                _AudioOut.Play();
            }
            catch (Exception)
            {
                // Swallow the error and reset the playing flag; consider logging here.
                _AudioPlaying = false;
            }
        }
Code example #6
File: Audio.cs Project: CarimA/GbJam8
        public Audio()
        {
            try
            {
                CodecFactory.Instance.Register("ogg-vorbis",
                                               new CodecFactoryEntry(s => new OggSource(s).ToWaveSource(), ".ogg"));

                _mixer    = new SoundMixer();
                _soundOut = new WasapiOut();

                _soundOut.Initialize(_mixer.ToWaveSource());
                _soundOut.Play();
            }
            catch
            {
                Console.WriteLine("Could not load audio");
            }

            _menu = LoadSound(AppContext.BaseDirectory + "assets/sfx/menu.wav");
            LoadSound(AppContext.BaseDirectory + "assets/sfx/fill.wav");

            _random          = new Random();
            _playlist        = new List<string>();
            _playingPlaylist = false;
            var files = Directory.EnumerateFiles(AppContext.BaseDirectory + "assets/bgm/playlist");

            foreach (var file in files)
            {
                if (!file.EndsWith(".ogg"))
                {
                    continue;
                }
                _playlist.Add(file);
            }
        }
Code example #7
        public void TestPhonemes()
        {
            EventWaitHandle waitHandle = new AutoResetEvent(false);

            using (MemoryStream stream = new MemoryStream())
                using (SpeechSynthesizer synth = new SpeechSynthesizer())
                {
                    synth.SetOutputToWaveStream(stream);

                    //synth.SpeakSsml("<?xml version=\"1.0\" encoding=\"UTF-8\"?><speak version=\"1.0\" xmlns=\"http://www.w3.org/2001/10/synthesis\" xml:lang=\"en-GB\"><s>This is your <phoneme alphabet=\"ipa\" ph=\"leɪkɒn\">Lakon</phoneme>.</s></speak>");

                    //synth.SpeakSsml("<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?><speak version = \"1.0\" xmlns = \"http://www.w3.org/2001/10/synthesis\" xml:lang=\"en-GB\"><s>You are travelling to the <phoneme alphabet=\"ipa\" ph=\"ˈdɛltə\">delta</phoneme> system.</s></speak>");
                    synth.SpeakSsml("<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?><speak version = \"1.0\" xmlns = \"http://www.w3.org/2001/10/synthesis\" xml:lang=\"en-GB\"><s>You are travelling to the <phoneme alphabet=\"ipa\" ph=\"ˈlaʊ.təns\">Luyten's</phoneme> <phoneme alphabet=\"ipa\" ph=\"stɑː\">Star</phoneme> system.</s></speak>");
                    //synth.SpeakSsml("<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?><speak version = \"1.0\" xmlns = \"http://www.w3.org/2001/10/synthesis\" xml:lang=\"en-GB\"><s>You are travelling to the <phoneme alphabet=\"ipa\" ph=\"bliːiː\">Bleae</phoneme> <phoneme alphabet=\"ipa\" ph=\"θuːə\">Thua</phoneme> system.</s></speak>");
                    //synth.SpeakSsml("<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?><speak version = \"1.0\" xmlns = \"http://www.w3.org/2001/10/synthesis\" xml:lang=\"en-GB\"><s>You are travelling to the Amnemoi system.</s></speak>");
                    //synth.Speak("You are travelling to the Barnard's Star system.");
                    stream.Seek(0, SeekOrigin.Begin);

                    IWaveSource source = new WaveFileReader(stream);

                    var soundOut = new WasapiOut();
                    soundOut.Stopped += (s, e) => waitHandle.Set();

                    soundOut.Initialize(source);
                    soundOut.Play();

                    waitHandle.WaitOne();
                    soundOut.Dispose();
                    source.Dispose();
                }
        }
Code example #8
        public void TestDistortion()
        {
            EventWaitHandle waitHandle = new AutoResetEvent(false);

            using (MemoryStream stream = new MemoryStream())
                using (SpeechSynthesizer synth = new SpeechSynthesizer())
                {
                    foreach (InstalledVoice voice in synth.GetInstalledVoices())
                    {
                        Console.WriteLine(voice.VoiceInfo.Name);
                    }

                    synth.SetOutputToWaveStream(stream);
                    synth.Speak("Anaconda golf foxtrot lima one niner six eight requesting docking.");
                    stream.Seek(0, SeekOrigin.Begin);

                    IWaveSource         source          = new WaveFileReader(stream);
                    DmoDistortionEffect distortedSource = new DmoDistortionEffect(source);
                    distortedSource.Edge             = 10;
                    distortedSource.PreLowpassCutoff = 4800;

                    var soundOut = new WasapiOut();
                    soundOut.Stopped += (s, e) => waitHandle.Set();

                    soundOut.Initialize(distortedSource);
                    soundOut.Play();

                    waitHandle.WaitOne();

                    soundOut.Dispose();
                    distortedSource.Dispose();
                    source.Dispose();
                }
        }
Code example #9
        public static AudioPlaybackEngine CreateAudioPlaybackEngine(DeviceInterfaceType type, SampleRate sampleRate, Channels channels, int latency)
        {
            IWavePlayer deviceInterface = null;

            switch (type)
            {
            case DeviceInterfaceType.WaveOut:
                deviceInterface = new WaveOut()
                {
                    DesiredLatency = latency
                };
                break;

            case DeviceInterfaceType.WaveOutEvent:
                deviceInterface = new WaveOutEvent()
                {
                    DesiredLatency = latency
                };
                break;

            case DeviceInterfaceType.WasapiOut:
                deviceInterface = new WasapiOut(AudioClientShareMode.Shared, latency);
                break;

            case DeviceInterfaceType.DirectSoundOut:
                deviceInterface = new DirectSoundOut(latency);
                break;

            default:
                throw new ArgumentException("type");
            }

            return new AudioPlaybackEngine(deviceInterface, (int)sampleRate, (int)channels);
        }
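
A possible call site for the factory above; only DeviceInterfaceType.WasapiOut is confirmed by the snippet, so the SampleRate and Channels member names below are assumptions:

        var engine = CreateAudioPlaybackEngine(
            DeviceInterfaceType.WasapiOut,
            SampleRate.Hz44100, //assumed member name
            Channels.Stereo,    //assumed member name
            latency: 100);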
Code example #10
        protected virtual void Dispose(bool disposing)
        {
            if (!disposedValue)
            {
                if (disposing)
                {
                    recordingStream?.Dispose();
                    recordingStream = null;

                    fileRecordingStream?.Dispose();
                    fileRecordingStream = null;

                    outputDevice?.Dispose();
                    outputDevice = null;

                    targetOutputDevice?.Dispose();
                    targetOutputDevice = null;

                    targetInputDevice?.Dispose();
                    targetInputDevice = null;
                }

                disposedValue = true;
            }
        }
Code example #11
        public void NotifyDefaultChanged(MMDevice audioDevice)
        {
            if (audioDevice.DataFlow != DataFlow.Render)
            {
                return;
            }

            var task = new Task(() =>
            {
                using (var memoryStreamedSound = GetStreamCopy())
                {
                    using (var output = new WasapiOut(audioDevice, AudioClientShareMode.Shared, true, 10))
                    {
                        output.Init(new WaveFileReader(memoryStreamedSound));
                        output.Play();
                        while (output.PlaybackState == PlaybackState.Playing)
                        {
                            Thread.Sleep(500);
                        }
                    }
                }
            });

            task.Start();
        }
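
A sketch of an event-driven alternative to the wait loop above, using NAudio's PlaybackStopped event instead of checking PlaybackState every 500 ms (memoryStreamedSound as in the snippet):

            using (var done = new ManualResetEventSlim(false))
            using (var output = new WasapiOut(audioDevice, AudioClientShareMode.Shared, true, 10))
            {
                output.PlaybackStopped += (s, e) => done.Set();
                output.Init(new WaveFileReader(memoryStreamedSound));
                output.Play();
                done.Wait(); //returns once playback has stopped
            }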
Code example #12
        private void CleanUpActiveStream()
        {
            if (recordingStream is not null)
            {
                //Clean up last effect
                recordingStream.StopRecording();
                recordingStream.Dispose();
                recordingStream = null;
            }

            if (fileRecordingStream is not null)
            {
                //Clean up last effect
                fileRecordingStream.StopRecording();
                fileRecordingStream.Dispose();
                fileRecordingStream = null;
            }

            if (outputDevice is not null)
            {
                outputDevice.Stop();
                outputDevice.Dispose();
                outputDevice = null;
            }
        }
Code example #13
 public void Prepare(Uri uri)
 {
     CleanUp();
     Source = CodecFactory.Instance.GetCodec(uri);
     Out    = new WasapiOut();
     Out.Initialize(Source);
 }
Code example #14
File: WasapiOutput.cs Project: SKKbySSK/KSynthesizer
 public void Initialize(MMDevice device, AudioClientShareMode shareMode, TimeSpan latency, AudioFormat format)
 {
     WaveFormat = WaveFormat.CreateIeeeFloatWaveFormat(format.SampleRate, format.Channels);
     wasapiOut  = new WasapiOut(device, shareMode, true, (int)latency.TotalMilliseconds);
     wasapiOut.Init(this);
     Format = new AudioFormat(wasapiOut.OutputWaveFormat.SampleRate, wasapiOut.OutputWaveFormat.Channels, wasapiOut.OutputWaveFormat.BitsPerSample);
 }
Code example #15
        public WasapiOutput(Dictionary<string, object> config)
        {
            string OutputName = config["OutputName"] as string;

            if (string.IsNullOrWhiteSpace(OutputName))
            {
                OutputName = "<default>";
            }

            MMDevice device = null;

            if (OutputName == "<default>")
            {
                device = new MMDeviceEnumerator().GetDefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);
            }
            else
            {
                foreach (MMDevice dev in new MMDeviceEnumerator().EnumerateAudioEndPoints(DataFlow.Render, DeviceState.Active))
                {
                    if (dev.ID == OutputName)
                    {
                        device = dev;
                        break;
                    }
                }
            }

            if (device == null)
            {
                throw new InvalidOutputException(OutputName + " not found");
            }

            output = new WasapiOut(device, AudioClientShareMode.Shared, false, 50);
        }
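
A possible construction of this output, assuming the host supplies the config dictionary; "<default>" selects the default render endpoint, while any other value must match an MMDevice.ID:

        var output = new WasapiOutput(new Dictionary<string, object>
        {
            ["OutputName"] = "<default>"
        });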
Code example #16
        private void button_Click(object sender, RoutedEventArgs e)
        {
            MMDevice dev = (MMDevice)comboBox.SelectedItem;

            if (mmdevicesOut.Contains(dev))
            {
                capture = new WasapiLoopbackCapture();
            }
            else
            {
                capture = new WasapiCapture();
            }
            capture.Device = dev;

            capture.Initialize();

            w = new WasapiOut();

            w.Device = (MMDevice)comboBox_Copy.SelectedItem;

            w.Initialize(new SoundInSource(capture)
            {
                FillWithZeros = true
            });

            capture.Start();
            w.Play();
        }
Code example #17
        public void NotifyDefaultChanged(IAudioDevice audioDevice)
        {
            if (audioDevice.Type != AudioDeviceType.Playback)
            {
                return;
            }

            var task = new Task(() =>
            {
                using (var memoryStreamedSound = GetStreamCopy())
                {
                    var device = _deviceEnumerator.GetDevice(audioDevice.Id);
                    using (var output = new WasapiOut(device, AudioClientShareMode.Shared, true, 10))
                    {
                        output.Init(new WaveFileReader(memoryStreamedSound));
                        output.Play();
                        while (output.PlaybackState == PlaybackState.Playing)
                        {
                            Thread.Sleep(500);
                        }
                    }
                }
            });

            task.Start();
        }
Code example #18
        public static void Play(string filePath, Func<bool> ShouldStop = null)
        {
            using (var enumerator = new MMDeviceEnumerator())
                using (var device = enumerator.EnumAudioEndpoints(DataFlow.Render, DeviceState.Active).Last())
                    using (var source =
                               CodecFactory.Instance.GetCodec(filePath)
                               .ToSampleSource()
                               .ToMono()
                               .ToWaveSource())

                        using (
                            var soundOut = new WasapiOut()
                        {
                            Latency = 100, Device = device
                        })
                        {
                            soundOut.Initialize(source);
                            soundOut.Play();
                            if (ShouldStop == null)
                            {
                                Thread.Sleep(source.GetLength());
                            }
                            else
                            {
                                while (!ShouldStop())
                                {
                                    Thread.Sleep(5000);
                                }
                            }
                            soundOut.Stop();
                        }
        }
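
A hypothetical call; the file path and the stop predicate (polled every five seconds) are only illustrations:

        Play(@"C:\sounds\notify.wav", () => Console.KeyAvailable);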
Code example #19
        public void TestExtendedSource()
        {
            EventWaitHandle waitHandle = new AutoResetEvent(false);

            using (MemoryStream stream = new MemoryStream())
                using (SpeechSynthesizer synth = new SpeechSynthesizer())
                {
                    synth.SetOutputToWaveStream(stream);
                    synth.Speak("Test.");
                    stream.Seek(0, SeekOrigin.Begin);

                    IWaveSource source = new ExtendedDurationWaveSource(new WaveFileReader(stream), 2000).AppendSource(x => new DmoWavesReverbEffect(x)
                    {
                        ReverbMix = -10
                    });

                    var soundOut = new WasapiOut();
                    soundOut.Stopped += (s, e) => waitHandle.Set();

                    soundOut.Initialize(source);
                    soundOut.Play();

                    waitHandle.WaitOne();
                    soundOut.Dispose();
                    source.Dispose();
                }
        }
Code example #20
        private void PlayStream(WaveStream waveStream)
        {
            new Thread(() =>
            {
                using (waveStream)
                {
                    volumeStream = new WaveChannel32(waveStream)
                    {
                        Volume = volumeControl.CurrentVolume, PadWithZeroes = true
                    };
                    Output = new WasapiOut(AudioClientShareMode.Shared, false, 300);
                    using (Output)
                    {
                        Output.Init(volumeStream);
                        Output.Play();

                        while (volumeStream.Position < volumeStream.Length && !reset)
                        {
                            Thread.Sleep(100);
                        }
                    }
                    Output = null;
                    if (!reset)
                    {
                        RaisePlaybackEnded();
                    }
                    reset = false;
                }
            }).Start();
        }
Code example #21
        public void TestAudio()
        {
            EventWaitHandle waitHandle = new AutoResetEvent(false);

            using (MemoryStream stream = new MemoryStream())
                using (SpeechSynthesizer synth = new SpeechSynthesizer())
                {
                    synth.SetOutputToWaveStream(stream);

                    synth.SpeakSsml("<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?><speak version = \"1.0\" xmlns = \"http://www.w3.org/2001/10/synthesis\" xml:lang=\"en-GB\"><s><audio src=\"C:\\Users\\jgm\\Desktop\\positive.wav\"/>You are travelling to the <phoneme alphabet=\"ipa\" ph=\"ˈlaʊ.təns\">Luyten's</phoneme> <phoneme alphabet=\"ipa\" ph=\"stɑː\">Star</phoneme> system.</s></speak>");
                    stream.Seek(0, SeekOrigin.Begin);

                    IWaveSource source = new WaveFileReader(stream);

                    var soundOut = new WasapiOut();
                    soundOut.Stopped += (s, e) => waitHandle.Set();

                    soundOut.Initialize(source);
                    soundOut.Play();

                    waitHandle.WaitOne();
                    soundOut.Dispose();
                    source.Dispose();
                }
        }
Code example #22
        public void Play(string filename, float volume)
        {
            var configuration = configurationManager.LoadConfiguration<AudioConfiguration>();

            configuration.InitializeConfiguration();

            var pathToFile = Path.Combine(soundPathProvider.Path, filename);

            if (!File.Exists(pathToFile))
            {
                return;
            }

            if (volume == 0f)
            {
                logger.Warning($"Die Lautstärke für '{filename}' liegt bei 0 und kann nicht gehört werden.");
                return;
            }

            using var reader       = new MediaFoundationReader(pathToFile);
            using var volumeStream = new WaveChannel32(reader);
            using var outputStream = new WasapiOut(configuration.SelectedSoundCommandDevice, NAudio.CoreAudioApi.AudioClientShareMode.Shared, false, 10);
            volumeStream.Volume    = NormalizeVolume(volume);
            outputStream.Init(volumeStream);
            outputStream.Play();

            Thread.Sleep(reader.TotalTime.Add(TimeSpan.FromMilliseconds(100)));

            outputStream.Stop();
        }
Code example #23
        public void StopEncoding()
        {
            _waveIn?.Dispose();
            _waveIn = null;

            _waveOut?.Dispose();
            _waveOut = null;

            _playBuffer?.ClearBuffer();
            _playBuffer = null;

            _encoder?.Dispose();
            _encoder = null;

            _decoder?.Dispose();
            _decoder = null;

            _speex?.Dispose();
            _speex = null;

            _waveFile?.Dispose();
            _waveFile = null;

            SpeakerMax = -100;
            MicMax     = -100;
        }
Code example #24
        public void UpdateDevices(MMDevice input, MMDevice output)
        {
            outp?.Stop();
            outp?.Dispose();

            inp?.StopRecording();
            inp?.Dispose();

            inp = new WasapiCapture(input, true, 5);
            inp.DataAvailable += OnCapture;

            buffer = new BufferedWaveProvider(inp.WaveFormat);

            mixer = new MixingWaveProvider32();
            mixer.AddInputStream(buffer);

            if (resampler == null)
            {
                resampler = new AudioResampler(mixer);
            }
            else
            {
                resampler.Update(mixer);
            }

            outp = new WasapiOut(output, AudioClientShareMode.Shared, true, 5);
            outp.Init(resampler);

            inp.StartRecording();
            outp.Play();
        }
Code example #25
 public void Start()
 {
     wasapiOut = new WasapiOut();
     wasapiOut.Init(mainMixer);
     wasapiOut.Play();
     timer.Start();
 }
Code example #26
        public void TestPhonemes()
        {
            EventWaitHandle waitHandle = new AutoResetEvent(false);

            using (MemoryStream stream = new MemoryStream())
                using (SpeechSynthesizer synth = new SpeechSynthesizer())
                {
                    synth.SetOutputToWaveStream(stream);

                    synth.SpeakSsml("<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?><speak version = \"1.0\" xmlns = \"http://www.w3.org/2001/10/synthesis\" xml:lang=\"en-GB\"><s>You are travelling to the <phoneme alphabet=\"ipa\" ph=\"ˈprəʊˌsɪən\">Procyon</phoneme> system.</s></speak>");
                    //synth.SpeakSsml("<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?><speak version = \"1.0\" xmlns = \"http://www.w3.org/2001/10/synthesis\" xml:lang=\"en-GB\"><s>You are travelling to the <phoneme alphabet=\"ipa\" ph=\"ˈkaɪə\">Kaia</phoneme> <phoneme alphabet=\"ipa\" ph=\"ˈbɑːhɑːhɑː\">Bajaja</phoneme> system.</s></speak>");
                    //synth.SpeakSsml("<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?><speak version = \"1.0\" xmlns = \"http://www.w3.org/2001/10/synthesis\" xml:lang=\"en-GB\"><s>You are travelling to the Amnemoi system.</s></speak>");
                    //synth.Speak("Anemoi");
                    stream.Seek(0, SeekOrigin.Begin);

                    IWaveSource source = new WaveFileReader(stream);

                    var soundOut = new WasapiOut();
                    soundOut.Stopped += (s, e) => waitHandle.Set();

                    soundOut.Initialize(source);
                    soundOut.Play();

                    waitHandle.WaitOne();
                    soundOut.Dispose();
                    source.Dispose();
                }
        }
Code example #27
        private void InitialisePlayback()
        {
            selectedPlaybackDevice = (MMDevice)ComboDevices.SelectedItem;
            AudioClientShareMode shareMode = AudioClientShareMode.Shared; //share the sound card between applications; usually the default
            int  latency      = 20;                                      //WASAPI can work at much lower latencies
            bool useEventSync = false;

            wasapiOut = new WasapiOut(selectedPlaybackDevice, shareMode, useEventSync, latency);
            //device.AudioEndpointVolume.MasterVolumeLevelScalar = (float)sliderVolume.Value; //would set volume on the physical device rather than the WASAPI session; this may be integrated into NAudio later
            drc = ApplyDRC.IsChecked.Value; //ascertain whether to apply DRC
            wasapiOut.PlaybackStopped += OnPlaybackStopped;
            reader = new AudioFileReader(filePath);

            textBlockDuration.Text = reader.TotalTime.ToString();   //set slider display values
            textBlockPosition.Text = reader.CurrentTime.ToString();
            sliderPosition.Maximum = reader.TotalTime.TotalSeconds; //set max value of slider to track length

            if (drc)
            {
                CompressionReader drcaudio = new CompressionReader(filePath);
                wasapiOut.Init(drcaudio);
            }

            else
            {
                timer.Start();
                reader.Volume = (float)sliderVolume.Value;
                wasapiOut.Init(reader);
            }

            wasapiOut.Play();
        }
Code example #28
        /// <summary>
        /// Starts receiving and playing back UDP audio packets
        /// </summary>
        /// <param name="cancelTokenSource">CancellationTokenSource used to stop receiving</param>
        /// <returns></returns>
        public async Task StartReceiveAsync(CancellationTokenSource cancelTokenSource)
        {
            //Set the source format (16 kHz, 16-bit, mono)
            var bufferedWaveProvider = new BufferedWaveProvider(new WaveFormat(16000, 16, 1));

            //Configure the buffer
            bufferedWaveProvider.BufferDuration          = new TimeSpan(0, 0, 0, 0, 150); //reserve 150 ms
            bufferedWaveProvider.DiscardOnBufferOverflow = true;                          //discard on buffer overflow

            //Select the playback device and output target (WASAPI)
            var mmDevice = new MMDeviceEnumerator().GetDefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);

            //Start accepting UDP packets
            Task t = StartReceiveAudioPacketAsync(bufferedWaveProvider);

            using (IWavePlayer wavPlayer = new WasapiOut(mmDevice, AudioClientShareMode.Shared, false, 20))
            {
                //Connect the input buffer to the output and start playback
                wavPlayer.Init(bufferedWaveProvider);
                wavPlayer.Play();

                //Wait asynchronously until cancellation is requested
                while (!cancelTokenSource.IsCancellationRequested)
                {
                    await Task.Delay(100);
                }
                wavPlayer.Stop();
            }
        }
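
A sketch of a caller, assuming a receiver object exposing the method above; cancelling the token source ends the playback loop:

            var cts = new CancellationTokenSource();
            Task playback = receiver.StartReceiveAsync(cts); //receiver is hypothetical
            await Task.Delay(TimeSpan.FromSeconds(30));      //let it play for 30 seconds
            cts.Cancel();
            await playback;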
Code example #29
        public void CanPlayStereoToMonoSource()
        {
            //in order to fix workitem 3

            var source = CodecFactory.Instance.GetCodec(testfile);

            Assert.AreEqual(2, source.WaveFormat.Channels);

            var monoSource = new StereoToMonoSource(source);

            Assert.AreEqual(1, monoSource.WaveFormat.Channels);

            ISoundOut soundOut;

            if (WasapiOut.IsSupportedOnCurrentPlatform)
            {
                soundOut = new WasapiOut();
            }
            else
            {
                soundOut = new DirectSoundOut();
            }

            soundOut.Initialize(monoSource.ToWaveSource(16));
            soundOut.Play();

            Thread.Sleep((int)Math.Min(source.GetMilliseconds(source.Length), 60000));

            soundOut.Dispose();
        }
Code example #30
File: AudioOutput.cs Project: jiehuali/CrossTalk
        private void AudioOutput_SelectedIndexChanged(object sender, EventArgs e)
        {
            if (output != null && output.PlaybackState != PlaybackState.Stopped)
            {
                output.Pause();
            }

            output = new WasapiOut(outputs[audioOutputSelector.SelectedIndex], AudioClientShareMode.Shared, true, outputLatency);

            bitsPrSample = output.OutputWaveFormat.BitsPerSample;
            sampleRate   = output.OutputWaveFormat.SampleRate;
            channels     = output.OutputWaveFormat.Channels;

            // Set the WaveFormat
            outputFormat = WaveFormat.CreateIeeeFloatWaveFormat(sampleRate, channels);

            pflBuffer           = new BufferedWaveProvider(internalFormatStereo);
            pflBuffer.ReadFully = true;
            pflBuffer.DiscardOnBufferOverflow = true;

            WdlResamplingSampleProvider resampler = new WdlResamplingSampleProvider(pflBuffer.ToSampleProvider(), outputFormat.SampleRate);

            output.Init(resampler);
            output.Play();

            Logger.WriteLine("SET OUTPUT FORMAT: "
                             + "Sample Rate: " + sampleRate
                             + ", BitsPrSasmple: " + bitsPrSample
                             + ", Channels: " + channels);
        }