Support for playback using WASAPI (WasapiOut).
Inheritance: IWavePlayer
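Before the collected examples, a minimal, self-contained sketch of the typical WasapiOut lifecycle (construct, Init, Play, wait, Dispose). The file path and the 100 ms latency are illustrative placeholders, not values taken from the examples below.
 using System.Threading;
 using NAudio.CoreAudioApi;
 using NAudio.Wave;

 class MinimalWasapiPlayback
 {
     static void Main()
     {
         // Shared-mode output on the default render endpoint, event-driven callbacks, 100 ms latency (placeholder values).
         using (var reader = new AudioFileReader(@"C:\temp\example.wav")) // placeholder path
         using (IWavePlayer output = new WasapiOut(AudioClientShareMode.Shared, true, 100))
         {
             output.Init(reader);
             output.Play();

             // Poll until playback finishes; production code would subscribe to PlaybackStopped instead.
             while (output.PlaybackState == PlaybackState.Playing)
             {
                 Thread.Sleep(200);
             }
         }
     }
 }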
 public ScorePlayer()
 {
     _syncObject  = new object();
     _waveStream  = new WaveMixerStream32();
     _soundPlayer = new AudioOut(AudioClientShareMode.Shared, 60);
     _soundPlayer.Init(_waveStream);
     _channels = new Dictionary<WaveStream, WaveChannel32>();
     PlayerSettings.MusicVolumeChanged += OnMusicVolumeChanged;
 }
Example #2
 public IWavePlayer CreateDevice(int latency)
 {
     var wasapi = new WasapiOut(
         settingsPanel.SelectedDevice,
         settingsPanel.ShareMode,
         settingsPanel.UseEventCallback,
         latency);
     return wasapi;
 }
Example #3
 static void PlayWithWasapi()
 {
     using (IWavePlayer output = new WasapiOut(AudioClientShareMode.Shared, 10))
     using (PdProvider pd = new PdProvider())
     {
         output.Init(pd);
         output.Play();
         Console.ReadLine();
         output.Stop();
     }
 }
Example #4
        void c_valueChanged(object sender, EventArgs e)
        {
            if (discard_events)
            {
                return;
            }

            var verter = (Verter)sender;
            var ev     = verter.eventType;

            if (ev == Verter.EventType.mute)
            {
                chdev(0);
                return;
            }

            if (ev == Verter.EventType.solo)
            {
                discard_events = true;
                for (var a = 0; a < gVu.Length; a++)
                {
                    gVu[a].enabled = false;
                }

                ((Verter)sender).enabled = true;
                discard_events           = false;
                chdev(0);
                return;
            }

            if (ev == Verter.EventType.airhorn)
            {
                gPreviewOn.Checked = false;
                chdev(0);

                if (unFxTimer == null)
                {
                    unFxTimer          = new Timer();
                    unFxTimer.Interval = 3000;
                    unFxTimer.Tick    += delegate(object oa, EventArgs ob)
                    {
                        unFX();
                    };
                }
                unFX();
                fx_stream = System.Reflection.Assembly.GetExecutingAssembly().GetManifestResourceStream("Loopstream.res.sc.wav");
                fx_wav    = new NAudio.Wave.WaveFileReader(fx_stream);
                var prov2 = new NPatch.ChannelMapperOut(fx_wav.ToSampleProvider(), new int[] { (int)verter.Tag }, src.wf.Channels);
                fx_out = new NAudio.Wave.WasapiOut(src.mm, NAudio.CoreAudioApi.AudioClientShareMode.Shared, false, 100);
                fx_out.Init(prov2);
                fx_out.Play();
                unFxTimer.Start();
            }
        }
Example #5
        public AudioManager()
        {
            _mixerStream = new WaveMixerStream32 {
                AutoStop = false
            };

            _soundPlayer = new AudioOut(AudioClientShareMode.Shared, 60);
            _soundPlayer.Init(_mixerStream);

            Sfx = new SfxManager(this);

            _soundPlayer.Play();
        }
 public void NotifyDefaultChanged(IAudioDevice audioDevice)
 {
     if (audioDevice.Type != AudioDeviceType.Playback)
         return;
     var task = new Task(() =>
     {
         var device = _deviceEnumerator.GetDevice(audioDevice.Id);
         using (var output = new WasapiOut(device, AudioClientShareMode.Shared, true, 10))
         {
             output.Init(new WaveFileReader(Resources.NotificationSound));
             output.Play();
             while (output.PlaybackState == PlaybackState.Playing)
             {
                 Thread.Sleep(500);
             }
         }
     });
     task.Start();
 }
Example #7
        internal void PlayMusic(double startOffsetInMillis)
        {
            var o = _selectedWaveOut;

            if (o == null)
            {
                o = new AudioOut(AudioClientShareMode.Shared, 0);
                var fileStream = new FileStream(Project.MusicFileName, FileMode.Open, FileAccess.Read);
                _waveReader        = new WaveFileReader(fileStream);
                o.PlaybackStopped += SelectedWaveOut_PlaybackStopped;
                o.Init(_waveReader);
                _selectedWaveOut = o;
            }

            if (o.PlaybackState == PlaybackState.Playing)
            {
                o.Stop();
            }

            _waveReader.CurrentTime = TimeSpan.FromMilliseconds(startOffsetInMillis);
            o.Play();
            CmdMusicPlay.RaiseCanExecuteChanged();
        }
Example #8
        private void PlayStream(WaveStream waveStream)
        {
            new Thread(() =>
            {
                using (waveStream)
                {
                    volumeStream = new WaveChannel32(waveStream) { Volume = volumeControl.CurrentVolume, PadWithZeroes = true };
                    Output = new WasapiOut(AudioClientShareMode.Shared, false, 300);
                    using (Output)
                    {
                        Output.Init(volumeStream);
                        Output.Play();

                        while (volumeStream.Position < volumeStream.Length && !reset)
                        {
                            Thread.Sleep(100);
                        }
                    }
                    Output = null;
                    if (!reset) RaisePlaybackEnded();
                    reset = false;
                }
            }).Start();
        }
 private void closeDevice()
 {
     StopPlayback();
     if (FWaveOut != null)
     {
         FWaveOut.Dispose();
         FWaveOut = null;
     }
     FPinOutStatus[0] = "Device closed";
 }
Example #10
        public static void PlaySong(IEnumerable<Track> tracks)
        {
            var enumerator = new MMDeviceEnumerator();
            var defaultDevice = enumerator.GetDefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);
            var waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(defaultDevice.AudioClient.MixFormat.SampleRate, 1);

            var wasapiOut = new WasapiOut(AudioClientShareMode.Shared, false, 60);
            MediaBankBase bank = new FenderStratCleanB(waveFormat);
            MediaBankBase bankBass = new RockdaleBassBridge(waveFormat);

            var mixer = new MixingSampleProvider(waveFormat);

            var trackSampleProviders =
                tracks.Select(t => new TrackSampleProvider(t.Patch == MediaPatch.CleanGuitar ? bank : bankBass, t))
                    .ToArray();
            var playedTracks = new List<int>();

            foreach(var track in trackSampleProviders)
            {
                track.OnPhrasePlaying += (sender, args) =>
                {
                    var channel = args.Track.Channel;
                    var phrase = args.Phrase;

                    if(playedTracks.Contains(channel))
                    {
                        AsyncConsole.WriteLine();
                        PrintUtils.PrintContentTable();

                        playedTracks.Clear();
                    }

                    PrintUtils.PrintContent(phrase.Notes != null && phrase.Notes.Length > 0
                        ? string.Join(",", phrase.Notes)
                        : phrase.Command.ToString(), channel);

                    playedTracks.Add(channel);
                };
                mixer.AddMixerInput(track);
            }

            wasapiOut.Init(new VolumeSampleProvider(mixer)
            {
                Volume = 0.7f
            });

            PrintUtils.Init(trackSampleProviders.Length);

            PrintUtils.PrintHeaderOfTable();
            PrintUtils.PrintRowDividerTable();
            PrintUtils.PrintContentTable();

            wasapiOut.Play();

            var resetEvent = new ManualResetEvent(false);

            wasapiOut.PlaybackStopped += (sender, args) =>
            {
                resetEvent.Set();
                if(args.Exception != null)
                {
                    throw args.Exception;
                }
            };

            resetEvent.WaitOne();
            Console.WriteLine();
            PrintUtils.PrintFooterOfTable();
        }
 /// <summary>
 ///     Initializes WASAPI and starts playback.
 /// </summary>
 private void InitializeWasapi()
 {
     if (_wasapiOut != null) _wasapiOut.Dispose();
     _wasapiOut = new WasapiOut(AudioClientShareMode.Shared, true, 500);
     _wasapiOut.Init(_waveProvider);
     _wasapiOut.Play();
 }
        public Task Start(BuildState state, BuildStatus status)
        {
            var tcs = new TaskCompletionSource<byte>();

            TrackableWaveChannel stream = null;
            WasapiOut device = null;

            try
            {
                var fileName = "";

                switch (status)
                {
                    case BuildStatus.Broken:
                        fileName = configuration.Broken;
                        break;
                    case BuildStatus.StillBroken:
                        fileName = configuration.StillBroken;
                        break;
                    case BuildStatus.Fixed:
                        fileName = configuration.Fixed;
                        break;
                    default:
                        throw new ArgumentOutOfRangeException("status");
                }

                if (File.Exists(fileName))
                {
                    device = new WasapiOut(AudioClientShareMode.Shared, 100);
                    stream = new TrackableWaveChannel(BuildStream(fileName));

                    device.Init(stream);
                    device.Play();

                    stream.Finished += (sender, args) =>
                    {
                        tcs.SetResult(0);

                        stream.Dispose();
                        device.Dispose();

                        stream = null;
                        device = null;
                    };
                }
                else
                {
                    var task = tcs.Task;
                    tcs.SetResult(0);
                    return task;
                }
            }
            catch (Exception e)
            {
                tcs.SetException(e);

                if (stream != null)
                {
                    stream.Dispose();
                    stream = null;
                }

                if (device != null)
                {
                    device.Dispose();
                    device = null;
                }
            }

            return tcs.Task;
        }
        public void OpenFile(string path)
        {
            if (ActiveStream == null)
            {
                SelectionBegin = TimeSpan.Zero;
                SelectionEnd = TimeSpan.Zero;
                ChannelPosition = 0;

                if (System.IO.File.Exists(path))
                {
                    try
                    {
                        if (currentDevice == null)
                        {
                            currentSelectedDevice();
                        }
                        AudioClientShareMode shareMode = AudioClientShareMode.Shared;
                        int latency = 100;
                        bool useEventSync = false;
                        wasapiOutDevice = new WasapiOut(currentDevice, shareMode, useEventSync, latency);
                        currentDevice.AudioEndpointVolume.MasterVolumeLevelScalar = (float)volumeValue;

                        ActiveStream = new Mp3FileReader(path);
                        inputStream = new WaveChannel32(ActiveStream);
                        sampleAggregator = new SampleAggregator(fftDataSize);
                        inputStream.Sample += inputStream_Sample;
                        wasapiOutDevice.Init(inputStream);
                        ChannelLength = inputStream.TotalTime.TotalSeconds;
                        FileTag = TagLib.File.Create(path);
                        GenerateWaveformData(path);
                        CanPlay = true;
                    }
                    catch
                    {
                        ActiveStream = null;
                        CanPlay = false;
                    }
                }
            }
        }
Example #14
 public WasapiOut(AudioClientShareMode shareMode, bool useEventSync, int latency)
     : this(WasapiOut.GetDefaultAudioEndpoint(), shareMode, useEventSync, latency)
 {
 }
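The overload above always resolves the default render endpoint. When a specific endpoint is wanted (as several examples on this page do), an MMDevice obtained from MMDeviceEnumerator can be passed to the four-argument constructor. A minimal sketch; the WasapiDeviceSelection helper and its friendly-name filter are hypothetical, not part of NAudio.
 using System.Linq;
 using NAudio.CoreAudioApi;
 using NAudio.Wave;

 static class WasapiDeviceSelection
 {
     // Opens a shared-mode WasapiOut on the first active render endpoint whose friendly
     // name contains the given fragment, falling back to the default endpoint.
     public static IWavePlayer CreateOutputFor(string nameFragment, int latency)
     {
         var enumerator = new MMDeviceEnumerator();
         MMDevice device = enumerator
             .EnumerateAudioEndPoints(DataFlow.Render, DeviceState.Active)
             .FirstOrDefault(d => d.FriendlyName.Contains(nameFragment))
             ?? enumerator.GetDefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);

         return new WasapiOut(device, AudioClientShareMode.Shared, false, latency);
     }
 }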
Example #15
        public static void Execute(QiSession session)
        {
            string serviceName = "CSharpSoundDownloaderSpare";
            var audioDevice = session.GetService("ALAudioDevice");

            var waveIn = new WaveInEvent();

            #region 1/4: Set up sending audio to the robot
            // Lower the output sample rate from the default (48 kHz) to 16 kHz.
            // Note that only 16000, 22050, 44100, or 48000 can be selected.
            audioDevice["setParameter"].Call("outputSampleRate", 16000);

            // Affects how often the DataAvailable event below fires and the length of each buffer.
            // Note that the buffer length must not exceed 16384
            // (see the official documentation for details).
            waveIn.BufferMilliseconds = 200;
            // Microphone capture format: the sample rate must match the value set above.
            waveIn.WaveFormat = new WaveFormat(16000, 16, 2);

            int count = 0;
            waveIn.DataAvailable += (_, e) =>
            {
                if (e.BytesRecorded > 16384) return;

                byte[] bufferToSend = new byte[e.BytesRecorded];
                Array.Copy(e.Buffer, bufferToSend, e.BytesRecorded);

                int p = audioDevice["sendRemoteBufferToOutput"].Post(bufferToSend.Length / 4, bufferToSend);
                Console.WriteLine($"received data, {count}");
                count++;
            };
            #endregion

            #region 2/4: Pick up audio from the robot - prepare the playback device
            var mmDevice = new MMDeviceEnumerator().GetDefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);
            var wavProvider = new BufferedWaveProvider(new WaveFormat(16000, 16, 1));

            var wavPlayer = new WasapiOut(mmDevice, AudioClientShareMode.Shared, false, 200);
            wavPlayer.Init(new VolumeWaveProvider16(wavProvider));
            wavPlayer.Play();
            #endregion

            #region 3/4: Pick up audio from the robot - start monitoring the robot's microphone
            var objBuilder = QiObjectBuilder.Create();
            // Registering the processRemote callback satisfies the contract expected by ALAudioDevice.
            objBuilder.AdvertiseMethod(
                "processRemote::v(iimm)",
                (sig, arg) =>
                {
                    // Process the incoming buffer here.
                    //Console.WriteLine("Received Buffer!");
                    //Console.WriteLine(arg.Dump());

                    // The contents of the data can be inspected by enabling the dump above.
                    byte[] raw = arg[3].ToBytes();
                    wavProvider.AddSamples(raw, 0, raw.Length);

                    return QiValue.Void;
                });

            // Register the service that provides the callback above.
            session.Listen("tcp://0.0.0.0:0").Wait();
            ulong registeredId = session.RegisterService(serviceName, objBuilder.BuildObject()).GetUInt64(0UL);

            #endregion

            #region 4/4: Adjust settings and start the actual input/output
            // Magic numbers; see http://www.baku-dreameater.net/archives/2411 for details.
            audioDevice["setClientPreferences"].Call(serviceName, 16000, 3, 0);

            // Start.
            audioDevice["subscribe"].Call(serviceName);
            waveIn.StartRecording();
            #endregion

            Console.WriteLine("Press ENTER to quit..");
            Console.ReadLine();

            audioDevice["unsubscribe"].Call(serviceName);
            session.UnregisterService((uint)registeredId);
            wavPlayer.Stop();
            wavPlayer.Dispose();

            waveIn.StopRecording();
            waveIn.Dispose();
        }
Example #16
 /// <summary>
 /// Tries to create a WASAPI output interface.
 /// </summary>
 private static bool _CreateWasapi(out IWavePlayer Player)
 {
     try
     {
         Player = new WasapiOut(AudioClientShareMode.Shared, 300);
         return true;
     }
     catch
     {
         Player = null;
         return false;
     }
 }
 public NAudioWasapi(IWaveProvider provider)
 {
     _waveProvider = provider;
     _soundOutput = new WasapiOut(AudioClientShareMode.Shared, 100);
 }
        private void StopAndCloseStream()
        {
            //wasapiOutConvert
            if (wasapiOutDevice != null)
            {
                wasapiOutDevice.Stop();
            }
            if (activeStream != null)
            {
                inputStream.Close();
                inputStream = null;
                ActiveStream.Close();
                ActiveStream = null;
            }
            if (wasapiOutDevice != null)
            {
                wasapiOutDevice.Dispose();

                wasapiOutDevice = null;
            }
            if (reader != null)
            {
                try
                {
                    // If the user hits next before the reader is defined, this will be caught
                    reader.Dispose();
                }
                catch
                {
                    // When caught, restart the method to give the reader time to update (works fine)
                    StopAndCloseStream();
                }
            }
        }
        private void ReinitaliseWaveOut()
        {
            if (FIsInitialised && FWaveOut != null)
            {
                FWaveOut.Dispose();
            }

            FWaveOut = new WasapiOut(FPinInDevice[0], AudioClientShareMode.Shared, false, 100);
            FWaveOut.Init(FWaveStream);

            FPinOutDuration[0] = (float)FWaveStream.TotalTime.TotalSeconds;
            FIsInitialised = true;
        }
        /// <summary>
        /// Plays a wave file.
        /// </summary>
        /// <param name="deviceID">Playback device ID</param>
        /// <param name="waveFile">Wave file to play</param>
        /// <param name="isDelete">Delete the file after playback</param>
        /// <param name="volume">Volume</param>
        public static void Play(
            string deviceID,
            string waveFile,
            bool isDelete,
            int volume)
        {
            var sw = Stopwatch.StartNew();

            var volumeAsFloat = ((float)volume / 100f);

            try
            {
                IWavePlayer player = null;
                IWaveProvider provider = null;

                switch (TTSYukkuriConfig.Default.Player)
                {
                    case WavePlayers.WaveOut:
                        player = new WaveOut()
                        {
                            DeviceNumber = int.Parse(deviceID),
                            DesiredLatency = PlayerLatencyWaveOut,
                        };
                        break;

                    case WavePlayers.DirectSound:
                        player = new DirectSoundOut(
                            Guid.Parse(deviceID),
                            PlayerLatencyDirectSoundOut);
                        break;

                    case WavePlayers.WASAPI:
                        player = new WasapiOut(
                            deviceEnumrator.GetDevice(deviceID),
                            AudioClientShareMode.Shared,
                            false,
                            PlayerLatencyWasapiOut);
                        break;

                    case WavePlayers.ASIO:
                        player = new AsioOut(deviceID);
                        break;
                }

                if (player == null)
                {
                    return;
                }

                provider = new AudioFileReader(waveFile)
                {
                    Volume = volumeAsFloat
                };

                player.Init(provider);
                player.PlaybackStopped += (s, e) =>
                {
                    player.Dispose();

                    var file = provider as IDisposable;
                    if (file != null)
                    {
                        file.Dispose();
                    }

                    if (isDelete)
                    {
                        File.Delete(waveFile);
                    }
                };

                // Start playback.
                player.Play();
            }
            catch (Exception ex)
            {
                ActGlobals.oFormActMain.WriteExceptionLog(
                    ex,
                    "サウンドの再生で例外が発生しました。");
            }
            finally
            {
                sw.Stop();
                Debug.WriteLine(
                    "PlaySound ({0}) -> {1:N0} ticks",
                    TTSYukkuriConfig.Default.Player,
                    sw.ElapsedTicks);
            }
        }
        /// <summary>
        /// Initialize the NAudio framework
        /// </summary>
        private void InitializeNAudioLibrary()
        {
            try
            {
                m_latency = Properties.Settings.Default.Latency;

                m_logger.Info("OS Info: " + Environment.OSVersion.ToString());

                // string soundOutput = "WasapiOut";
                string soundOutput = "WaveOut";

                // Set the wave output device based on the configuration setting
                switch (soundOutput)
                {
                    case "WasapiOut":
                        m_waveOutDevice = new WasapiOut(global::NAudio.CoreAudioApi.AudioClientShareMode.Shared, m_latency);
                        break;

                    case "DirectSound":
                        m_waveOutDevice = new DirectSoundOut(m_latency);
                        break;

                    default:
                    case "WaveOut":
                        m_waveOutDevice = new WaveOut();
                        break;
                }

                m_waveOutDevice.PlaybackStopped += waveOutDevice_PlaybackStopped;
                m_logger.Info("Wave Output Device that is actually being used: {0}", m_waveOutDevice.GetType().ToString());
            }
            catch (Exception driverCreateException)
            {
                m_logger.ErrorException("NAudio Driver Creation Failed", driverCreateException);
                throw; // rethrow without resetting the original stack trace
            }
        }
Example #22
 public WasapiOut(AudioClientShareMode shareMode, int latency)
     : this(WasapiOut.GetDefaultAudioEndpoint(), shareMode, true, latency)
 {
 }
Example #23
 public void Open(int deviceID = 0)
 {
     Close();
     for (int i = 0; i < noteMode.Length; i++)
     {
         noteMode[i] = NoteStyle.Regular;
     }
     this.midiOut = new MidiOut(deviceID);
     this.waveOut = new WasapiOut(AudioClientShareMode.Shared, 5);
     this.waveOut.Init(this.mixer);
     this.waveOut.Play();
 }
		private void RestartAudio()
		{
			if(FWaveOut != null)
			{
				Dispose();
			}
			
			if(FInput[0] != null)
			{
				FWaveOut = new WasapiOut(AudioClientShareMode.Shared, 4);
	
				FWaveProvider = new SampleToWaveProvider(FInput[0]);
				FWaveOut.Init(FWaveProvider);
				FWaveOut.Play();
			}

		}		
        private void ReinitaliseWaveOut()
        {
            if (FIsInitialised && FWaveOut != null)
            {
                FWaveOut.Dispose();
            }

            FWaveOut = new WasapiOut(FPinInDevice[0], AudioClientShareMode.Shared, false, 100);
            FWaveOut.Init(FWaveStream);

            FIsInitialised = true;
        }