// Handles value-change events raised by a Verter channel control.
// Dispatches on the control's event type: mute, solo, or the "airhorn"
// one-shot sound effect.
void c_valueChanged(object sender, EventArgs e)
{
    // Re-entrancy guard: set while we programmatically toggle controls below,
    // so those changes do not re-trigger this handler.
    if (discard_events)
    {
        return;
    }
    var verter = (Verter)sender;
    var ev = verter.eventType;
    if (ev == Verter.EventType.mute)
    {
        chdev(0);
        return;
    }
    if (ev == Verter.EventType.solo)
    {
        // Solo: disable every channel, then re-enable only the sender.
        discard_events = true;
        for (var a = 0; a < gVu.Length; a++)
        {
            gVu[a].enabled = false;
        }
        ((Verter)sender).enabled = true;
        discard_events = false;
        chdev(0);
        return;
    }
    if (ev == Verter.EventType.airhorn)
    {
        gPreviewOn.Checked = false;
        chdev(0);
        // Lazily create the timer that ends the effect 3 seconds after it starts.
        if (unFxTimer == null)
        {
            unFxTimer = new Timer();
            unFxTimer.Interval = 3000;
            unFxTimer.Tick += delegate(object oa, EventArgs ob)
            {
                unFX();
            };
        }
        // NOTE(review): presumably unFX() stops/releases any effect already
        // playing before a new one starts — confirm against its definition.
        unFX();
        // Load the embedded sound-effect WAV from assembly resources.
        fx_stream = System.Reflection.Assembly.GetExecutingAssembly().GetManifestResourceStream("Loopstream.res.sc.wav");
        fx_wav = new NAudio.Wave.WaveFileReader(fx_stream);
        // Map the effect onto the output channel index stored in the control's
        // Tag, within the source's channel count.
        var prov2 = new NPatch.ChannelMapperOut(fx_wav.ToSampleProvider(), new int[] { (int)verter.Tag }, src.wf.Channels);
        fx_out = new NAudio.Wave.WasapiOut(src.mm, NAudio.CoreAudioApi.AudioClientShareMode.Shared, false, 100);
        fx_out.Init(prov2);
        fx_out.Play();
        unFxTimer.Start();
    }
}
/// <summary>
/// Streams the Pd provider through a shared-mode WASAPI device,
/// blocking until the user presses ENTER, then stops playback.
/// </summary>
static void PlayWithWasapi()
{
    using (IWavePlayer player = new WasapiOut(AudioClientShareMode.Shared, 10))
    {
        using (PdProvider provider = new PdProvider())
        {
            player.Init(provider);
            player.Play();

            // Block here; playback continues until ENTER is pressed.
            Console.ReadLine();
            player.Stop();
        }
    }
}
/// <summary>
/// Builds the audio pipeline — a 32-bit mixer stream feeding a shared-mode
/// output at 60 ms latency — creates the SFX manager, and starts playback
/// immediately.
/// </summary>
public AudioManager()
{
    // AutoStop = false: presumably keeps the mixer producing audio even when
    // it has no active inputs — TODO confirm against WaveMixerStream32 docs.
    _mixerStream = new WaveMixerStream32 { AutoStop = false };
    _soundPlayer = new AudioOut(AudioClientShareMode.Shared, 60);
    _soundPlayer.Init(_mixerStream);
    Sfx = new SfxManager(this);
    _soundPlayer.Play();
}
/// <summary>
/// Plays a notification sound on the device that just became the default
/// playback endpoint. Capture devices are ignored. The sound is played on a
/// background thread-pool task; this method returns immediately.
/// </summary>
/// <param name="audioDevice">The device that became the system default.</param>
public void NotifyDefaultChanged(IAudioDevice audioDevice)
{
    if (audioDevice.Type != AudioDeviceType.Playback)
        return;

    // Task.Run instead of new Task(...).Start(): same scheduling outcome
    // without the unstarted-task anti-pattern.
    Task.Run(() =>
    {
        var device = _deviceEnumerator.GetDevice(audioDevice.Id);
        // Dispose the reader as well as the output; the original leaked the
        // WaveFileReader (and the resource stream it wraps).
        using (var reader = new WaveFileReader(Resources.NotificationSound))
        using (var output = new WasapiOut(device, AudioClientShareMode.Shared, true, 10))
        {
            output.Init(reader);
            output.Play();
            // Poll until playback ends so the using block does not dispose
            // the output while the sound is still audible.
            while (output.PlaybackState == PlaybackState.Playing)
            {
                Thread.Sleep(500);
            }
        }
    });
}
/// <summary>
/// Plays the given stream on a dedicated background thread through a
/// shared-mode WASAPI output, applying the current volume setting.
/// Takes ownership of <paramref name="waveStream"/> and disposes it when
/// playback ends. Raises PlaybackEnded unless playback was reset.
/// </summary>
/// <param name="waveStream">Audio stream to play; disposed by this method.</param>
private void PlayStream(WaveStream waveStream)
{
    new Thread(() =>
    {
        using (waveStream)
        {
            volumeStream = new WaveChannel32(waveStream)
            {
                Volume = volumeControl.CurrentVolume,
                PadWithZeroes = true
            };
            Output = new WasapiOut(AudioClientShareMode.Shared, false, 300);
            using (Output)
            {
                Output.Init(volumeStream);
                Output.Play();
                // Fix: the original used non-short-circuit `&`; `&&` stops
                // evaluating (and polling the stream) as soon as either the
                // stream is finished or a reset was requested.
                while (volumeStream.Position < volumeStream.Length && !reset)
                {
                    Thread.Sleep(100);
                }
            }
            Output = null;
            // A reset means the caller aborted playback deliberately — do not
            // report a natural end in that case.
            if (!reset)
                RaisePlaybackEnded();
            reset = false;
        }
    }).Start();
}
/// <summary>
/// Mixes all tracks into a single mono float stream, plays it on the default
/// render device, and prints a live table of the phrases being played.
/// Blocks until playback stops; rethrows any playback error to the caller.
/// </summary>
/// <param name="tracks">Tracks to mix and play.</param>
public static void PlaySong(IEnumerable<Track> tracks)
{
    var enumerator = new MMDeviceEnumerator();
    var defaultDevice = enumerator.GetDefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);
    // Mono IEEE-float format at the device's native mix sample rate.
    var waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(defaultDevice.AudioClient.MixFormat.SampleRate, 1);

    MediaBankBase bank = new FenderStratCleanB(waveFormat);
    MediaBankBase bankBass = new RockdaleBassBridge(waveFormat);
    var mixer = new MixingSampleProvider(waveFormat);
    var trackSampleProviders = tracks
        .Select(t => new TrackSampleProvider(t.Patch == MediaPatch.CleanGuitar ? bank : bankBass, t))
        .ToArray();

    var playedTracks = new List<int>();
    foreach (var track in trackSampleProviders)
    {
        track.OnPhrasePlaying += (sender, args) =>
        {
            var channel = args.Track.Channel;
            var phrase = args.Phrase;
            // A repeated channel means the current table row is complete;
            // start a fresh one.
            if (playedTracks.Contains(channel))
            {
                AsyncConsole.WriteLine();
                PrintUtils.PrintContentTable();
                playedTracks.Clear();
            }
            PrintUtils.PrintContent(
                phrase.Notes != null && phrase.Notes.Length > 0
                    ? string.Join(",", phrase.Notes)
                    : phrase.Command.ToString(),
                channel);
            playedTracks.Add(channel);
        };
        mixer.AddMixerInput(track);
    }

    // Fix: dispose the output (the original leaked it).
    using (var wasapiOut = new WasapiOut(AudioClientShareMode.Shared, false, 60))
    {
        wasapiOut.Init(new VolumeSampleProvider(mixer) { Volume = 0.7f });

        PrintUtils.Init(trackSampleProviders.Length);
        PrintUtils.PrintHeaderOfTable();
        PrintUtils.PrintRowDividerTable();
        PrintUtils.PrintContentTable();

        var resetEvent = new ManualResetEvent(false);
        Exception playbackError = null;
        // Fix: subscribe BEFORE Play() so a very fast stop cannot be missed,
        // and capture the exception instead of throwing it inside the
        // PlaybackStopped handler — a throw there happens on the audio
        // callback thread where the caller can never observe it.
        wasapiOut.PlaybackStopped += (sender, args) =>
        {
            playbackError = args.Exception;
            resetEvent.Set();
        };

        wasapiOut.Play();
        resetEvent.WaitOne();

        if (playbackError != null)
        {
            // Rethrow on the caller's thread, preserving the original stack.
            System.Runtime.ExceptionServices.ExceptionDispatchInfo.Capture(playbackError).Throw();
        }
    }

    Console.WriteLine();
    PrintUtils.PrintFooterOfTable();
}
/// <summary>
/// Disposes any existing WASAPI output, then creates a fresh shared-mode
/// output (event-driven, 500 ms latency), binds it to the current wave
/// provider, and starts playback.
/// </summary>
private void InitializeWasapi()
{
    // Release the previous output, if any, before replacing it.
    _wasapiOut?.Dispose();

    _wasapiOut = new WasapiOut(AudioClientShareMode.Shared, true, 500);
    _wasapiOut.Init(_waveProvider);
    _wasapiOut.Play();
}
/// <summary>
/// Plays the notification sound configured for the given build status and
/// returns a task that completes when the sound finishes (or immediately if
/// no sound file exists). Errors are surfaced through the returned task.
/// </summary>
/// <param name="state">Current build state (not used when selecting the sound).</param>
/// <param name="status">Build status that selects which configured file to play.</param>
/// <returns>A task that completes when playback ends.</returns>
/// <exception cref="ArgumentOutOfRangeException">Surfaced via the task when
/// <paramref name="status"/> is not a known value.</exception>
public Task Start(BuildState state, BuildStatus status)
{
    // RunContinuationsAsynchronously keeps caller continuations from
    // inlining onto the audio thread that raises stream.Finished.
    var tcs = new TaskCompletionSource<byte>(TaskCreationOptions.RunContinuationsAsynchronously);
    TrackableWaveChannel stream = null;
    WasapiOut device = null;
    try
    {
        string fileName;
        switch (status)
        {
            case BuildStatus.Broken:
                fileName = configuration.Broken;
                break;
            case BuildStatus.StillBroken:
                fileName = configuration.StillBroken;
                break;
            case BuildStatus.Fixed:
                fileName = configuration.Fixed;
                break;
            default:
                // nameof() instead of the magic string "status".
                throw new ArgumentOutOfRangeException(nameof(status));
        }

        if (File.Exists(fileName))
        {
            device = new WasapiOut(AudioClientShareMode.Shared, 100);
            stream = new TrackableWaveChannel(BuildStream(fileName));
            device.Init(stream);
            device.Play();
            stream.Finished += (sender, args) =>
            {
                // TrySetResult: tolerate a double-fire of Finished without
                // throwing inside the audio callback.
                tcs.TrySetResult(0);
                stream.Dispose();
                device.Dispose();
                stream = null;
                device = null;
            };
        }
        else
        {
            // No sound file configured: complete immediately.
            tcs.SetResult(0);
        }
    }
    catch (Exception e)
    {
        // TrySetException: the TCS may already be completed by the
        // no-file branch, and SetException would then throw here.
        tcs.TrySetException(e);
        if (stream != null)
        {
            stream.Dispose();
            stream = null;
        }
        if (device != null)
        {
            device.Dispose();
            device = null;
        }
    }
    return tcs.Task;
}
// Recreates the WASAPI output for the first input pin and starts playback.
// Any existing output is torn down first.
private void RestartAudio()
{
    // NOTE(review): Dispose() here is the enclosing object's Dispose —
    // presumably it releases FWaveOut; confirm against its definition.
    if (FWaveOut != null)
    {
        Dispose();
    }
    if (FInput[0] != null)
    {
        // Shared mode, 4 ms latency.
        FWaveOut = new WasapiOut(AudioClientShareMode.Shared, 4);
        FWaveProvider = new SampleToWaveProvider(FInput[0]);
        FWaveOut.Init(FWaveProvider);
        FWaveOut.Play();
    }
}
// Bidirectional audio bridge with a robot over a Qi session:
// sends local microphone audio to the robot's speaker and plays the robot's
// microphone audio on the local default output device, until ENTER is pressed.
public static void Execute(QiSession session)
{
    string serviceName = "CSharpSoundDownloaderSpare";
    var audioDevice = session.GetService("ALAudioDevice");
    var waveIn = new WaveInEvent();

    #region 1/4: set up sending audio to the robot
    // Lower the output sample rate from the default (48 kHz) to 16 kHz.
    // Note: only 16000, 22050, 44100, or 48000 can be selected.
    audioDevice["setParameter"].Call("outputSampleRate", 16000);

    // Affects how often the DataAvailable event below fires, i.e. the buffer
    // length. The buffer must not exceed 16384 bytes (see the official
    // documentation for details).
    waveIn.BufferMilliseconds = 200;

    // Microphone capture format: frequency must match the rate set above.
    waveIn.WaveFormat = new WaveFormat(16000, 16, 2);

    int count = 0;
    waveIn.DataAvailable += (_, e) =>
    {
        // Guard against exceeding the robot-side buffer limit.
        if (e.BytesRecorded > 16384) return;
        byte[] bufferToSend = new byte[e.BytesRecorded];
        Array.Copy(e.Buffer, bufferToSend, e.BytesRecorded);
        int p = audioDevice["sendRemoteBufferToOutput"].Post(bufferToSend.Length / 4, bufferToSend);
        Console.WriteLine($"received data, {count}");
        count++;
    };
    #endregion

    #region 2/4: receive audio from the robot — prepare the playback device
    var mmDevice = new MMDeviceEnumerator().GetDefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);
    var wavProvider = new BufferedWaveProvider(new WaveFormat(16000, 16, 1));
    var wavPlayer = new WasapiOut(mmDevice, AudioClientShareMode.Shared, false, 200);
    wavPlayer.Init(new VolumeWaveProvider16(wavProvider));
    wavPlayer.Play();
    #endregion

    #region 3/4: receive audio from the robot — enter microphone-monitoring mode
    var objBuilder = QiObjectBuilder.Create();
    // Register the processRemote callback to satisfy the ALAudioDevice-side contract.
    objBuilder.AdvertiseMethod(
        "processRemote::v(iimm)",
        (sig, arg) =>
        {
            // Handle incoming audio here.
            //Console.WriteLine("Received Buffer!");
            //Console.WriteLine(arg.Dump());
            // The data layout can be inspected to some extent via the dump above.
            byte[] raw = arg[3].ToBytes();
            wavProvider.AddSamples(raw, 0, raw.Length);
            return QiValue.Void;
        });
    // Register the service that exposes the callback above.
    session.Listen("tcp://0.0.0.0:0").Wait();
    ulong registeredId = session.RegisterService(serviceName, objBuilder.BuildObject()).GetUInt64(0UL);
    #endregion

    #region 4/4: adjust settings and perform the actual I/O
    // Magic numbers here; details at http://www.baku-dreameater.net/archives/2411
    audioDevice["setClientPreferences"].Call(serviceName, 16000, 3, 0);
    // Start.
    audioDevice["subscribe"].Call(serviceName);
    waveIn.StartRecording();
    #endregion

    Console.WriteLine("Press ENTER to quit..");
    Console.ReadLine();

    audioDevice["unsubscribe"].Call(serviceName);
    session.UnregisterService((uint)registeredId);
    wavPlayer.Stop();
    wavPlayer.Dispose();
    waveIn.StopRecording();
    waveIn.Dispose();
}