/// <summary>
/// Creates the XAudio2 engine, a mastering voice, the X3DAudio helper and the
/// default 3D listener. DEBUG builds enable the debug engine with full tracing.
/// </summary>
public XAudio2Engine()
{
#if DEBUG
    XAudio2Flags flags = XAudio2Flags.DebugEngine;
#else
    XAudio2Flags flags = XAudio2Flags.None;
#endif
    XAudio2 = new XAudio2(flags, ProcessorSpecifier.DefaultProcessor);
#if DEBUG
    // Break into the debugger on warnings; trace every available log category.
    DebugConfiguration debugConfig = new DebugConfiguration();
    debugConfig.BreakMask = (int)LogType.Warnings;
    debugConfig.TraceMask = (int) (LogType.Errors | LogType.Warnings | LogType.Information | LogType.Detail | LogType.ApiCalls | LogType.FunctionCalls | LogType.Timing | LogType.Locks | LogType.Memory | LogType.Streaming);
    debugConfig.LogThreadID = new RawBool(true);
    debugConfig.LogFileline = new RawBool(true);
    debugConfig.LogFunctionName = new RawBool(true);
    debugConfig.LogTiming = new RawBool(true);
    XAudio2.SetDebugConfiguration(debugConfig, IntPtr.Zero);
#endif
    XAudio2.CriticalError += (s, e) => Console.WriteLine("XAudio2: Critical Error. " + e.ToString());
    // NOTE(review): this mastering voice is only held in a local and never stored
    // on the instance - verify whether a field assignment was intended.
    MasteringVoice _masteringVoice = new MasteringVoice(XAudio2);
    // 3D audio initialized for a stereo speaker layout.
    X3DAudio = new X3DAudio(SharpDX.Multimedia.Speakers.Stereo);
    ResourceFactory = new XAudio2ResourceFactory(this);
    // Listener faces +Z with +Y up.
    _x3dListener = new Listener();
    _x3dListener.OrientFront = new RawVector3(0, 0, 1);
    _x3dListener.OrientTop = new RawVector3(0, 1, 0);
}
/// <summary>
/// Creates the XAudio2 engine with a stereo 44.1 kHz mastering voice and a
/// PCM wave format for the renderer.
/// </summary>
public unsafe XAudio2Renderer()
{
    // Raw PCM output format.
    waveFormat = new WaveFormat { FormatTag = WaveFormatTag.Pcm };

    xAudio = new XAudio2(XAudio2Flags.None, ProcessorSpecifier.AnyProcessor);
    // Two channels at 44100 Hz.
    masteringVoice = new MasteringVoice(xAudio, 2, 44100);
}
/// <summary>
/// Initialize the media element for playback
/// </summary>
/// <param name="streamConfig">Object containing stream configuration details</param>
void InitializeMediaPlayer(MoonlightStreamConfiguration streamConfig, AvStreamSource streamSource)
{
    this._streamSource = streamSource;

    // Describe the incoming H.264 stream (based on the MS FFmpegInterop project on GitHub).
    var videoProps = VideoEncodingProperties.CreateH264();
    videoProps.ProfileId = H264ProfileIds.High;
    videoProps.Width = (uint)streamConfig.GetWidth();
    videoProps.Height = (uint)streamConfig.GetHeight();
    videoProps.Bitrate = (uint)streamConfig.GetBitrate();

    // Media source fed with decoded samples; no buffering, no seeking.
    var videoDescriptor = new VideoStreamDescriptor(videoProps);
    _videoMss = new MediaStreamSource(videoDescriptor);
    _videoMss.BufferTime = TimeSpan.Zero;
    _videoMss.CanSeek = false;
    _videoMss.Duration = TimeSpan.Zero;
    _videoMss.SampleRequested += _videoMss_SampleRequested;

    // Audio path: 48 kHz, 16-bit, stereo through XAudio2.
    var xaudio = new XAudio2();
    var masteringVoice = new MasteringVoice(xaudio, 2, 48000);
    var format = new WaveFormat(48000, 16, 2);

    // Low-latency, full-window playback with no built-in transport controls.
    StreamDisplay.RealTimePlayback = true;
    StreamDisplay.IsFullWindow = true;
    StreamDisplay.AreTransportControlsEnabled = false;
    StreamDisplay.SetMediaStreamSource(_videoMss);

    AvStream.SetSourceVoice(new SourceVoice(xaudio, format));
}
/// <summary>
/// Initializes the sound library for playback.
/// </summary>
/// <param name="root">The root directory of the sounds.</param>
public static void initialize(String root)
{
    setRootDirectory(root);

    // Well-known sub-directories beneath the sound root.
    SoundPath = "s";
    NSoundPath = SoundPath + "\\n";
    NumPath = NSoundPath + "\\ns";

    // Main device drives positional (3D) playback.
    mainSoundDevice = new XAudio2();
    mainMasteringVoice = new MasteringVoice(mainSoundDevice);

    // XAudio 2.7 exposes the channel mask via the device details;
    // newer versions expose it on the mastering voice.
    x3DAudio = mainSoundDevice.Version == XAudio2Version.Version27
        ? new X3DAudio(mainSoundDevice.GetDeviceDetails(0).OutputFormat.ChannelMask)
        : new X3DAudio((Speakers)mainMasteringVoice.ChannelMask);

    // Separate engines so music, always-loud effects and cut scenes mix independently.
    musicDevice = new XAudio2();
    musicMasteringVoice = new MasteringVoice(musicDevice);
    alwaysLoudDevice = new XAudio2();
    alwaysLoudMasteringVoice = new MasteringVoice(alwaysLoudDevice);
    cutScenesDevice = new XAudio2();
    cutScenesMasteringVoice = new MasteringVoice(cutScenesDevice);

    // get the listener:
    setListener();
}
/// <summary>
/// Static initializer: creates the XAudio2 device and an autodetected mastering
/// voice. Any failure disables audio support instead of crashing the type
/// initializer.
/// </summary>
static SoundEffect()
{
    try
    {
        // Creating the device CAN fail (e.g. no audio hardware); keep it inside
        // the try so a failure degrades to "no audio" rather than throwing a
        // TypeInitializationException out of this static constructor.
        Device = new XAudio2();
        Device.StartEngine();

        // Let windows autodetect number of channels and sample rate.
        MasterVoice = new MasteringVoice(Device, XAudio2.DefaultChannels, XAudio2.DefaultSampleRate);
        MasterVoice.SetVolume(_masterVolume, 0);

        // The autodetected value of MasterVoice.ChannelMask corresponds to the speaker layout.
        Speakers = (Speakers)MasterVoice.ChannelMask;
    }
    catch
    {
        // Release the device and null it as we have no audio support.
        if (Device != null)
        {
            Device.Dispose();
        }
        Device = null;
        MasterVoice = null;
    }
}
/// <summary>
/// Tears down and rebuilds the playback graph (mastering voice, source voice
/// and audio buffer) and resets all sample cursors to zero.
/// </summary>
public void Reset()
{
    isPlaying = false;
    // NOTE(review): the buffer is disposed before the source voice is flushed;
    // confirm the flush does not touch the already-disposed buffer's data.
    buffer.Dispose();
    sourceVoice.FlushSourceBuffers();
    buffer = null;
    sourceVoice.Dispose();
    sourceVoice = null;
    masteringVoice.Dispose();
    masteringVoice = null;
    // Rebuild the graph using the current wave format.
    masteringVoice = new MasteringVoice(device, waveFormat.Channels, waveFormat.SamplesPerSecond);
    sourceVoice = new SourceVoice(device, waveFormat, VoiceFlags.None);
    // sourceVoice.BufferStart += new System.EventHandler<ContextEventArgs>(sourceVoice_BufferStart);
    sourceVoice.Volume = 0.5f;
    buffer = new AudioBuffer();
    buffer.AudioData = new System.IO.MemoryStream();
    bytesPerSample = (waveFormat.BitsPerSample / 8) * waveFormat.Channels;
    // Pre-allocate the double-buffered sample arrays (shorts and their byte view).
    for (int i = 0; i < BUFFER_COUNT; i++)
    {
        sampleData[i] = new short[SAMPLE_SIZE * waveFormat.Channels];
        bData[i] = new byte[SAMPLE_SIZE * bytesPerSample];
    }
    sourceVoice.SubmitSourceBuffer(buffer);
    currentBuffer = 0;
    playBuffer = 0;
    samplePos = 0;
}
/// <summary>
/// Scans the "music" directory for Ogg files, initializes the audio devices
/// (WaveOut + XAudio2) and starts the background init thread. If device
/// creation fails, audio is disabled rather than crashing.
/// </summary>
/// <param name="soundEffectVolume">Initial sound-effect volume.</param>
public MyAudio(float soundEffectVolume)
{
    DirectoryInfo directoryInfo = new DirectoryInfo("music");
    int length = 0;
    foreach (FileInfo file in directoryInfo.GetFiles())
    {
        // Use FileInfo.Extension instead of Split('.')[1]: the old check threw
        // IndexOutOfRangeException for names without a dot and missed files
        // with more than one dot (e.g. "track.01.ogg").
        if (string.Equals(file.Extension, ".ogg", StringComparison.OrdinalIgnoreCase))
        {
            ++length;
            this.musicNames.Add(file.Name);
        }
    }
    this.musiclength = new int[2, length];
    try
    {
        this.waveOut = new WaveOut(-1, 44100, 16, 2);
        this.xaDevice = new SlimDX.XAudio2.XAudio2();
        this.xaMaster = new MasteringVoice(this.xaDevice);
        this.SoundEffectVolume = soundEffectVolume;
    }
    catch
    {
        // No usable audio device: run silently.
        this.disabled = true;
    }
    this.thread_1 = new Thread(new ThreadStart(this.Init));
    this.thread_1.Start();
    this.pianoNotePlayer = new PianoNotePlayer();
}
/// <summary>
/// Stops the engine and releases the XAudio2 device.
/// (Not necessarily a full dispose.)
/// </summary>
public void Dispose()
{
    StopEngine();
    // Drop the voice reference first, then release the engine itself.
    MasteringVoice = null;
    XAudio2.Dispose();
    XAudio2 = null;
}
/// <summary>
/// Creates the XAudio2 engine, sets the master output to full volume and
/// starts the engine.
/// </summary>
public WaveManager()
{
    xAudio = new XAudio2();
    var masterVoice = new MasteringVoice(xAudio);
    // Full volume, applied immediately (operation set 0).
    masterVoice.SetVolume(1, 0);
    xAudio.StartEngine();
}
/// <summary>
/// Releases the mastering voice, if any, and clears the reference.
/// </summary>
private void DisposeMasteringVoice()
{
    MasteringVoice?.Dispose();
    MasteringVoice = null;
}
/// <summary>
/// Re-targets audio output at the given device index: parks every live voice,
/// rebuilds the mastering voice and X3DAudio instance, then revives the voices.
/// </summary>
/// <param name="deviceIndex">Index of the output device to switch to.</param>
private void SetAudioDevice(int deviceIndex)
{
    // Park every live voice before the mastering voice they feed goes away.
    if (streamSlots != null)
    {
        foreach (var s in streamSlots.Values)
        {
            s.TemporaryShutdown();
        }
    }
    if (freeVoices != null)
    {
        foreach (var v in freeVoices)
        {
            v.TemporaryShutdown();
        }
    }
    if (usedVoices != null)
    {
        foreach (var v in usedVoices)
        {
            v.TemporaryShutdown();
        }
    }
    // Drop the old mastering voice / X3DAudio pair.
    if (masteringVoice != null)
    {
        masteringVoice.Dispose();
        X3DInstance.Dispose();
        masteringVoice = null;
        X3DInstance = null;
    }
    masteringVoice = new MasteringVoice(AudioDevice, XAudio2.DefaultChannels, XAudio2.DefaultSampleRate, deviceIndex);
    DeviceDetails = AudioDevice.GetDeviceDetails(deviceIndex);
    // 340f is the speed-of-sound parameter of the X3DAudio constructor (m/s).
    X3DInstance = new X3DAudio(DeviceDetails.OutputFormat.ChannelMask, 340f);
    // Bring the parked voices back up on the new device.
    if (streamSlots != null)
    {
        foreach (var s in streamSlots.Values)
        {
            s.ShutdownRevival();
        }
    }
    if (freeVoices != null)
    {
        foreach (var v in freeVoices)
        {
            v.ShutdownRevival();
        }
    }
    if (usedVoices != null)
    {
        foreach (var v in usedVoices)
        {
            v.ShutdownRevival();
        }
    }
}
// Constructor.
/// <summary>
/// Builds the audio pipeline: a frame buffer, a ring of native-memory audio
/// buffers and an XAudio2 source voice, then starts the playback loop task.
/// Any failure permanently disables sound for the process (anyInitFail).
/// </summary>
/// <param name="SampleRate">Output sample rate in Hz.</param>
public SoundX(int SampleRate)
{
    this.SampleRate = SampleRate;
    Stopped = true;
    // A previous instance already failed to initialize audio; don't retry.
    if (anyInitFail)
    {
        return;
    }
    try
    {
        frameBuffer = new FrameBuffer <short>(SampleRate / FRAMES_PER_SECOND * CHANNELS, SampleRate / 20);
        noise = new Noise(SampleRate, MAX_SOUND_AMPLITUDE);
        xaudio = new XAudio2();
        masteringVoice = new MasteringVoice(xaudio, CHANNELS, SampleRate);
        bufferEndEvent = new AutoResetEvent(false);
        // One frame of 16-bit samples (hence the *2 bytes per sample).
        var frameSizeBytes = SampleRate / FRAMES_PER_SECOND * CHANNELS * 2;
        // Ring of reusable buffers backed by unmanaged memory.
        for (int i = 0; i < RING_SIZE; i++)
        {
            audioBuffersRing[i] = new AudioBuffer()
            {
                AudioBytes = frameSizeBytes,
                LoopCount = 0,
                Flags = BufferFlags.None,
            };
            memBuffers[i].Size = frameSizeBytes;
            memBuffers[i].Pointer = Utilities.AllocateMemory(memBuffers[i].Size);
        }
        sourceVoice = new SourceVoice(xaudio, new WaveFormat(SampleRate, BITS_PER_SAMPLE, CHANNELS), true);
        xaudio.StartEngine();
        // Wake the playback loop whenever a buffer finishes.
        sourceVoice.BufferEnd += (o) => bufferEndEvent?.Set();
        sourceVoice.Start();
        playingTask = Task.Factory.StartNew(Loop, TaskCreationOptions.LongRunning);
        enabled = false;
        on = false;
        mute = false;
        Stopped = false;
    }
    catch (Exception)
    {
        // Disable sound globally; later constructions bail out early (see above).
        anyInitFail = true;
        enabled = false;
        Stopped = true;
    }
}
/// <summary>
/// Creates the XAudio2 engine at half master volume and pre-allocates slots
/// for the requested number of simultaneous sounds.
/// </summary>
/// <param name="sounds">Number of sound slots to allocate.</param>
public SoundManager(int sounds)
{
    _audio = new XAudio2();
    _masteringVoice = new MasteringVoice(_audio);
    // Default master output to half volume.
    _masteringVoice.SetVolume(0.5f);

    _sourceVoices = new SourceVoice[sounds];
    _audioBuffers = new AudioBuffer[sounds];
    _soundStreams = new SoundStream[sounds];
}
/// <summary>
/// Creates the XAudio2 engine and mastering voice, unless running under the
/// NUnit console (outside NCrunch), where device creation is skipped.
/// </summary>
public XAudioDevice()
{
    // No real device in the NUnit console environment.
    if (StackTraceExtensions.StartedFromNUnitConsoleButNotFromNCrunch)
        return;

    XAudio = new XAudio2();
    MasteringVoice = new MasteringVoice(XAudio);
}
/// <summary>
/// Creates the XAudio2 engine at half master volume and pre-allocates arrays
/// for the requested number of voices.
/// </summary>
/// <param name="cntVoices">Number of voice slots to allocate.</param>
public SoundManager(int cntVoices)
{
    audio = new XAudio2();
    masteringVoice = new MasteringVoice(audio);
    // Default master output to half volume.
    masteringVoice.SetVolume(0.5f);

    streams = new SoundStream[cntVoices];
    buffers = new AudioBuffer[cntVoices];
    voices = new SourceVoice[cntVoices];
}
/// <summary>
/// Initializes XAudio.
/// </summary>
internal static void PlatformInitialize()
{
    try
    {
        if (Device == null)
        {
#if !WINDOWS_UAP && DEBUG
            // In desktop debug builds, try the debug engine first; the inner
            // try's catch falls through (via the #endif splice) to the plain
            // engine below when the XAudio2 SDK is not installed.
            try
            {
                //Fails if the XAudio2 SDK is not installed
                Device = new XAudio2(XAudio2Flags.DebugEngine, ProcessorSpecifier.DefaultProcessor);
                Device.StartEngine();
            }
            catch
#endif
            {
                Device = new XAudio2(XAudio2Flags.None, ProcessorSpecifier.DefaultProcessor);
                Device.StartEngine();
            }
        }

        // Just use the default device.
#if WINDOWS_UAP
        string deviceId = null;
#else
        const int deviceId = 0;
#endif

        if (MasterVoice == null)
        {
            // Let windows autodetect number of channels and sample rate.
            MasterVoice = new MasteringVoice(Device, XAudio2.DefaultChannels, XAudio2.DefaultSampleRate);
        }

        // The autodetected value of MasterVoice.ChannelMask corresponds to the speaker layout.
#if WINDOWS_UAP
        Speakers = (Speakers)MasterVoice.ChannelMask;
#else
        // XAudio 2.7 only exposes the channel mask through the device details.
        Speakers = Device.Version == XAudio2Version.Version27 ? Device.GetDeviceDetails(deviceId).OutputFormat.ChannelMask: (Speakers)MasterVoice.ChannelMask;
#endif
    }
    catch
    {
        // Release the device and null it as
        // we have no audio support.
        if (Device != null)
        {
            Device.Dispose();
            Device = null;
        }
        MasterVoice = null;
    }
}
/// <summary>
/// Creates the shared XAudio 2.7 device and mastering voice, records the
/// speaker layout and logs all available output devices. On any failure audio
/// support is released and disabled; the sound world is created regardless.
/// </summary>
public override void Initialize()
{
    try
    {
        if (Device == null)
        {
            Device = new XAudio2(XAudio2Flags.None, ProcessorSpecifier.DefaultProcessor, XAudio2Version.Version27);
            Device.StartEngine();
        }

        // Just use the default device.
        const int deviceId = 0;

        if (MasterVoice == null)
        {
            // Let windows autodetect number of channels and sample rate.
            MasterVoice = new MasteringVoice(Device, XAudio2.DefaultChannels, XAudio2.DefaultSampleRate, deviceId);
            MasterVoice.SetVolume(_masterVolume, 0);
        }

        // The channel mask of the default device's output format is the speaker
        // layout. (A second, unused GetDeviceDetails(0) call was removed; one
        // query is enough.)
        var deviceDetails = Device.GetDeviceDetails(deviceId);
        Speakers = deviceDetails.OutputFormat.ChannelMask;

        // NOTE(review): result is unused; presumably reading Device3D lazily
        // initializes the 3D audio engine - confirm before removing.
        var dev3d = Device3D;

        Log.Debug("Audio devices :");
        for (int devId = 0; devId < Device.DeviceCount; devId++)
        {
            var device = Device.GetDeviceDetails(devId);
            Log.Debug("[{1}] {0}", device.DisplayName, devId);
            Log.Debug(" role : {0}", device.Role);
            Log.Debug(" id : {0}", device.DeviceID);
        }
    }
    catch (Exception e)
    {
        Log.Error(e.ToString());
        // Release the device and null it as
        // we have no audio support.
        if (Device != null)
        {
            Device.Dispose();
            Device = null;
        }
        MasterVoice = null;
    }
    soundWorld = new SoundWorld(Game);
}
/// <summary>
/// Starts Media Foundation and the XAudio2 engine with a default mastering voice.
/// </summary>
private static void InitializeXAudio2()
{
    // Media Foundation startup is mandatory before using any SharpDX.MediaFoundation classes.
    MediaManager.Startup();

    xaudio2 = new XAudio2();
    xaudio2.StartEngine();
    masteringVoice = new MasteringVoice(xaudio2);
}
/// <summary>
/// Creates a debug XAudio2 engine, a default mastering voice and the X3DAudio
/// instance for the default output device.
/// </summary>
public Program()
{
    // Debug engine so XAudio2 validation output is available during development.
    audioDevice = new XAudio2(XAudio2Flags.DebugEngine, ProcessorSpecifier.AnyProcessor);
    masteringVoice = new MasteringVoice(audioDevice, XAudio2.DefaultChannels, XAudio2.DefaultSampleRate, 0);

    // X3DAudio needs the output channel mask; 340f is its speed-of-sound parameter (m/s).
    DeviceDetails details = audioDevice.GetDeviceDetails(0);
    x3DInstance = new X3DAudio(details.OutputFormat.ChannelMask, 340f);
    //x3d.Calculate(listener, emitter, SlimDX.X3DAudio.CalculateFlags.ZeroCenter, 2, 2);
}
/// <summary>
/// Static initializer: creates the audio device and mastering voice and
/// registers their disposal for process exit.
/// </summary>
static XAudio2PlaybackEngine()
{
    device = new AudioDevice();
    voice = new MasteringVoice(device);

    // Tear the voice down before the device that owns it.
    AppDomain.CurrentDomain.ProcessExit += (sender, eventArgs) =>
    {
        voice.Dispose();
        device.Dispose();
    };
}
/// <summary>
/// Initializes a new instance of the <see cref="SoundService"/> class:
/// buffer cache, lock object, and a started XAudio2 engine at full volume.
/// </summary>
public SoundService()
{
    cachedBuffers = new Dictionary <string, AudioBufferAndMetaData>();
    lockObject = new object();

    xAudio = new XAudio2();
    masteringVoice = new MasteringVoice(xAudio);
    // Full volume, applied immediately (operation set 0).
    masteringVoice.SetVolume(1, 0);
    xAudio.StartEngine();
}
/// <summary>
/// Wraps one sound file: borrows the mixer's engine and master voice and
/// tracks the per-play source voices it will create.
/// </summary>
internal SoundBuffer(AudioMixer audioMixer, string fileName, int defaultRepeat)
{
    // Share the mixer's engine and master voice rather than owning our own.
    xAudio2 = audioMixer.Mixer;
    masteringVoice = audioMixer.MasterVoice;

    this.fileName = fileName;
    this.defaultRepeat = defaultRepeat;

    voiceList = new Dictionary <int, SourceVoice>();
    voiceListKey = 0;
    isLoaded = false;
}
/// <summary>
/// Shows the splash screen and plays the embedded logo jingle through a
/// throw-away XAudio2 graph. (A large block of dead, commented-out
/// font-installation code was removed.)
/// </summary>
private void frmRomanSplashScreen_Load(object sender, EventArgs e)
{
    this.Show();

    Assembly assembly = Assembly.GetExecutingAssembly();
    XAudio2 xaudio = new XAudio2();
    var masteringsound = new MasteringVoice(xaudio);

    // Decode the embedded wav resource into a single end-of-stream buffer.
    SoundStream logo_soundstream = new SoundStream(assembly.GetManifestResourceStream("Arriba_Ultimate_Study_Guide.Audio.logosong.wav"));
    WaveFormat logo_waveFormat = logo_soundstream.Format;
    AudioBuffer logo_buffer = new AudioBuffer
    {
        Stream = logo_soundstream.ToDataStream(),
        AudioBytes = (int)logo_soundstream.Length,
        Flags = BufferFlags.EndOfStream
    };

    SourceVoice logo_voice = new SourceVoice(xaudio, logo_waveFormat, true);
    logo_voice.SubmitSourceBuffer(logo_buffer, logo_soundstream.DecodedPacketsInfo);
    logo_voice.Start();
}
/// <summary>
/// Creates the XAudio2 device and a mastering voice on the configured output
/// device, falling back to the system default when the configured device name
/// is not found.
/// </summary>
public XAudio2SoundOutput(Sound sound)
{
    _sound = sound;
    _device = new XAudio2();

    // Match the configured device by display name; null means "not found".
    int? deviceIndex = Enumerable.Range(0, _device.DeviceCount)
        .Select(n => (int?)n)
        .FirstOrDefault(n => _device.GetDeviceDetails(n.Value).DisplayName == Global.Config.SoundDevice);

    if (deviceIndex == null)
    {
        _masteringVoice = new MasteringVoice(_device, Sound.ChannelCount, Sound.SampleRate);
    }
    else
    {
        _masteringVoice = new MasteringVoice(_device, Sound.ChannelCount, Sound.SampleRate, deviceIndex.Value);
    }
}
/// <summary>
/// Sets up XAudio2 for Ogg playback.
/// </summary>
/// <returns>True on success and false on failure. Failure is likely the cause of XAudio2 missing from the system.</returns>
public static bool initializeOgg()
{
    try
    {
        sizeOfBuffer = BufferSize.medium;
        xAudio2Device = new XAudio2();
        masteringVoice = new MasteringVoice(xAudio2Device);
        return true;
    }
    catch (Exception e)
    {
        // Previously the caught exception was silently discarded; log it so the
        // failure cause is diagnosable (same pattern as the sibling overload).
        System.Diagnostics.Debug.WriteLine(e.Message + e.StackTrace);
        return false;
    }
}
/// <summary>
/// Loads a wav file from the Content folder into a single end-of-stream
/// audio buffer, ready for playback.
/// </summary>
/// <param name="fileName">File name relative to the Content directory.</param>
public Audio(String fileName)
{
    device = new XAudio2();
    masteringVoice = new MasteringVoice(device);

    stream = new SoundStream(File.OpenRead("Content/" + fileName));
    buffer = new AudioBuffer
    {
        Stream = stream.ToDataStream(),
        AudioBytes = (int)stream.Length,
        Flags = BufferFlags.EndOfStream
    };
    // The audio data now lives in the buffer's data stream; the source can go.
    stream.Close();
}
/// <summary>
/// Static initializer: builds the shared audio graph — engine, mastering
/// voice, a submix voice routed into it, and the voice pool.
/// </summary>
static AudioDefines()
{
    WaveFormat = new WaveFormat();
    XAudio = new XAudio2();
    MasteringVoice = new MasteringVoice(XAudio);
    SubmixVoice = new SubmixVoice(XAudio);

    // Route the submix output through the mastering voice.
    var sends = new[] { new VoiceSendDescriptor(MasteringVoice) };
    SubmixVoice.SetOutputVoices(sends);

    VoicePool = new VoicePool(XAudio, SubmixVoice);
}
/// <summary>
/// Creates an XAudio2 engine plus X3DAudio configured from the default
/// device's channel mask; the listener starts at the origin with no rotation.
/// </summary>
public X3DAudioEngine()
{
    _xaudio2 = new XAudio2();
    _masteringVoice = new MasteringVoice(_xaudio2);

    // X3DAudio must be initialized with the output speaker configuration.
    _deviceFormat = _xaudio2.GetDeviceDetails(0).OutputFormat;
    _x3dAudio = new X3DAudio(_deviceFormat.ChannelMask);

    Position = new Vector3D(0, 0, 0);
    Rotation = System.Windows.Media.Media3D.Quaternion.Identity;
}
/// <summary>
/// Creates the XAudio2 engine and kicks off asynchronous startup of the
/// engine and mastering voice on a thread-pool task.
/// NOTE(review): _masteringVoice is assigned asynchronously, so callers may
/// observe it as null until the task completes - confirm all consumers
/// tolerate this or wait for initialization.
/// </summary>
public AudioFx()
{
    _xaudio2 = new XAudio2();
    Task.Run(() =>
    {
        _xaudio2.StartEngine();
        _masteringVoice = new MasteringVoice(_xaudio2);
    });
    _cues = new List <Cue>();
}
/// <summary>
/// Releases the mastering voice and the device exactly once; subsequent
/// calls are no-ops.
/// </summary>
public void Dispose()
{
    if (_disposed)
    {
        return;
    }

    // Dispose the voice before the engine that owns it.
    _masteringVoice.Dispose();
    _masteringVoice = null;
    _device.Dispose();
    _device = null;

    _disposed = true;
}
/// <summary>
/// Creates the global texture dictionary and attempts to bring up the audio
/// device and BGM player; on failure the user is notified and the game keeps
/// running without sound initialization.
/// </summary>
public GlobalDataPackage()
{
    TextureObjectDictionary = new Dictionary <string, TextureObject>();
    try
    {
        DeviceXaudio2 = new XAudio2();
        MasteringVoice = new MasteringVoice(DeviceXaudio2);
        SoundInitSuccess = true;
        BGM_Player = new Wave_Player(DeviceXaudio2);
    }
    catch
    {
        // Audio hardware failure: show the (localized) error dialog.
        MessageBox.Show("音频设备故障", "Sound Initial Error");
    }
}
/// <summary>
/// Sets up XAudio2 for Ogg playback.
/// </summary>
/// <returns>True on success and false on failure. Failure is likely the cause of XAudio2 missing from the system.</returns>
public static bool initializeOgg()
{
    try
    {
        sizeOfBuffer = BufferSize.medium;
        xAudio2Device = new XAudio2();
        masteringVoice = new MasteringVoice(xAudio2Device);
        return true;
    }
    catch (Exception e)
    {
        // Record the failure for diagnostics; the caller falls back to silence.
        System.Diagnostics.Debug.WriteLine(e.Message + e.StackTrace);
        return false;
    }
}
/// <summary>
/// Creates the XAudio2 device and a mastering voice, preferring the output
/// device whose display name matches the configuration, else the default.
/// </summary>
public XAudio2SoundOutput(Sound sound)
{
    _sound = sound;
    _device = new XAudio2();

    // Find the configured device by display name (null when absent).
    int? deviceIndex = Enumerable.Range(0, _device.DeviceCount)
        .Select(n => (int?)n)
        .FirstOrDefault(n => _device.GetDeviceDetails(n.Value).DisplayName == Global.Config.SoundDevice);

    _masteringVoice = deviceIndex.HasValue
        ? new MasteringVoice(_device, Sound.ChannelCount, Sound.SampleRate, deviceIndex.Value)
        : new MasteringVoice(_device, Sound.ChannelCount, Sound.SampleRate);
}
/// <summary>
/// Creates the engine and a mastering voice, plays two PCM files, then
/// releases everything (voice before device, as before).
/// </summary>
static void Main()
{
    using (XAudio2 device = new XAudio2())
    using (MasteringVoice masteringVoice = new MasteringVoice(device))
    {
        // play a PCM file
        PlayPCM(device, "MusicMono.wav");

        // play a 5.1 PCM wave extensible file
        PlayPCM(device, "MusicSurround.wav");
    }
}
/// <summary>
/// Builds the XAudio2 output graph: a source voice fed 16-bit stereo PCM at
/// 32040 Hz through a small ring of reusable byte buffers. On any failure the
/// partially built driver is disposed and the exception rethrown.
/// </summary>
/// <param name="config">Driver configuration (not read in this constructor).</param>
public XAudio2Driver(Configuration config)
{
    IsDisposed = false;
    try
    {
        _isBusy = new WaitableBool(false);
        _device = new XAudio2();
        _masterVoice = new MasteringVoice(_device);
        // Stereo 16-bit PCM at 32040 Hz (presumably the SNES output rate - confirm).
        _sourceVoice = new SourceVoice(_device, new WaveFormat()
        {
            FormatTag = WaveFormatTag.Pcm,
            Channels = 2,
            BitsPerSample = 16,
            SamplesPerSecond = 32040,
            AverageBytesPerSecond = 2 * (16 / 8) * 32040,
            BlockAlignment = 2 * (16 / 8)
        }, VoiceFlags.None, 2.0F);
        // Mark the driver free again once the queue drops below capacity.
        _sourceVoice.BufferStart += (s, e) =>
        {
            if (_sourceVoice.State.BuffersQueued < BufferCount)
            {
                _isBusy.Value = false;
            }
        };
        // Pre-allocate the byte buffers and their DataStream views.
        _buffers = new byte[BufferCount][];
        _bufferStreams = new DataStream[BufferCount];
        for (int i = 0; i < BufferCount; i++)
        {
            _buffers[i] = new byte[Snes.MaxAudioBufferLength * 4];
            _bufferStreams[i] = new DataStream(_buffers[i], true, false);
        }
        _bufferCursor = 0;
        _audioBuffer = new AudioBuffer();
        _isPaused = true;
    }
    catch
    {
        // Roll back any partially constructed state, then surface the failure.
        Dispose();
        throw;
    }
}
/// <summary>
/// Static initializer: creates the XAudio2 engine with a two-channel
/// (stereo) master output.
/// </summary>
static Sound()
{
    XAudio2 = new XAudio2();
    Master = new MasteringVoice(XAudio2, 2);
}
/// <summary>
/// Creates the XAudio2 device and mastering voice, then resets playback state.
/// </summary>
public void Create()
{
    device = new XAudio2();
    mVoice = new MasteringVoice(device);
    Reset();
}
/// <summary>
/// Builds a 22.5 ms PPM (pulse-position modulation) frame for 8 RC channels,
/// renders it to 16-bit PCM via GenPPM() and starts playback through XAudio2.
/// A BufferStart handler (srcVoice_BufferStart) is wired up - presumably to
/// refill/resubmit the buffer; confirm in that handler.
/// </summary>
/// <param name="win">Window handle (not used inside this method).</param>
public void PlayPPM(IntPtr win)
{
    Rate = 192000; //44100 on cheapo, 96000 on AC97, 192000 on HD Audio
    // its the number of samples that exist for each second of audio
    channels = 2; // 1 = mono, 2 = stereo
    PPMSamples = (int)(0.0225 * Rate * channels); // 22 or 22.5ms in samples, rounded up
    // no. of bytes per second = channels * rate * bytes in one sample
    microsec = Rate / 10000.0; // 192 = 1ms, 19.2 = 0.1ms or 1mis @ 192khz
    PPMchannels = new Dictionary<int, double>();
    frame = new List<short>();
    Amplitude = 32760;
    /*WaveFile wFile; wFile = new WaveFile(channels, 16, Rate); */
    //Set channels to neutral except throttle, throttle = zero.
    PPMchannels.Add(1, 10.0); //Throttle
    PPMchannels.Add(2, 50.0); //Ailerons
    PPMchannels.Add(3, 50.0); //Stab
    PPMchannels.Add(4, 50.0); //Rudder
    PPMchannels.Add(5, 50.0);
    PPMchannels.Add(6, 50.0);
    PPMchannels.Add(7, 50.0);
    PPMchannels.Add(8, 50.0);
    byte[] data = GenPPM();
    /*wFile.SetData(data, data.Length); wFile.WriteFile(@"C:\Users\kang\Desktop\test.wav"); */
    // Copy the rendered frame into a rewound memory stream for the audio buffer.
    ms = new MemoryStream();
    ms.SetLength(0);
    ms.Write(data, 0, data.Length);
    ms.Position = 0;
    // Describe the raw PCM produced by GenPPM().
    wf = new WaveFormat();
    wf.FormatTag = WaveFormatTag.Pcm;
    wf.BitsPerSample = (short)16;
    wf.Channels = channels;
    wf.SamplesPerSecond = Rate;
    wf.BlockAlignment = (short)(wf.Channels * wf.BitsPerSample / 8);
    wf.AverageBytesPerSecond = wf.SamplesPerSecond * wf.BlockAlignment;
    device = new XAudio2();
    device.StartEngine();
    masteringVoice = new MasteringVoice(device);
    srcVoice = new SourceVoice(device, wf);
    buffer = new AudioBuffer();
    buffer.AudioData = ms;
    buffer.AudioBytes = (int)data.Length;
    buffer.Flags = SlimDX.XAudio2.BufferFlags.None;
    srcVoice.BufferStart += new EventHandler<ContextEventArgs>(srcVoice_BufferStart);
    srcVoice.FrequencyRatio = 1;
    srcVoice.SubmitSourceBuffer(buffer);
    srcVoice.Start();
}
/// <summary>
/// Initializes a new instance of the <see cref="Audio"/> class: an XAudio2
/// engine, its mastering voice and an empty source-voice list.
/// </summary>
public Audio()
{
    sources = new List<SourceVoice>();
    audio = new XAudio2();
    master = new MasteringVoice(audio);
}
/// <summary>
/// Demo entry point: sets up D3D rendering into a form, streams an xWMA tune
/// through XAudio2, wires up DotRocket sync, and clears the screen each frame
/// with colors driven by the sync tracks.
/// </summary>
static void Main()
{
    var form = new RenderForm("DotRocket/SlimDX example");
    // Single-buffered swap chain rendering straight into the form.
    var description = new SwapChainDescription()
    {
        BufferCount = 1,
        Usage = Usage.RenderTargetOutput,
        OutputHandle = form.Handle,
        IsWindowed = true,
        ModeDescription = new ModeDescription(0, 0, new Rational(60, 1), Format.R8G8B8A8_UNorm),
        SampleDescription = new SampleDescription(1, 0),
        Flags = SwapChainFlags.AllowModeSwitch,
        SwapEffect = SwapEffect.Discard
    };

    // Setup rendering
    Device device;
    SwapChain swapChain;
    Device.CreateWithSwapChain(DriverType.Hardware, DeviceCreationFlags.None, description, out device, out swapChain);
    RenderTargetView renderTarget;
    using (var resource = Resource.FromSwapChain<Texture2D>(swapChain, 0))
        renderTarget = new RenderTargetView(device, resource);
    var context = device.ImmediateContext;
    var viewport = new Viewport(0.0f, 0.0f, form.ClientSize.Width, form.ClientSize.Height);
    context.OutputMerger.SetTargets(renderTarget);
    context.Rasterizer.SetViewports(viewport);

    // Prevent alt+enter (broken on WinForms)
    using (var factory = swapChain.GetParent<Factory>())
        factory.SetWindowAssociation(form.Handle, WindowAssociationFlags.IgnoreAltEnter);

    // Setup audio-streaming: one end-of-stream xWMA buffer played in full.
    XAudio2 xaudio2 = new XAudio2();
    stream = new XWMAStream("tune.xwma");
    MasteringVoice masteringVoice = new MasteringVoice(xaudio2);
    sourceVoice = new SourceVoice(xaudio2, stream.Format);
    audioBuffer = new AudioBuffer();
    audioBuffer.AudioData = stream;
    audioBuffer.AudioBytes = (int)stream.Length;
    audioBuffer.Flags = BufferFlags.EndOfStream;
    sourceVoice.SubmitSourceBuffer(audioBuffer, stream.DecodedPacketsInfo);
    sourceVoice.Start();

    // Setup DotRocket
#if DEBUG
    // Debug builds connect to a local Rocket editor for live sync editing.
    DotRocket.Device rocket = new DotRocket.ClientDevice("sync");
    rocket.OnPause += Pause;
    rocket.OnSetRow += SetRow;
    rocket.OnIsPlaying += IsPlaying;
    rocket.Connect("localhost", 1338);
#else
    // Release builds play back the exported track data.
    DotRocket.Device rocket = new DotRocket.PlayerDevice("sync");
#endif

    // Get our beloved tracks!
    DotRocket.Track clear_r = rocket.GetTrack("clear.r");
    DotRocket.Track clear_g = rocket.GetTrack("clear.g");
    DotRocket.Track clear_b = rocket.GetTrack("clear.b");

    MessagePump.Run(form, () =>
    {
        // Hammertime.
        // Derive the current sync row from the number of audio samples played.
        double row = ((double)(sourceVoice.State.SamplesPlayed - samplesBias) / stream.Format.SamplesPerSecond) * rowRate;

        // Paint some stuff.
        rocket.Update((int)System.Math.Floor(row));
        context.ClearRenderTargetView(renderTarget, new Color4(
            clear_r.GetValue(row),
            clear_g.GetValue(row),
            clear_b.GetValue(row)));
        swapChain.Present(0, PresentFlags.None);
    });

    // clean up all resources
    // anything we missed will show up in the debug output
    renderTarget.Dispose();
    swapChain.Dispose();
    device.Dispose();
}