Example #1
		public unsafe XAudio2Renderer()
		{
			waveFormat = new WaveFormat();
			waveFormat.FormatTag = WaveFormatTag.Pcm;
			xAudio = new XAudio2(XAudio2Flags.None, ProcessorSpecifier.AnyProcessor);
			masteringVoice = new MasteringVoice(xAudio, 2, 44100);
		}
Example #2
		public override void Dispose(bool disposing)
		{
			if (disposing)
			{
				if (sourceVoice != null)
				{
					sourceVoice.FlushSourceBuffers();
					sourceVoice.Stop();
					sourceVoice.Dispose();
					sourceVoice = null;
				}
				if (audioBuffer != null)
				{
					audioBuffer.AudioData.Dispose();
					audioBuffer.AudioData = null; // Just to be clean…
					audioBuffer.Dispose();
					audioBuffer = null;
				}
				if (xAudio != null)
				{
					xAudio.StopEngine();
					xAudio.Dispose();
					xAudio = null;
				}
			}
		}
Example #3
        static void PlayPCM(XAudio2 device, string fileName)
        {
            //WaveStream stream = new WaveStream(fileName);
            var s = System.IO.File.OpenRead(fileName);
            WaveStream stream = new WaveStream(s);
            s.Close();

            AudioBuffer buffer = new AudioBuffer();
            buffer.AudioData = stream;
            buffer.AudioBytes = (int)stream.Length;
            buffer.Flags = BufferFlags.EndOfStream;

            SourceVoice sourceVoice = new SourceVoice(device, stream.Format);
            sourceVoice.SubmitSourceBuffer(buffer);
            sourceVoice.Start();

            // loop until the sound is done playing
            while (sourceVoice.State.BuffersQueued > 0)
            {
                if (GetAsyncKeyState(VK_ESCAPE) != 0)
                    break;

                Thread.Sleep(10);
            }

            // wait until the escape key is released
            while (GetAsyncKeyState(VK_ESCAPE) != 0)
                Thread.Sleep(10);

            // clean up the voice, buffer, and stream
            buffer.Dispose();
            sourceVoice.Dispose();
            stream.Dispose();
        }
Example #4
		public static IEnumerable<string> GetDeviceNames()
		{
			using (XAudio2 device = new XAudio2())
			{
				return Enumerable.Range(0, device.DeviceCount).Select(n => device.GetDeviceDetails(n).DisplayName).ToList();
			}
		}
Example #5
        public Program()
        {
            audioDevice = new XAudio2(XAudio2Flags.DebugEngine, ProcessorSpecifier.AnyProcessor);
            masteringVoice = new MasteringVoice(audioDevice, XAudio2.DefaultChannels, XAudio2.DefaultSampleRate, 0);

            DeviceDetails deviceDetails = audioDevice.GetDeviceDetails(0);
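            // 340f is the speed of sound in m/s, used by X3DAudio for Doppler calculations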
            x3DInstance = new X3DAudio(deviceDetails.OutputFormat.ChannelMask, 340f);

            //x3d.Calculate(listener, emitter, SlimDX.X3DAudio.CalculateFlags.ZeroCenter, 2, 2);
        }
Example #6
		public XAudio2SoundOutput(Sound sound)
		{
			_sound = sound;
			_device = new XAudio2();
			int? deviceIndex = Enumerable.Range(0, _device.DeviceCount)
				.Select(n => (int?)n)
				.FirstOrDefault(n => _device.GetDeviceDetails(n.Value).DisplayName == Global.Config.SoundDevice);
			_masteringVoice = deviceIndex == null ?
				new MasteringVoice(_device, Sound.ChannelCount, Sound.SampleRate) :
				new MasteringVoice(_device, Sound.ChannelCount, Sound.SampleRate, deviceIndex.Value);
		}
Example #7
		public void Dispose()
		{
			if (_disposed) return;

			_masteringVoice.Dispose();
			_masteringVoice = null;

			_device.Dispose();
			_device = null;

			_disposed = true;
		}
Example #8
        static void Main()
        {
            XAudio2 device = new XAudio2();

            MasteringVoice masteringVoice = new MasteringVoice(device);

            // play a PCM file
            PlayPCM(device, "MusicMono.wav");

            // play a 5.1 PCM wave extensible file
            PlayPCM(device, "MusicSurround.wav");

            masteringVoice.Dispose();
            device.Dispose();
        }
Example #9
        public XAudio2Driver(Configuration config)
        {
            IsDisposed = false;

            try
            {
                _isBusy = new WaitableBool(false);

                _device      = new XAudio2();
                _masterVoice = new MasteringVoice(_device);
                _sourceVoice = new SourceVoice(_device,
                                               new WaveFormat()
                                               {
                                                   FormatTag             = WaveFormatTag.Pcm,
                                                   Channels              = 2,
                                                   BitsPerSample         = 16,
                                                   SamplesPerSecond      = 32040,
                                                   AverageBytesPerSecond = 2 * (16 / 8) * 32040,
                                                   BlockAlignment        = 2 * (16 / 8)
                                               },
                                               VoiceFlags.None, 2.0F);
                _sourceVoice.BufferStart += (s, e) =>
                {
                    if (_sourceVoice.State.BuffersQueued < BufferCount)
                    {
                        _isBusy.Value = false;
                    }
                };

                _buffers       = new byte[BufferCount][];
                _bufferStreams = new DataStream[BufferCount];
                for (int i = 0; i < BufferCount; i++)
                {
                    _buffers[i]       = new byte[Snes.MaxAudioBufferLength * 4];
                    _bufferStreams[i] = new DataStream(_buffers[i], true, false);
                }

                _bufferCursor = 0;
                _audioBuffer = new AudioBuffer();
                _isPaused = true;
            }
            catch
            {
                Dispose();
                throw;
            }
        }
Example #10
 public OggBuffer(XAudio2 device, String[] fileNames)
 {
     memories       = new List <AudioBuffer>();
     this.fileNames = fileNames;
     this.device    = device;
 }
Example #11
        private void StartEngine()
        {
            if (m_audioEngine != null)
            {
                DisposeVoices();
                m_audioEngine.Dispose();
            }

            // Init/reinit engine
            m_audioEngine = new XAudio2();

            // A way to disable SharpDX callbacks
            //var meth = m_audioEngine.GetType().GetMethod("UnregisterForCallbacks_", System.Reflection.BindingFlags.NonPublic | System.Reflection.BindingFlags.Instance);
            //var callbacks = m_audioEngine.GetType().GetField("engineShadowPtr", System.Reflection.BindingFlags.Instance | System.Reflection.BindingFlags.NonPublic);
            //meth.Invoke((object)m_audioEngine, new object[] { callbacks.GetValue(m_audioEngine) });

            m_audioEngine.CriticalError += m_audioEngine_CriticalError;
            m_lastDeviceCount            = m_audioEngine.DeviceCount;


            m_deviceNumber = 0;
            while (true) // find the first non-communications device
            {
                try
                {
                    m_deviceDetails = m_audioEngine.GetDeviceDetails(m_deviceNumber);
                    if (m_deviceDetails.Role == DeviceRole.DefaultCommunicationsDevice)
                    {
                        m_deviceNumber++;
                        if (m_deviceNumber == m_audioEngine.DeviceCount)
                        {
                            m_deviceNumber--;
                            break;
                        }
                    }
                    else
                    {
                        break;
                    }
                }
                catch (Exception e)
                {
                    MyLog.Default.WriteLine(string.Format("Failed to get device details.\n\tdevice no.: {0}\n\tdevice count: {1}", m_deviceNumber, m_audioEngine.DeviceCount), LoggingOptions.AUDIO);
                    MyLog.Default.WriteLine(e.ToString());
                    m_deviceNumber  = 0;
                    m_deviceDetails = m_audioEngine.GetDeviceDetails(m_deviceNumber);
                    break;
                }
            }

            m_masterVoice = new MasteringVoice(m_audioEngine, deviceIndex: m_deviceNumber);

            m_calculateFlags = CalculateFlags.Matrix | CalculateFlags.Doppler;
            if ((m_deviceDetails.OutputFormat.ChannelMask & Speakers.LowFrequency) != 0)
            {
                m_calculateFlags |= CalculateFlags.RedirectToLfe;
            }

            var masterDetails = m_masterVoice.VoiceDetails;

            m_gameAudioVoice      = new SubmixVoice(m_audioEngine, masterDetails.InputChannelCount, masterDetails.InputSampleRate);
            m_musicAudioVoice     = new SubmixVoice(m_audioEngine, masterDetails.InputChannelCount, masterDetails.InputSampleRate);
            m_hudAudioVoice       = new SubmixVoice(m_audioEngine, masterDetails.InputChannelCount, masterDetails.InputSampleRate);
            m_gameAudioVoiceDesc  = new VoiceSendDescriptor[] { new VoiceSendDescriptor(m_gameAudioVoice) };
            m_musicAudioVoiceDesc = new VoiceSendDescriptor[] { new VoiceSendDescriptor(m_musicAudioVoice) };
            m_hudAudioVoiceDesc   = new VoiceSendDescriptor[] { new VoiceSendDescriptor(m_hudAudioVoice) };

            if (m_mute)
            { // keep sounds muted
                m_gameAudioVoice.SetVolume(0);
                m_musicAudioVoice.SetVolume(0);
            }
        }
Example #12
 public Sound()
 {
     xaudio2        = new XAudio2();
     masteringVoice = new MasteringVoice(xaudio2);
     SoundManager   = new Dictionary <string, CachedSound>();
 }
Example #13
 public StreamLoaderDS(string fileName, XAudio2 audioDevice)
 {
     this.fileName = fileName;
     this.audioDevice = audioDevice;
     System.Threading.ThreadPool.QueueUserWorkItem((o) => { ReadStream(); });
 }
Example #14
 static Sound()
 {
     XAudio2 = new XAudio2();
     Master  = new MasteringVoice(XAudio2, 2);
 }
Example #15
 public StreamLoader(string fileName, XAudio2 audioDevice)
 {
     this.fileName    = fileName;
     this.audioDevice = audioDevice;
     System.Threading.ThreadPool.QueueUserWorkItem((o) => { Run(); });
 }
Example #16
        /// <summary>
        /// Initializes XAudio2 and the MasteringVoice, and registers itself as an <see cref="IContentReaderFactory"/>.
        /// </summary>
        /// <exception cref="InvalidOperationException">Is thrown when the IContentManager is not an instance of <see cref="ContentManager"/>.</exception>
        /// <exception cref="AudioException">Is thrown when the <see cref="AudioManager"/> instance could not be initialized (either due to unsupported features or missing audio-device).</exception>
        public override void Initialize()
        {
            base.Initialize();
            contentManager = Content as ContentManager;
            if (contentManager == null)
            {
                throw new InvalidOperationException("Unable to initialize AudioManager. Expecting IContentManager to be an instance of ContentManager");
            }
            try
            {
#if DEBUG && !WIN8METRO && !WP8 && !DIRECTX11_1
                try
                {
                    // "XAudio2Flags.DebugEngine" is only supported by XAudio2 2.7; it is not available in newer versions
                    // msdn.microsoft.com/en-us/library/windows/desktop/microsoft.directx_sdk.xaudio2.xaudio2create(v=vs.85).aspx
                    Device = new XAudio2(XAudio2Flags.DebugEngine, ProcessorSpecifier.DefaultProcessor);
                    Device.StartEngine();
                }
                catch (Exception)
#endif
                {
                    Device = new XAudio2(XAudio2Flags.None, ProcessorSpecifier.DefaultProcessor);
                    Device.StartEngine();
                }
            }
            catch (SharpDXException ex)
            {
                DisposeCore();
                throw new AudioException("Error creating XAudio device.", ex);
            }

#if !W8CORE && !DIRECTX11_1
            if (Device.DeviceCount == 0)
            {
                DisposeCore();
                throw new AudioException("No default audio devices detected.");
            }
#endif

#if W8CORE || DIRECTX11_1
            string deviceId = null;
#else
            const int deviceId = 0;
#endif
            try
            {
                MasteringVoice = new MasteringVoice(Device, XAudio2.DefaultChannels, XAudio2.DefaultSampleRate, deviceId);
            }
            catch (SharpDXException ex)
            {
                DisposeCore();
#if W8CORE
                if (ex.ResultCode == AudioManager.NotFound)
                {
                    throw new AudioException("No default audio devices detected.");
                }
                else
#endif
                {
                    throw new AudioException("Error creating mastering voice.", ex);
                }
            }

            MasteringVoice.SetVolume(masterVolume);

#if W8CORE || DIRECTX11_1
            Speakers = (Speakers)MasteringVoice.ChannelMask;
#else
            var deviceDetails = Device.GetDeviceDetails(deviceId);
            Speakers = deviceDetails.OutputFormat.ChannelMask;
#endif

            if (IsMasteringLimiterEnabled)
            {
                try
                {
                    CreateMasteringLimitier();
                }
                catch (Exception)
                {
                    DisposeCore();
                    throw;
                }
            }

            if (IsSpatialAudioEnabled)
            {
                try
                {
                    x3DAudio = new X3DAudio(Speakers, speedOfSound);
                }
                catch (Exception)
                {
                    DisposeCore();
                    throw;
                }
            }

            if (IsReverbEffectEnabled)
            {
                try
                {
                    CreateReverbSubmixVoice();
                }
                catch (Exception)
                {
                    DisposeCore();
                    throw;
                }
            }

            contentManager.ReaderFactories.Add(new AudioContentReaderFactory());
        }
Example #17
 public void Create()
 {
     device = new XAudio2();
     mVoice = new MasteringVoice(device);
     Reset();
 }
Example #18
 public void Create()
 {
     device = new XAudio2();
     mVoice = new MasteringVoice(device);
     Reset();
 }
Example #19
 /// <summary>
 /// Initializes a new instance of the <see cref="Reverb"/> class.
 /// </summary>
 public Reverb(XAudio2 device) : this(device, false)
 {
 }
 internal static global::System.Runtime.InteropServices.HandleRef getCPtr(XAudio2 obj)
 {
     return((obj == null) ? new global::System.Runtime.InteropServices.HandleRef(null, global::System.IntPtr.Zero) : obj.swigCPtr);
 }
Example #21
 public virtual void Sync(EnginePtr exeeng, Direct3D exed3d, OpenGL exeogl, Havok exehvk, GuiFactory exegui, Forms exefms, DirectIpt exedip, WinIpt exewip, FFmpeg exeffm, CryptoPP execpp, ID3Lib exeid3, WinAudio exewad, XAudio2 exexa2, WinMidi exemid, WinSock exewsk, AsyncWorkers exeaws, SQLite exesql, HaruPdf exepdf, RayTracer exertr, Pbrt exepbrt, PythonScriptEngine exepse, Console execle)
 {
     IronSightEnginePINVOKE.IPbrt_Sync(swigCPtr, EnginePtr.getCPtr(exeeng), Direct3D.getCPtr(exed3d), OpenGL.getCPtr(exeogl), Havok.getCPtr(exehvk), GuiFactory.getCPtr(exegui), Forms.getCPtr(exefms), DirectIpt.getCPtr(exedip), WinIpt.getCPtr(exewip), FFmpeg.getCPtr(exeffm), CryptoPP.getCPtr(execpp), ID3Lib.getCPtr(exeid3), WinAudio.getCPtr(exewad), XAudio2.getCPtr(exexa2), WinMidi.getCPtr(exemid), WinSock.getCPtr(exewsk), AsyncWorkers.getCPtr(exeaws), SQLite.getCPtr(exesql), HaruPdf.getCPtr(exepdf), RayTracer.getCPtr(exertr), Pbrt.getCPtr(exepbrt), PythonScriptEngine.getCPtr(exepse), Console.getCPtr(execle));
     if (IronSightEnginePINVOKE.SWIGPendingException.Pending)
     {
         throw IronSightEnginePINVOKE.SWIGPendingException.Retrieve();
     }
 }
Example #22
 /// <summary>
 /// Initializes a new instance of the <see cref="Audio"/> class.
 /// </summary>
 public Audio()
 {
     audio = new XAudio2();
     master = new MasteringVoice(audio);
     sources = new List<SourceVoice>();
 }
Example #23
        static void Main(string[] args)
        {
            Keys       = BassKeys;
            KeyNotes   = BassKeyNotes;
            KeyOctaves = BassKeyOctaves;

            var devices      = Midi.midiInGetNumDevs();
            var deviceHandle = IntPtr.Zero;
            var deviceCaps   = new Midi.MidiInCaps();

            for (var device = 0U; device < devices; device++)
            {
                Midi.midiInOpen(out deviceHandle, device, MidiProc, IntPtr.Zero, Midi.CALLBACK_FUNCTION);
                Midi.midiInGetDevCaps(deviceHandle, ref deviceCaps, (uint)Marshal.SizeOf(deviceCaps));

                Console.WriteLine(deviceCaps.name);

                Midi.midiInStart(deviceHandle);
            }

            var input = new DirectInput();

            var keyboard = new Keyboard(input);

            keyboard.Acquire();

            var audio = new XAudio2();

            audio.StartEngine();

            var master = new MasteringVoice(audio);

            var format = new WaveFormat(44100, 16, 1);

            var source = new SourceVoice(audio, format);

            BufferEnd = new AutoResetEvent(false);

            source.BufferEnd += Source_BufferEnd;

            source.Start();

            var buffers = new AudioBuffer[2];

            var pointers = new DataPointer[buffers.Length];

            for (int buffer = 0; buffer < buffers.Length; buffer++)
            {
                pointers[buffer] = new DataPointer(Utilities.AllocateClearedMemory(1024), 1024);
                buffers[buffer]  = new AudioBuffer(pointers[buffer]);

                source.SubmitSourceBuffer(buffers[buffer], null);
            }

            var index = 0;

            var data          = new byte[1024];
            var time          = 0.0;
            var keyboardState = new KeyboardState();

            while (true)
            {
                BufferEnd.WaitOne();

                keyboard.GetCurrentState(ref keyboardState);

                for (int x = 0; x < data.Length; x += 2)
                {
                    var delta = 1.0 / format.SampleRate;

                    var value = 0d;
                    var count = 0;

                    for (var note = 24; note < MidiNotes.Length; note++)
                    {
                        MidiNotes[note] = false;
                    }

                    for (var key = 0; key < Keys.Length; key++)
                    {
                        var noteIndex = 24 + (KeyOctaves[key] * 12) + KeyNotes[key];

                        if (keyboardState.IsPressed(Keys[key]))
                        {
                            MidiNotes[noteIndex]    = true;
                            MidiVelocity[noteIndex] = 1.0f;
                        }
                    }

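                    // simple linear attack/release envelope: velocity of held notes ramps toward 1, released notes decay toward 0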
                    for (var note = 24; note < MidiNotes.Length; note++)
                    {
                        if (MidiNotes[note])
                        {
                            if (NoteVelocity[note] >= 1.0 - (Attack * delta))
                            {
                                NoteVelocity[note] = 1.0f;
                            }
                            else
                            {
                                NoteVelocity[note] += (Attack * delta);
                            }
                        }
                        else
                        {
                            if (NoteVelocity[note] <= (Release * delta))
                            {
                                NoteVelocity[note] = 0.0f;
                            }
                            else
                            {
                                NoteVelocity[note] -= (Release * delta);
                            }
                        }
                    }

                    for (var octave = 0; octave < 8; octave++)
                    {
                        for (var note = 0; note < 12; note++)
                        {
                            var noteIndex = 24 + (octave * 12) + note;

                            if (NoteVelocity[noteIndex] != 0.0)
                            {
                                value += Waves.Sine(time, Notes[note] * MidiOctaves[octave], 0.0) * MidiVelocity[noteIndex] * NoteVelocity[noteIndex];
                                //value += Waves.Square(time, Notes[note] * MidiOctaves[octave], 0.0) * MidiVelocity[noteIndex] * NoteVelocity[noteIndex];
                                //value += Waves.Triangle(time, Notes[note] * MidiOctaves[octave], 0.0) * MidiVelocity[noteIndex] * NoteVelocity[noteIndex];
                                value += Waves.Sawtooth(time, Notes[note] * MidiOctaves[octave], 0.0) * MidiVelocity[noteIndex] * NoteVelocity[noteIndex];
                                count++;
                            }
                        }
                    }

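                    // mix down with headroom (divide by 10) and convert to a signed 16-bit sample, written as little-endian bytes below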
                    var value2 = (short)((value / 10.0) * short.MaxValue);

                    data[x]     = (byte)(value2 & 0xff);
                    data[x + 1] = (byte)(value2 >> 8);

                    time += delta;
                }

                pointers[index].CopyFrom(data);

                source.SubmitSourceBuffer(buffers[index], null);

                index++;

                if (index == buffers.Length)
                {
                    index = 0;
                }
            }
        }
Example #24
 public AudioDevice()
 {
     xaudio2        = new XAudio2();
     masteringVoice = new MasteringVoice(xaudio2);
 }
Example #25
        public MyEffectInstance(MyAudioEffect effect, IMySourceVoice input, MySourceVoice[] cues, float? duration, XAudio2 engine)
        {
            m_engine = engine;
            m_effect = effect;
            var inputSound = input as MySourceVoice;

            if (inputSound != null)
            {
                Debug.Assert(!inputSound.Voice.IsDisposed);
                var sd = new SoundData()
                {
                    Sound         = inputSound,
                    Pivot         = 0,
                    CurrentEffect = 0,
                    OrigVolume    = inputSound.Volume,
                    OrigFrequency = inputSound.FrequencyRatio,
                };
                //FilterParameters fp = new FilterParameters();
                m_sounds.Add(sd);
            }

            foreach (var sound in cues)
            {
                sound.Start(false); //jn:todo effect command to start sound
                Debug.Assert(!sound.Voice.IsDisposed);
                m_sounds.Add(new SoundData()
                {
                    Sound         = sound,
                    Pivot         = 0,
                    CurrentEffect = 0,
                    OrigVolume    = sound.Volume,
                    OrigFrequency = sound.FrequencyRatio,
                });
            }
            OutputSound.StoppedPlaying += EffectFinished;

            ComputeDurationAndScale(duration);
            Update(0);
        }
Example #26
 public float SemitonesToFrequencyRatio(float semitones)
 {
     return(XAudio2.SemitonesToFrequencyRatio(semitones));
 }
 /// <summary>
 /// Constructor
 /// </summary>
 public SharpAudioDevice()
 {
     _device = new XAudio2();
     _master = new MasteringVoice(_device);
 }
Example #28
        public XA2Engine(AudioEngineOptions options)
        {
            Device = new XAudio2(XAudio2Flags.DebugEngine, ProcessorSpecifier.AnyProcessor);

            _master = new MasteringVoice(Device, options.SampleChannels, options.SampleRate);
        }
Example #29
        /// <summary>
        /// SharpDX X3DAudio sample. Plays a generated sound rotating around the listener.
        /// </summary>
        static void Main(string[] args)
        {
            var xaudio2 = new XAudio2();

            using (var masteringVoice = new MasteringVoice(xaudio2))
            {
                // Instantiate X3DAudio
                var x3dAudio = new X3DAudio(Speakers.FrontRight);

                var emitter = new Emitter
                {
                    ChannelCount        = 1,
                    CurveDistanceScaler = float.MinValue,
                    OrientFront         = new Vector3(0, 0, 1),
                    OrientTop           = new Vector3(0, 1, 0),
                    Position            = new Vector3(0, 0, 0),
                    Velocity            = new Vector3(0, 0, 0)
                };

                var listener = new Listener
                {
                    OrientFront = new Vector3(0, 0, 1),
                    OrientTop   = new Vector3(0, 1, 0),
                    Position    = new Vector3(0, 0, 0),
                    Velocity    = new Vector3(0, 0, 0)
                };

                var waveFormat  = new WaveFormat(44100, 32, 1);
                var sourceVoice = new SourceVoice(xaudio2, waveFormat);

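                // enough space for 60,000 ms (about one minute) of audio in this format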
                int bufferSize = waveFormat.ConvertLatencyToByteSize(60000);
                var dataStream = new DataStream(bufferSize, true, true);

                int numberOfSamples = bufferSize / waveFormat.BlockAlign;
                for (int i = 0; i < numberOfSamples; i++)
                {
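                    // 220 Hz cosine tone at half amplitude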
                    float value = (float)(Math.Cos(2 * Math.PI * 220.0 * i / waveFormat.SampleRate) * 0.5);
                    dataStream.Write(value);
                }
                dataStream.Position = 0;

                var audioBuffer = new AudioBuffer
                {
                    Stream = dataStream, Flags = BufferFlags.EndOfStream, AudioBytes = bufferSize
                };

                //var reverb = new Reverb();
                //var effectDescriptor = new EffectDescriptor(reverb);
                //sourceVoice.SetEffectChain(effectDescriptor);
                //sourceVoice.EnableEffect(0);

                sourceVoice.SubmitSourceBuffer(audioBuffer, null);

                sourceVoice.Start();

                Console.WriteLine("Play a sound rotating around the listener");
                for (int i = 0; i < 1200; i++)
                {
                    // Rotates the emitter
                    var rotateEmitter      = Matrix.RotationY(i / 5.0f);
                    var newPosition        = Vector3.Transform(new Vector3(0, 0, 1000), rotateEmitter);
                    var newPositionVector3 = new Vector3(newPosition.X, newPosition.Y, newPosition.Z);
                    emitter.Velocity = (newPositionVector3 - emitter.Position) / 0.05f;
                    emitter.Position = newPositionVector3;

                    // Calculate X3DAudio settings
                    var dspSettings = x3dAudio.Calculate(listener, emitter, CalculateFlags.Matrix | CalculateFlags.Doppler, 1, 2);

                    // Modify XAudio2 source voice settings
                    sourceVoice.SetOutputMatrix(1, 2, dspSettings.MatrixCoefficients);
                    sourceVoice.SetFrequencyRatio(dspSettings.DopplerFactor);

                    // Wait for 50ms
                    Thread.Sleep(50);
                }
            }
        }
Example #30
        private void Initialize(string SoundDeviceName, int maxVoicesNbr)
        {
            //Default Xaudio2 objects ==========
            _xaudio2 = ToDispose(new XAudio2());
            if (SoundDeviceName == null)
            {
                _deviceDetail = _xaudio2.GetDeviceDetails(0);
            }
            _soundDevices = new List <string>();

            int customDeviceId = 0;

            //Get all sound devices
            for (int i = 0; i < _xaudio2.DeviceCount; i++)
            {
                _soundDevices.Add(_xaudio2.GetDeviceDetails(i).DisplayName);
                if (SoundDeviceName == _xaudio2.GetDeviceDetails(i).DisplayName)
                {
                    _deviceDetail  = _xaudio2.GetDeviceDetails(i);
                    customDeviceId = i;
                }
            }

            logger.Info("s33m3 sound engine started for device : " + _deviceDetail.DisplayName);

            _x3DAudio = new X3DAudio(_deviceDetail.OutputFormat.ChannelMask);

            if (SoundDeviceName == null)
            {
                _masteringVoice = ToDispose(new MasteringVoice(_xaudio2, XAudio2.DefaultChannels, XAudio2.DefaultSampleRate, 0));
            }
            else
            {
                _masteringVoice = ToDispose(new MasteringVoice(_xaudio2, _deviceDetail.OutputFormat.Channels, _deviceDetail.OutputFormat.SampleRate, customDeviceId));
            }

            //Default state values =============
            _maxVoicePoolPerFileType = maxVoicesNbr;

            _soundDataSources     = new Dictionary <string, ISoundDataSource>();
            _soundVoices          = new Dictionary <int, ISoundVoice[]>();
            _soundProcessingQueue = new List <ISoundVoice>();

            _listener = new Listener();

            //Start Sound voice processing thread
            _syncro = new ManualResetEvent(false);
            _d3dEngine.RunningThreadedWork.Add("SoundEngine");
            _d3dEngine.OnShuttingDown += d3dEngine_OnShuttingDown;
            _thread = new Thread(DataSoundPocessingAsync)
            {
                Name = "SoundEngine"
            }; // Start the main loop
            _stopThreading = false;
            _thread.Start();

            GeneralSoundVolume = 1.0f;

            GlobalMusicVolume = 1;
            GlobalFXVolume    = 1;

            _xaudio2.StartEngine();
        }
        public void PlayPPM(IntPtr win)
        {
            Rate = 192000; //44100 on cheapo, 96000 on AC97, 192000 on HD Audio
                           // i.e. the number of samples per second of audio
            channels = 2;  // 1 = mono, 2 = stereo

            PPMSamples = (int)(0.0225 * Rate * channels);   // 22 or 22.5ms in samples, rounded up
                                                            // no. of bytes per second = channels * rate * bytes in one sample
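                                                            // with the values above: 0.0225 s * 192000 * 2 channels = 8640 samples per PPM frame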
            microsec = Rate / 10000.0;                      // at 192 kHz: 192 samples = 1 ms, so 19.2 samples = 0.1 ms
            PPMchannels = new Dictionary<int, double>();
            frame = new List<short>();
            Amplitude = 32760;

            /*WaveFile wFile;
            wFile = new WaveFile(channels, 16, Rate);
            */

            //Set channels to neutral except throttle, throttle = zero.
            PPMchannels.Add(1, 10.0); //Throttle
            PPMchannels.Add(2, 50.0); //Ailerons
            PPMchannels.Add(3, 50.0); //Stab
            PPMchannels.Add(4, 50.0); //Rudder
            PPMchannels.Add(5, 50.0);
            PPMchannels.Add(6, 50.0);
            PPMchannels.Add(7, 50.0);
            PPMchannels.Add(8, 50.0);

            byte[] data = GenPPM();

            /*wFile.SetData(data, data.Length);
            wFile.WriteFile(@"C:\Users\kang\Desktop\test.wav");
            */
            ms = new MemoryStream();
            ms.SetLength(0);
            ms.Write(data, 0, data.Length);
            ms.Position = 0;

            wf = new WaveFormat();
            wf.FormatTag = WaveFormatTag.Pcm;
            wf.BitsPerSample = (short)16;
            wf.Channels = channels;
            wf.SamplesPerSecond = Rate;
            wf.BlockAlignment = (short)(wf.Channels * wf.BitsPerSample / 8);
            wf.AverageBytesPerSecond = wf.SamplesPerSecond * wf.BlockAlignment;
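            // with the values above: 2 channels * 16 bits / 8 = 4-byte blocks; 192000 * 4 = 768,000 bytes per second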

            device = new XAudio2();
            device.StartEngine();
            masteringVoice = new MasteringVoice(device);
            srcVoice = new SourceVoice(device, wf);
            buffer = new AudioBuffer();
            buffer.AudioData = ms;
            buffer.AudioBytes = (int)data.Length;
            buffer.Flags = SlimDX.XAudio2.BufferFlags.None;

            srcVoice.BufferStart += new EventHandler<ContextEventArgs>(srcVoice_BufferStart);
            srcVoice.FrequencyRatio = 1;
            srcVoice.SubmitSourceBuffer(buffer);
            srcVoice.Start();
        }
 public StreamSourceVoice(XAudio2 audioDevice)
 {
     this.audioDevice = audioDevice;
 }