Example #1
		public XAudioDevice()
		{
			if (StackTraceExtensions.StartedFromNUnitConsoleButNotFromNCrunch)
				return;
			XAudio = new XAudio2();
			MasteringVoice = new MasteringVoice(XAudio);
		}
Example #2
 public WaveManager()
 {
     xAudio = new XAudio2();
     var mastering = new MasteringVoice(xAudio);
     mastering.SetVolume(1, 0);
     xAudio.StartEngine();
 }
Example #3
        /// <summary>
        /// Initialize the media element for playback
        /// </summary>
        /// <param name="streamConfig">Object containing stream configuration details</param>
        void InitializeMediaPlayer(MoonlightStreamConfiguration streamConfig, AvStreamSource streamSource)
        {
            this._streamSource = streamSource;

            _videoMss = new MediaStreamSource(new VideoStreamDescriptor(VideoEncodingProperties.CreateH264()));
            _videoMss.BufferTime = TimeSpan.Zero;
            _videoMss.CanSeek = false;
            _videoMss.Duration = TimeSpan.Zero;
            _videoMss.SampleRequested += _videoMss_SampleRequested;

            XAudio2 xaudio = new XAudio2();
            MasteringVoice masteringVoice = new MasteringVoice(xaudio, 2, 48000);
            WaveFormat format = new WaveFormat(48000, 16, 2);

            // Set for low latency playback
            StreamDisplay.RealTimePlayback = true;

            // Render on the full window to avoid extra compositing
            StreamDisplay.IsFullWindow = true;

            // Disable built-in transport controls
            StreamDisplay.AreTransportControlsEnabled = false;

            // Start playing right away
            StreamDisplay.AutoPlay = true;

            StreamDisplay.SetMediaStreamSource(_videoMss);

            AvStream.SetSourceVoice(new SourceVoice(xaudio, format));
        }
Example #4
 public SoundManager(int cntVoices)
 {
     audio = new XAudio2();
     masteringVoice = new MasteringVoice(audio);
     masteringVoice.SetVolume(0.5f);
     voices = new SourceVoice[cntVoices];
     buffers = new AudioBuffer[cntVoices];
     streams = new SoundStream[cntVoices];
 }
Example #5
 public SoundManager(int sounds)
 {
     _audio = new XAudio2();
     _masteringVoice = new MasteringVoice(_audio);
     _masteringVoice.SetVolume(0.5f);
     _soundStreams = new SoundStream[sounds];
     _audioBuffers = new AudioBuffer[sounds];
     _sourceVoices = new SourceVoice[sounds];
 }
Example #6
 public Audio(String fileName)
 {
     device = new XAudio2();
     masteringVoice = new MasteringVoice(device);
     stream = new SoundStream(File.OpenRead("Content/"+fileName));
     buffer = new AudioBuffer {Stream  = stream.ToDataStream(),
         AudioBytes = (int)stream.Length, Flags = BufferFlags.EndOfStream};
     stream.Close();
 }
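The constructor above only loads the file and prepares the buffer; nothing is ever submitted to a voice. A minimal sketch of how playback could be wired up, assuming a hypothetical Play method on the same class and the SourceVoice pattern used in the other examples (the original class may do this differently):
 // Hypothetical Play method, not part of the original listing.
 public void Play()
 {
     // Create a source voice matching the format of the loaded sound.
     var sourceVoice = new SourceVoice(device, stream.Format, true);

     // Queue the prepared buffer and start playback.
     sourceVoice.SubmitSourceBuffer(buffer, stream.DecodedPacketsInfo);
     sourceVoice.Start();
 }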
Example #7
        public X3DAudioEngine()
        {
            _xaudio2 = new XAudio2();
            _masteringVoice = new MasteringVoice(_xaudio2);

            _deviceFormat = _xaudio2.GetDeviceDetails(0).OutputFormat;
            _x3dAudio = new X3DAudio(_deviceFormat.ChannelMask);

            Position = new Vector3D(0, 0, 0);
            Rotation = System.Windows.Media.Media3D.Quaternion.Identity;
        }
Example #8
        public XAudio2Mixer()
        {
            _xAudio = new XAudio2();
            _xAudio.StartEngine();
            _masteringVoice = new MasteringVoice(_xAudio);
            _buffers = new XAudioBuffer[NumBuffers];

            for (var i = 0; i < NumBuffers; i++)
            {
                _buffers[i] = new XAudioBuffer(BufferSize);
            }
        }
Example #9
File: Program.cs Project: numo16/SharpDX
        /// <summary>
        /// SharpDX XAudio2 sample. Plays wav/xwma/adpcm files from the disk.
        /// </summary>
        static void Main(string[] args)
        {
            var xaudio2 = new XAudio2();
            var masteringVoice = new MasteringVoice(xaudio2);

            PLaySoundFile(xaudio2, "1) Playing a standard WAV file", "ergon.wav");
            PLaySoundFile(xaudio2, "2) Playing a XWMA file", "ergon.xwma");
            PLaySoundFile(xaudio2, "3) Playing an ADPCM file", "ergon.adpcm.wav");

            masteringVoice.Dispose();
            xaudio2.Dispose();
        }
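The PLaySoundFile helper is not part of this listing. A rough sketch of what such a helper might look like, based on the SoundStream/SourceVoice pattern used in the other examples here; the actual implementation in the numo16/SharpDX sample may differ:
        // Hypothetical sketch of the missing helper; the real sample code may differ.
        static void PLaySoundFile(XAudio2 device, string text, string fileName)
        {
            Console.WriteLine("{0} ({1})", text, fileName);

            // SoundStream parses WAV, xWMA and ADPCM content.
            using (var stream = new SoundStream(File.OpenRead(fileName)))
            {
                var buffer = new AudioBuffer
                {
                    Stream = stream.ToDataStream(),
                    AudioBytes = (int)stream.Length,
                    Flags = BufferFlags.EndOfStream
                };

                var sourceVoice = new SourceVoice(device, stream.Format, true);
                sourceVoice.SubmitSourceBuffer(buffer, stream.DecodedPacketsInfo);
                sourceVoice.Start();

                // Wait until the queued buffer has finished playing.
                while (sourceVoice.State.BuffersQueued > 0)
                    Thread.Sleep(10);

                sourceVoice.DestroyVoice();
                sourceVoice.Dispose();
            }
        }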
Example #10
 internal FactoryHandlerXAudio2()
 {
     try
     {
         m_xaudioDevice   = new SharpDX.XAudio2.XAudio2(SharpDX.XAudio2.XAudio2Version.Default);
         m_masteringVoice = new SharpDX.XAudio2.MasteringVoice(m_xaudioDevice);
     }
     catch (Exception)
     {
         m_xaudioDevice   = null;
         m_masteringVoice = null;
     }
 }
Example #11
		/// <summary>
		/// Initializes the XAudio2 device and mastering voice and creates the sound world.
		/// </summary>
        public override void Initialize()
        {
            try
            {
                if (Device == null) {
                    Device = new XAudio2(XAudio2Flags.None, ProcessorSpecifier.DefaultProcessor);
                    Device.StartEngine();
                }

				var DeviceFormat = Device.GetDeviceDetails(0).OutputFormat;

                // Just use the default device.
                const int deviceId = 0;

                if (MasterVoice == null) {
                    // Let windows autodetect number of channels and sample rate.
                    MasterVoice = new MasteringVoice(Device, XAudio2.DefaultChannels, XAudio2.DefaultSampleRate, deviceId);
                    MasterVoice.SetVolume(_masterVolume, 0);
                }

                // The autodetected value of MasterVoice.ChannelMask corresponds to the speaker layout.
                var deviceDetails = Device.GetDeviceDetails(deviceId);
                Speakers = deviceDetails.OutputFormat.ChannelMask;

				var dev3d = Device3D;

				Log.Debug("Audio devices :");
				for ( int devId = 0; devId < Device.DeviceCount; devId++ ) {
					var device = Device.GetDeviceDetails( devId );

					Log.Debug( "[{1}] {0}", device.DisplayName, devId );
					Log.Debug( "    role : {0}", device.Role );
					Log.Debug( "    id   : {0}", device.DeviceID );
				}
            }
            catch
            {
                // Release the device and null it as
                // we have no audio support.
                if (Device != null)
                {
                    Device.Dispose();
                    Device = null;
                }

                MasterVoice = null;
            }


			soundWorld	=	new SoundWorld(Game);
        }
Example #12
        internal unsafe void CreateMasteringVoice27(MasteringVoice masteringVoiceOut, int inputChannels, int inputSampleRate, int flags, int deviceIndex, EffectChain? effectChainRef)
        {
            IntPtr      zero = IntPtr.Zero;
            EffectChain value;

            if (effectChainRef.HasValue)
            {
                value = effectChainRef.Value;
            }
            Result result = LocalInterop.Calliint(this._nativePointer, (void *)&zero, inputChannels, inputSampleRate, flags, deviceIndex, effectChainRef.HasValue ? ((void *)(&value)) : ((void *)IntPtr.Zero), *(*(void ***)this._nativePointer + 10));

            masteringVoiceOut.NativePointer = zero;
            result.CheckError();
        }
Example #13
        // This region is currently neither implemented nor exposed to the client

        private void AudioEngineImpl(AudioDevice device)
        {
            try
            {
                XAudio2 = new XAudio2();
                X3DAudio = new X3DAudio(Speakers.Stereo); // only stereo mode currently supported

                XAudio2.CriticalError += XAudio2OnCriticalError;

                MasteringVoice = new MasteringVoice(XAudio2, 2, (int)AudioSampleRate); // Let XAudio2 choose an adequate sample rate for the platform and configuration, but force the channel count to stereo (2 channels).

                if (!mediaEngineStarted)
                {
                    // MediaManager.Startup(); <- MediaManager.Shutdown is buggy (do not set isStartUp to false) so we are forced to directly use MediaFactory.Startup here while sharpDX is not corrected.
                    MediaFactory.Startup(0x20070, 0);
                    mediaEngineStarted = true;
                }

                PlatformSpecificInit();
            }
            catch (DllNotFoundException exp)
            {
                State = AudioEngineState.Invalidated;
                Logger.Warning("One or more of the XAudio and MediaFoundation dlls were not found on the computer. " +
                               "Audio functionalities will not fully work for the current game instance." +
                               "To fix the problem install or repair the installation of XAudio and Media Foundation. [Exception details: {0}]", exp.Message);
            }
            catch (SharpDX.SharpDXException exp)
            {
                State = AudioEngineState.Invalidated;

                if (exp.ResultCode == XAudioErrorCodes.ErrorInvalidCall)
                {
                    Logger.Warning("Initialization of the audio engine failed. This may be due to missing audio hardware or missing audio outputs. [Exception details: {0}]", exp.Message);
                }
                else if (exp.ResultCode == 0x8007007E)
                {
                    Logger.Warning( "Audio engine initialization failed. This is probably due to missing dll on your computer. " +
                                    "Please check that XAudio2 and MediaFoundation are correctly installed.[Exception details: {0}]", exp.Message);
                }
                else
                {
                    Logger.Warning("Audio engine initialization failed. [Exception details: {0}]", exp.Message);
                }
            }
        }
Example #14
        public PlayForm()
        {
            InitializeComponent();

            // Initialize XAudio2
            xaudio2 = new XAudio2(XAudio2Flags.None, ProcessorSpecifier.DefaultProcessor);
            masteringVoice = new MasteringVoice(xaudio2);

            var waveFormat = new WaveFormat(44100, 32, 2);
            sourceVoice = new SourceVoice(xaudio2, waveFormat);

            int bufferSize = waveFormat.ConvertLatencyToByteSize(60000);
            DataStream dataStream = new DataStream(bufferSize, true, true);

            // Prepare the initial sound to modulate
            int numberOfSamples = bufferSize / waveFormat.BlockAlign;
            for (int i = 0; i < numberOfSamples; i++)
            {
                float value = (float)(Math.Cos(2 * Math.PI * 220.0 * i / waveFormat.SampleRate) * 0.5);
                dataStream.Write(value);
                dataStream.Write(value);
            }
            dataStream.Position = 0;

            audioBuffer = new AudioBuffer
                              {
                                  Stream = dataStream,
                                  Flags = BufferFlags.EndOfStream,
                                  AudioBytes = bufferSize,
                                  LoopBegin = 0,
                                  LoopLength = numberOfSamples,
                                  LoopCount = AudioBuffer.LoopInfinite
                              };

            // Set the effect on the source
            ModulatorEffect = new ModulatorEffect();
            modulatorDescriptor = new EffectDescriptor(ModulatorEffect);
            reverb = new Reverb(xaudio2);
            effectDescriptor = new EffectDescriptor(reverb);
            //sourceVoice.SetEffectChain(modulatorDescriptor, effectDescriptor);
            sourceVoice.SetEffectChain(modulatorDescriptor);
            //sourceVoice.EnableEffect(0);

            this.Closed += new EventHandler(PlayForm_Closed);
        }
Example #15
        static void Main(string[] args)
        {
            XAudio2 audioDevice = new XAudio2();
            Console.WriteLine("Device count: " + audioDevice.DeviceCount);
            for (int indexOfDevice = 0; indexOfDevice < audioDevice.DeviceCount; indexOfDevice++)
            {
                DeviceDetails deviceDetails = audioDevice.GetDeviceDetails(indexOfDevice);
                Console.WriteLine("DeviceID: " + deviceDetails.DeviceID);
                Console.WriteLine("Device Name: " + deviceDetails.DisplayName);
                Console.WriteLine("Output format:" + deviceDetails.OutputFormat);
                Console.WriteLine("Role: " + deviceDetails.Role);
                Console.WriteLine();
            }

            MasteringVoice masteringVoice = new MasteringVoice(audioDevice);
            Console.WriteLine("Volume: " + masteringVoice.Volume);
            Console.ReadKey();
        }
Example #16
        public SoundEffect(string soundFxPath)
        {
            _xaudio = new XAudio2();
            var masteringsound = new MasteringVoice(_xaudio);

            var nativefilestream = new NativeFileStream(
            soundFxPath,
            NativeFileMode.Open,
            NativeFileAccess.Read,
            NativeFileShare.Read);

            _soundstream = new SoundStream(nativefilestream);
            _waveFormat = _soundstream.Format;
            _buffer = new AudioBuffer
            {
                Stream = _soundstream.ToDataStream(),
                AudioBytes = (int)_soundstream.Length,
                Flags = BufferFlags.EndOfStream
            };
        }
Example #17
        /// <summary>
        /// SharpDX XAudio2 sample. Plays a generated sound with some reverb.
        /// </summary>
        static void Main(string[] args)
        {
            var xaudio2 = new XAudio2();
            var masteringVoice = new MasteringVoice(xaudio2);

            var waveFormat = new WaveFormat(44100, 32, 2);
            var sourceVoice = new SourceVoice(xaudio2, waveFormat);

            int bufferSize = waveFormat.ConvertLatencyToByteSize(60000);
            var dataStream = new DataStream(bufferSize, true, true);

            int numberOfSamples = bufferSize/waveFormat.BlockAlign;
            for (int i = 0; i < numberOfSamples; i++)
            {
                double vibrato = Math.Cos(2 * Math.PI * 10.0 * i / waveFormat.SampleRate);
                float value = (float) (Math.Cos(2*Math.PI*(220.0 + 4.0*vibrato)*i/waveFormat.SampleRate)*0.5); 
                dataStream.Write(value);
                dataStream.Write(value);
            }
            dataStream.Position = 0;

            var audioBuffer = new AudioBuffer {Stream = dataStream, Flags = BufferFlags.EndOfStream, AudioBytes = bufferSize};

            var reverb = new Reverb();
            var effectDescriptor = new EffectDescriptor(reverb);
            sourceVoice.SetEffectChain(effectDescriptor);
            sourceVoice.EnableEffect(0);

            sourceVoice.SubmitSourceBuffer(audioBuffer, null);

            sourceVoice.Start();

            Console.WriteLine("Play sound");
            for(int i = 0; i < 60; i++)
            {
                Console.Write(".");
                Console.Out.Flush();
                Thread.Sleep(1000);
            }
        }
Example #18
        private void DestroyImpl()
        {
            if (MasteringVoice != null)
            {
                MasteringVoice.Dispose();
                MasteringVoice = null;
            }

            if (XAudio2 != null)
            {
                XAudio2.StopEngine();
                XAudio2.Dispose();
                XAudio2 = null;
            }

            PlatformSpecificDispose();

            if (mediaEngineStarted && nbOfAudioEngineInstances == 0)
            {
                MediaManager.Shutdown();
                mediaEngineStarted = false;
            }
        }
Example #19
        //static int dev_ind = 0;
        public void Init(int dev, int ch)
        {
            xa_sou = new SharpDX.XAudio2.XAudio2(SharpDX.XAudio2.XAudio2Version.Version27);
            //if (dev >= xa_sou.DeviceCount)
            //{ MessageBox.Show("device_max:" + (xa_sou.DeviceCount - 1).ToString() + " select is:"
            //    + dev + "\r\nError Restart device 0"); dev_ind = 0; }
            //else { dev_ind = dev; }
            switch (ch)
            {
            case 21:
                ch = 3;
                break;

            case 51:
                ch = 6;
                break;
            }
            //var xa_mas = new SharpDX.XAudio2.MasteringVoice(xa_sou, ch, 48000, dev_ind);
            var xa_mas = new SharpDX.XAudio2.MasteringVoice(xa_sou, ch, 48000);

            xa_mas.Dispose();
            mus_start.Clear();
        }
Example #20
        internal override void DestroyAudioEngine()
        {
            if (MasteringVoice != null)
            {
                MasteringVoice.Dispose();
                MasteringVoice = null;
            }

            if (XAudio2 != null)
            {
                XAudio2.StopEngine();
                XAudio2.Dispose();
                XAudio2 = null;
            }

            DisposeImpl();

            if (mediaEngineStarted && nbOfAudioEngineInstances == 0)
            {
                MediaManager.Shutdown();
                mediaEngineStarted = false;
            }
        }
Example #21
        private void StartEngine()
        {
            if (m_audioEngine != null)
            {
                DisposeVoices();
                m_audioEngine.CriticalError -= m_audioEngine_CriticalError;
                m_audioEngine.Dispose();
            }

            // Init/reinit engine
            m_audioEngine = new XAudio2(XAudio2Version.Version27);

            // A way to disable SharpDX callbacks
            //var meth = m_audioEngine.GetType().GetMethod("UnregisterForCallbacks_", System.Reflection.BindingFlags.NonPublic | System.Reflection.BindingFlags.Instance);
            //var callbacks = m_audioEngine.GetType().GetField("engineShadowPtr", System.Reflection.BindingFlags.Instance | System.Reflection.BindingFlags.NonPublic);
            //meth.Invoke((object)m_audioEngine, new object[] { callbacks.GetValue(m_audioEngine) });

            m_audioEngine.CriticalError += m_audioEngine_CriticalError;
            m_lastDeviceCount = m_audioEngine.DeviceCount;


            m_deviceNumber = 0;
            while (true) // find the first non-communications device
            {
                try
                {
                    m_deviceDetails = m_audioEngine.GetDeviceDetails(m_deviceNumber);
                    if (m_deviceDetails.Role == DeviceRole.DefaultCommunicationsDevice)
                    {
                        m_deviceNumber++;
                        if (m_deviceNumber == m_audioEngine.DeviceCount)
                        {
                            m_deviceNumber--;
                            break;
                        }
                    }
                    else
                        break;
                }
                catch(Exception e)
                {
                    MyLog.Default.WriteLine(string.Format("Failed to get device details.\n\tdevice no.: {0}\n\tdevice count: {1}", m_deviceNumber, m_audioEngine.DeviceCount), LoggingOptions.AUDIO);
                    MyLog.Default.WriteLine(e.ToString());
                    m_deviceNumber = 0;
                    m_deviceDetails = m_audioEngine.GetDeviceDetails(m_deviceNumber);
                    break;
                }
            }

            m_masterVoice = new MasteringVoice(m_audioEngine, XAudio2.DefaultChannels, XAudio2.DefaultSampleRate, m_deviceNumber);
            
            if (m_useVolumeLimiter)
            {
                var limiter = new SharpDX.XAPO.Fx.MasteringLimiter(m_audioEngine);
                var param = limiter.Parameter;
                param.Loudness = 0;
                limiter.Parameter = param;
                //TODO: this throws exception in 3.0.1 version
                var effectDescriptor = new EffectDescriptor(limiter);
                m_masterVoice.SetEffectChain(effectDescriptor);
                m_soundLimiterReady = true;
                m_masterVoice.DisableEffect(0);
                //m_masterVoice.EnableEffect(0);
                //limiter.Dispose();
            }

            m_calculateFlags = CalculateFlags.Matrix | CalculateFlags.Doppler;
            if ((m_deviceDetails.OutputFormat.ChannelMask & Speakers.LowFrequency) != 0)
            {
                m_calculateFlags |= CalculateFlags.RedirectToLfe;
            }

			var masterDetails = m_masterVoice.VoiceDetails;
            m_gameAudioVoice = new SubmixVoice(m_audioEngine, masterDetails.InputChannelCount, masterDetails.InputSampleRate);
            m_musicAudioVoice = new SubmixVoice(m_audioEngine, masterDetails.InputChannelCount, masterDetails.InputSampleRate);
            m_hudAudioVoice = new SubmixVoice(m_audioEngine, masterDetails.InputChannelCount, masterDetails.InputSampleRate);
            m_gameAudioVoiceDesc = new VoiceSendDescriptor[] { new VoiceSendDescriptor(m_gameAudioVoice) };
            m_musicAudioVoiceDesc = new VoiceSendDescriptor[] { new VoiceSendDescriptor(m_musicAudioVoice) };
            m_hudAudioVoiceDesc = new VoiceSendDescriptor[] { new VoiceSendDescriptor(m_hudAudioVoice) };

            if (m_mute)
            { // keep sounds muted 
                m_gameAudioVoice.SetVolume(0);
                m_musicAudioVoice.SetVolume(0);
            }

            m_reverb = new Reverb(m_audioEngine);
            m_gameAudioVoice.SetEffectChain(new EffectDescriptor(m_reverb, masterDetails.InputChannelCount));
            m_gameAudioVoice.DisableEffect(0);
        }
Example #22
		private void DisposeMasteringVoice()
		{
			if (MasteringVoice != null)
				MasteringVoice.Dispose();
			MasteringVoice = null;
		}
Example #23
        static SoundEffect()
        {
            var flags = XAudio2Flags.None;

#if !WINRT && DEBUG
            flags |= XAudio2Flags.DebugEngine;
#endif
            try
            {
                // This cannot fail.
                Device = new XAudio2(flags, ProcessorSpecifier.DefaultProcessor);

                Device.StartEngine();

                // Just use the default device.
#if WINRT
                string deviceId = null;
#else
                const int deviceId = 0;
#endif

                // Let windows autodetect number of channels and sample rate.
                MasterVoice = new MasteringVoice(Device, XAudio2.DefaultChannels, XAudio2.DefaultSampleRate, deviceId);            
                MasterVoice.SetVolume(_masterVolume, 0);

                // The autodetected value of MasterVoice.ChannelMask corresponds to the speaker layout.
#if WINRT
                Speakers = (Speakers)MasterVoice.ChannelMask;
#else
                var deviceDetails = Device.GetDeviceDetails(deviceId);
                Speakers = deviceDetails.OutputFormat.ChannelMask;
#endif
            }
            catch
            {
                // Release the device and null it as
                // we have no audio support.
                if (Device != null)
                {
                    Device.Dispose();
                    Device = null;
                }

                MasterVoice = null;
            }

        }
Example #24
		/// <summary>
		/// Releases the mastering voice and the XAudio2 device.
		/// </summary>
		/// <param name="disposing">True when called from Dispose; false when called from the finalizer.</param>
        protected override void Dispose(bool disposing)
        {
			if (disposing) {
				if (MasterVoice != null) {
					MasterVoice.DestroyVoice();
					MasterVoice.Dispose();
					MasterVoice = null;
				}

				if (Device != null) {
					Device.StopEngine();
					Device.Dispose();
					Device = null;
				}

				_device3DDirty = true;
				_speakers = Speakers.Stereo;
			}
        }
Example #25
        /// <summary>
        /// SharpDX X3DAudio sample. Plays a generated sound rotating around the listener.
        /// </summary>
        static void Main(string[] args)
        {
            var xaudio2 = new XAudio2();
            using (var masteringVoice = new MasteringVoice(xaudio2))
            {
                // Instantiate X3DAudio
                var x3dAudio = new X3DAudio(Speakers.Stereo);

                var emitter = new Emitter
                                  {
                                      ChannelCount = 1,
                                      CurveDistanceScaler = float.MinValue,
                                      OrientFront = new Vector3(0, 0, 1),
                                      OrientTop = new Vector3(0, 1, 0),
                                      Position = new Vector3(0, 0, 0),
                                      Velocity = new Vector3(0, 0, 0)
                                  };

                var listener = new Listener
                                   {
                                       OrientFront = new Vector3(0, 0, 1),
                                       OrientTop = new Vector3(0, 1, 0),
                                       Position = new Vector3(0, 0, 0),
                                       Velocity = new Vector3(0, 0, 0)
                                   };

                var waveFormat = new WaveFormat(44100, 32, 1);
                var sourceVoice = new SourceVoice(xaudio2, waveFormat);

                int bufferSize = waveFormat.ConvertLatencyToByteSize(60000);
                var dataStream = new DataStream(bufferSize, true, true);

                int numberOfSamples = bufferSize/waveFormat.BlockAlign;
                for (int i = 0; i < numberOfSamples; i++)
                {
                    float value = (float) (Math.Cos(2*Math.PI*220.0*i/waveFormat.SampleRate)*0.5);
                    dataStream.Write(value);
                }
                dataStream.Position = 0;

                var audioBuffer = new AudioBuffer
                                      {Stream = dataStream, Flags = BufferFlags.EndOfStream, AudioBytes = bufferSize};

                //var reverb = new Reverb();
                //var effectDescriptor = new EffectDescriptor(reverb);
                //sourceVoice.SetEffectChain(effectDescriptor);
                //sourceVoice.EnableEffect(0);

                sourceVoice.SubmitSourceBuffer(audioBuffer, null);

                sourceVoice.Start();

                Console.WriteLine("Play a sound rotating around the listener");
                for (int i = 0; i < 1200; i++)
                {
                    // Rotates the emitter
                    var rotateEmitter = Matrix.RotationY(i/5.0f);
                    var newPosition = Vector3.Transform(new Vector3(0, 0, 1000), rotateEmitter);
                    var newPositionVector3 = new Vector3(newPosition.X, newPosition.Y, newPosition.Z);
                    emitter.Velocity = (newPositionVector3 - emitter.Position)/0.05f;
                    emitter.Position = newPositionVector3;

                    // Calculate X3DAudio settings
                    var dspSettings = x3dAudio.Calculate(listener, emitter, CalculateFlags.Matrix | CalculateFlags.Doppler, 1, 2);

                    // Modify XAudio2 source voice settings
                    sourceVoice.SetOutputMatrix(1, 2, dspSettings.MatrixCoefficients);
                    sourceVoice.SetFrequencyRatio(dspSettings.DopplerFactor);

                    // Wait for 50ms
                    Thread.Sleep(50);
                }
            }
        }
Example #26
        private void btnXAudio2_Click(object sender, RoutedEventArgs e)
        {

            XAudio2 xaudio;
            MasteringVoice masteringVoice;

            xaudio = new XAudio2();
            masteringVoice = new MasteringVoice(xaudio);

            var nativefilestream = new NativeFileStream(
                @"Assets\Clk_1Sec1.wav",
                NativeFileMode.Open,
                NativeFileAccess.Read,
                NativeFileShare.Read);

            var soundstream = new SoundStream(nativefilestream);


            var waveFormat = soundstream.Format;
            var buffer = new AudioBuffer
            {
                Stream = soundstream.ToDataStream(),
                AudioBytes = (int)soundstream.Length,
                Flags = BufferFlags.EndOfStream,
            };

            var sourceVoice = new SourceVoice(xaudio, waveFormat, true);

            // There is also support for shifting the frequency.
            sourceVoice.SetFrequencyRatio(1.0f);

            sourceVoice.SubmitSourceBuffer(buffer, soundstream.DecodedPacketsInfo);

            sourceVoice.Start();
        }
Example #27
        internal static void InitializeSoundEffect()
        {
            try
            {
                if (Device == null)
                {
#if !WINRT && DEBUG
                    try
                    {
                        //Fails if the XAudio2 SDK is not installed
                        Device = new XAudio2(XAudio2Flags.DebugEngine, ProcessorSpecifier.DefaultProcessor);
                        Device.StartEngine();
                    }
                    catch
#endif
                    {
                        Device = new XAudio2(XAudio2Flags.None, ProcessorSpecifier.DefaultProcessor);
                        Device.StartEngine();
                    }
                }

                // Just use the default device.
#if WINRT
                string deviceId = null;
#else
                const int deviceId = 0;
#endif

                if (MasterVoice == null)
                {
                    // Let windows autodetect number of channels and sample rate.
                    MasterVoice = new MasteringVoice(Device, XAudio2.DefaultChannels, XAudio2.DefaultSampleRate, deviceId);
                }

                // The autodetected value of MasterVoice.ChannelMask corresponds to the speaker layout.
#if WINRT
                Speakers = (Speakers)MasterVoice.ChannelMask;
#else
                var deviceDetails = Device.GetDeviceDetails(deviceId);
                Speakers = deviceDetails.OutputFormat.ChannelMask;
#endif
            }
            catch
            {
                // Release the device and null it as
                // we have no audio support.
                if (Device != null)
                {
                    Device.Dispose();
                    Device = null;
                }

                MasterVoice = null;
            }
        }
Example #28
        internal unsafe void CreateMasteringVoice27(MasteringVoice masteringVoiceOut, int inputChannels, int inputSampleRate, int flags, int deviceIndex, EffectChain? effectChainRef)
        {
	        IntPtr zero = IntPtr.Zero;
	        EffectChain value;
	        if (effectChainRef.HasValue)
	        {
		        value = effectChainRef.Value;
	        }
	        Result result = LocalInterop.Calliint(this._nativePointer, (void*)&zero, inputChannels, inputSampleRate, flags, deviceIndex, effectChainRef.HasValue ? ((void*)(&value)) : ((void*)IntPtr.Zero), *(*(void***)this._nativePointer + 10));
	        masteringVoiceOut.NativePointer = zero;
	        result.CheckError();
        }
Example #29
        private void InitializeXAudio2()
        {
            // This is mandatory when using any of the SharpDX.MediaFoundation classes
            MediaManager.Startup();

            // Start the XAudio2 engine
            xaudio2 = new XAudio2();
            xaudio2.StartEngine();
            masteringVoice = new MasteringVoice(xaudio2);
        }
Example #30
		public unsafe XAudio2Renderer()
		{
			waveFormat = new WaveFormat();
			xAudio = new XAudio2(XAudio2Flags.None, ProcessorSpecifier.AnyProcessor);
			masteringVoice = new MasteringVoice(xAudio, 2, 44100);
		}
Example #31
File: Program.cs Project: zetaPRIME/xybrid
        static void Main(string[] args)
        {
            Application.EnableVisualStyles();
            Application.SetCompatibleTextRenderingDefault(false);
            //Application.Run(new UIForm());

            UIHandler.Init();
            GraphicsManager.Init();
            ThemeManager.LoadDefault();
            InteropManager.Init();

            /*UIForm f1 = new UIForm(1);
            UIForm f2 = new UIForm(2);

            Button b = new Button();
            b.SetBounds(48, 48, 48, 48);
            b.Click += (Object s, EventArgs e) => {
                new UIForm(32).Show();
            };
            f1.Controls.Add(b);

            f2.Show();*/
            new WindowMain().Open();

            UIManager.SpawnUIUpdateThread();
            //Application.Run(f1);
            Application.Run();

            bool yes = true;
            if (yes) return;

            //
            XAudio2 audio = new XAudio2();
            WaveFormat format = new WaveFormat(44100, 32, 2);
            MasteringVoice mvoice = new MasteringVoice(audio);
            SourceVoice voice = new SourceVoice(audio, format);

            int bufferSize = format.ConvertLatencyToByteSize(500);

            DataStream stream = new DataStream(bufferSize, true, true);

            int samples = bufferSize / format.BlockAlign;
            for (int i = 0; i < samples; i++) {
                float val = (float)(Math.Sin(2*Math.PI*500*i/format.SampleRate));// * (0.5 + Math.Sin(2*Math.PI*2.2*i/format.SampleRate)*0.5));
                //if (val == 0) val = 1;
                //val = val / Math.Abs(val);
                for (int j = 0; j < 3; j++) val = val * val;
                stream.Write(val);
                stream.Write(val);
                //stream.Write(val * (float)(0.5 + Math.Sin(2 * Math.PI * 2.2 * i / format.SampleRate) * 0.5));
                //stream.Write(val * (float)(0.5 + Math.Cos(2 * Math.PI * 2.2 * i / format.SampleRate) * 0.5));
            }

            stream.Position = 0;

            AudioBuffer buffer = new AudioBuffer { Stream = stream, Flags = BufferFlags.EndOfStream, AudioBytes = bufferSize };
            //buffer.LoopCount = AudioBuffer.LoopInfinite;
            voice.SubmitSourceBuffer(buffer, null);
            voice.SubmitSourceBuffer(buffer, null);
            voice.SubmitSourceBuffer(buffer, null);
            voice.SubmitSourceBuffer(buffer, null);
            voice.SubmitSourceBuffer(buffer, null);
            voice.SubmitSourceBuffer(buffer, null);
            voice.SubmitSourceBuffer(buffer, null);
            voice.SubmitSourceBuffer(buffer, null);
            voice.SubmitSourceBuffer(buffer, null);
            voice.SubmitSourceBuffer(buffer, null);
            voice.SubmitSourceBuffer(buffer, null);
            voice.SubmitSourceBuffer(buffer, null);
            voice.SubmitSourceBuffer(buffer, null);
            voice.SubmitSourceBuffer(buffer, null);

            voice.Start();
            Thread.Sleep(3000);
            long pos = stream.Position;
            stream.Position = 0;
            for (int i = 0; i < samples; i++) {
                float val = (float)(Math.Sin(2 * Math.PI * 500 * i / format.SampleRate));// * (0.5 + Math.Sin(2*Math.PI*2.2*i/format.SampleRate)*0.5));
                //if (val == 0) val = 1;
                //val = val / Math.Abs(val);
                //for (int j = 0; j < 8; j++) val = val * val;
                stream.Write(val);
                stream.Write(val);
                //stream.Write(val * (float)(0.5 + Math.Sin(2 * Math.PI * 2.2 * i / format.SampleRate) * 0.5));
                //stream.Write(val * (float)(0.5 + Math.Cos(2 * Math.PI * 2.2 * i / format.SampleRate) * 0.5));
            }
            //stream.Position = pos;
            //voice.State.p
            while (voice.State.BuffersQueued > 0) Thread.Sleep(100);
            //Thread.Sleep(6000);
        }
예제 #32
0
 /// <summary>
 /// Constructor
 /// </summary>
 public SharpAudioDevice()
 {
     _device = new XAudio2();
     _master = new MasteringVoice(_device);
 }
Example #33
 /// <summary>Initializes a new instance of the <see cref="T:Gorgon.Examples.AudioPlayback"/> class.</summary>
 public AudioPlayback()
 {
     _audio       = new Xa.XAudio2();
     _masterVoice = new Xa.MasteringVoice(_audio);
 }
Example #34
        /// <summary>
        /// Initialize the media element for playback
        /// </summary>
        /// <param name="streamConfig">Object containing stream configuration details</param>
        void InitializeMediaPlayer(MoonlightStreamConfiguration streamConfig, AvStreamSource streamSource)
        {
            this._streamSource = streamSource;

            // This code is based upon the MS FFmpegInterop project on GitHub
            VideoEncodingProperties videoProps = VideoEncodingProperties.CreateH264();
            videoProps.ProfileId = H264ProfileIds.High;
            videoProps.Width = (uint)streamConfig.GetWidth();
            videoProps.Height = (uint)streamConfig.GetHeight();
            videoProps.Bitrate = (uint)streamConfig.GetBitrate();

            _videoMss = new MediaStreamSource(new VideoStreamDescriptor(videoProps));
            _videoMss.BufferTime = TimeSpan.Zero;
            _videoMss.CanSeek = false;
            _videoMss.Duration = TimeSpan.Zero;
            _videoMss.SampleRequested += _videoMss_SampleRequested;

            XAudio2 xaudio = new XAudio2();
            MasteringVoice masteringVoice = new MasteringVoice(xaudio, 2, 48000);
            WaveFormat format = new WaveFormat(48000, 16, 2);

            // Set for low latency playback
            StreamDisplay.RealTimePlayback = true;

            // Render on the full window to avoid extra compositing
            StreamDisplay.IsFullWindow = true;

            // Disable built-in transport controls
            StreamDisplay.AreTransportControlsEnabled = false;

            StreamDisplay.SetMediaStreamSource(_videoMss);
            AvStream.SetSourceVoice(new SourceVoice(xaudio, format));
        }