Example #1
        public OpenAlAudioManager(ContentManager content, string audioDirectoryName)
        {
            if (content == null)
            {
                throw new ArgumentNullException("content");
            }
            if (audioDirectoryName == null)
            {
                throw new ArgumentNullException("audioDirectoryName");
            }

            _content            = content;
            _audioDirectoryName = audioDirectoryName;

            // destroy the previous context, specifically because MonoGame creates one
            var context = Alc.GetCurrentContext();

            Alc.DestroyContext(context);

            _context = new AudioContext();
            _context.MakeCurrent();

            _efx     = new EffectsExtension();
            _sounds  = new Dictionary <string, Sound>();
            _effects = new Dictionary <string, AudioEffect>();
            if (_efx.IsInitialized)
            {
                _effects.Add("lowpassfilter", new LowPassFilterAudioEffect(_efx));
            }
        }
Example #2
        void shutDownAudio()
        {
            ContextHandle alContext    = Alc.GetCurrentContext();
            IntPtr        alDevice     = Alc.GetContextsDevice(alContext);
            ContextHandle emptyContext = ContextHandle.Zero;

            Alc.MakeContextCurrent(emptyContext);
            Alc.DestroyContext(alContext);
            Alc.CloseDevice(alDevice);
        }
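For reference, the teardown above reverses the usual OpenAL startup sequence. A minimal sketch using OpenTK's Alc bindings follows; the null device name and the zero-terminated attribute list are assumptions for illustration, not taken from the examples here.

        // Hypothetical startup that the shutdown above undoes in reverse order:
        // open a device, create a context, and make it current.
        IntPtr        device  = Alc.OpenDevice(null);                    // null selects the default device
        ContextHandle context = Alc.CreateContext(device, new int[] { 0 });
        Alc.MakeContextCurrent(context);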
Example #3
        public void CleanAndExit()
        {
            ContextHandle alContext    = Alc.GetCurrentContext();
            IntPtr        alDevice     = Alc.GetContextsDevice(alContext);
            ContextHandle emptyContext = ContextHandle.Zero;

            Alc.MakeContextCurrent(emptyContext);
            Alc.DestroyContext(alContext);
            Alc.CloseDevice(alDevice);
        }
Example #4
        public AudioDevice(string deviceName)
        {
            if (deviceName != null && !AvailableDevices.Contains(deviceName))
            {
                throw new InvalidOperationException(string.Format("AudioDevice \"{0}\" does not exist.", deviceName));
            }

            Context = new OpenTK.Audio.AudioContext(deviceName, 0, 15, true, true, AudioContext.MaxAuxiliarySends.UseDriverDefault);
            CheckAlcError();
            deviceHandle = Alc.GetContextsDevice(Alc.GetCurrentContext());
            CheckAlcError();
            Efx = new EffectsExtension();
            CheckAlcError();

            int[] val = new int[4];
            DeviceName    = Context.CurrentDevice;
            VendorName    = AL.Get(ALGetString.Vendor);
            Renderer      = AL.Get(ALGetString.Renderer);
            DriverVersion = AL.Get(ALGetString.Version);
            int major, minor;

            Alc.GetInteger(deviceHandle, AlcGetInteger.MajorVersion, 1, val);
            major = val[0];
            Alc.GetInteger(deviceHandle, AlcGetInteger.MinorVersion, 1, val);
            minor   = val[0];
            Version = new Version(major, minor);
            Alc.GetInteger(deviceHandle, AlcGetInteger.EfxMajorVersion, 1, val);
            major = val[0];
            Alc.GetInteger(deviceHandle, AlcGetInteger.EfxMinorVersion, 1, val);
            minor      = val[0];
            EfxVersion = new Version(major, minor);
            Alc.GetInteger(deviceHandle, AlcGetInteger.EfxMaxAuxiliarySends, 1, val);
            MaxRoutes  = val[0];
            Extensions = new List <string>(AL.Get(ALGetString.Extensions).Split(' ')).AsReadOnly();

            AL.DistanceModel(ALDistanceModel.ExponentDistance);

            CheckAudioCapabilities(LogLevel.Verbose);
            LogDiagnostics(LogLevel.Verbose);

            Factory  = new AudioFactory(this);
            Listener = new AudioListener(this);
            Listener.Orientation(Vector3.UnitY, Vector3.UnitZ);

            updateTaskCancelation = new CancellationTokenSource();
            updateTask            = Task.Factory.StartNew(Update);
        }
Example #5
        protected override void Dispose(bool disposing)
        {
            _watchThread.Terminate();

            if (_context != IntPtr.Zero)
            {
                var currentContext = Alc.GetCurrentContext();
                if (currentContext == _context)
                {
                    Alc.MakeContextCurrent(IntPtr.Zero);
                }

                Alc.DestroyContext(_context);
            }

            _context = IntPtr.Zero;
        }
        public static void checkForErrors()
        {
            {
                IntPtr   device = Alc.GetContextsDevice(Alc.GetCurrentContext());
                AlcError error  = Alc.GetError(device);

                if (error != AlcError.NoError)
                {
                    Trace.WriteLine("ALC ERROR: (" + error + ")  " + Alc.GetString(device, (AlcGetString)error));
                }
            }
            {
                ALError error = AL.GetError();
                if (error != ALError.NoError)
                {
                    Trace.WriteLine("AL ERROR: (" + error + ") " + AL.GetErrorString(error));
                }
            }
        }
        static void Main()
        {
            Trace.Listeners.RemoveAt(0);
            Trace.Listeners.Add(new ConsoleTraceListener());

            Trace.WriteLine("This application is currently running as " + (IntPtr.Size == 4 ? "x86" : "x64"));

            DeviceDiagnostic DevDiag = new DeviceDiagnostic();

            DevDiag.Print();
            DevDiag = null;

            using (AudioContext A = new AudioContext())
            {
                AlcDiagnostic AlcDiag = new AlcDiagnostic(Alc.GetContextsDevice(Alc.GetCurrentContext()));
                checkForErrors();
                AlcDiag.Print();
                AlcDiag = null;

                ALDiagnostic ALdiag = new ALDiagnostic(A);
                checkForErrors();
                ALdiag.Print();
                ALdiag = null;

                EfxDiagnostic EfxDiag = new EfxDiagnostic();
                checkForErrors();
                EfxDiag.Print();
                EfxDiag = null;

                XRamDiagnostic XRamDiag = new XRamDiagnostic();
                checkForErrors();
                XRamDiag.Print();
                XRamDiag = null;

                RecorderDiagnostic rec = new RecorderDiagnostic();
                rec.Print();
                rec = null;
            }

            // Trace.WriteLine("All done. Press Enter to exit.");
            // Console.ReadLine();
        }
Example #8
        private OpenALSoundController()
        {
            try
            {
                this.context = new AudioContext();
            }
            catch (Exception ex)
            {
                OpenALSoundController.Log("Last error in enumerator is " + AudioDeviceEnumerator.LastError);
                int num = (int)MessageBox.Show("Error initializing audio subsystem. Game will now exit.\n(see debug log for more details)", "OpenAL Error", MessageBoxButtons.OK, MessageBoxIcon.Hand);
                throw;
            }
            OpenALSoundController.Log("Sound manager initialized!");
            int[]  data1          = new int[1];
            IntPtr contextsDevice = Alc.GetContextsDevice(Alc.GetCurrentContext());

            Alc.GetInteger(contextsDevice, AlcGetInteger.AttributesSize, 1, data1);
            int[] data2 = new int[data1[0]];
            Alc.GetInteger(contextsDevice, AlcGetInteger.AllAttributes, data1[0], data2);
            for (int index = 0; index < data2.Length; ++index)
            {
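                // 4112 is 0x1010 (ALC_MONO_SOURCES); the following entry in the attribute list is its value.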
                if (data2[index] == 4112)
                {
                    OpenALSoundController.Log("Available mono sources : " + (object)data2[index + 1]);
                    break;
                }
            }
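            // Create a pass-through low-pass filter: filter type 1 is AL_FILTER_LOWPASS, with both gains left at 1.0.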
            this.filterId = ALHelper.Efx.GenFilter();
            ALHelper.Efx.Filter(this.filterId, EfxFilteri.FilterType, 1);
            ALHelper.Efx.Filter(this.filterId, EfxFilterf.LowpassGain, 1f);
            ALHelper.Efx.Filter(this.filterId, EfxFilterf.LowpassGainHF, 1f);
            AL.DistanceModel(ALDistanceModel.InverseDistanceClamped);
            this.freeBuffers = new ConcurrentStack <int>();
            this.ExpandBuffers(256);
            this.allocatedBuffers   = new Dictionary <SoundEffect, OpenALSoundController.BufferAllocation>();
            this.staleAllocations   = new List <KeyValuePair <SoundEffect, OpenALSoundController.BufferAllocation> >();
            this.filteredSources    = new HashSet <int>();
            this.activeSoundEffects = new List <SoundEffectInstance>();
            this.freeSources        = new ConcurrentStack <int>();
            this.ExpandSources(64);
        }
        /// <summary>
        /// Opens the sound device, sets up an audio context, and makes the new context
        /// the current context. Note that this method will stop the playback of
        /// music that was running prior to the game start. If any error occurs, then
        /// the state of the controller is reset.
        /// </summary>
        /// <returns>True if the sound controller was setup, and false if not.</returns>
        private bool OpenSoundController()
        {
#if MONOMAC || IOS
            alcMacOSXMixerOutputRate(PREFERRED_MIX_RATE);
#endif

            try
            {
                _device = Alc.OpenDevice(string.Empty);
            }
            catch (Exception ex)
            {
                _SoundInitException = ex;
                return(false);
            }
            if (CheckALError("Could not open AL device"))
            {
                return(false);
            }
            if (_device != IntPtr.Zero)
            {
#if ANDROID
                // Attach activity event handlers so we can pause and resume all playing sounds
                AndroidGameActivity.Paused  += Activity_Paused;
                AndroidGameActivity.Resumed += Activity_Resumed;

                // Query the device for the ideal frequency and update buffer size so
                // we can get the low latency sound path.

                /*
                 * The recommended sequence is:
                 *
                 * Check for feature "android.hardware.audio.low_latency" using code such as this:
                 * import android.content.pm.PackageManager;
                 * ...
                 * PackageManager pm = getContext().getPackageManager();
                 * boolean claimsFeature = pm.hasSystemFeature(PackageManager.FEATURE_AUDIO_LOW_LATENCY);
                 * Check for API level 17 or higher, to confirm use of android.media.AudioManager.getProperty().
                 * Get the native or optimal output sample rate and buffer size for this device's primary output stream, using code such as this:
                 * import android.media.AudioManager;
                 * ...
                 * AudioManager am = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
                 * String sampleRate = am.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE));
                 * String framesPerBuffer = am.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER));
                 * Note that sampleRate and framesPerBuffer are Strings. First check for null and then convert to int using Integer.parseInt().
                 * Now use OpenSL ES to create an AudioPlayer with PCM buffer queue data locator.
                 *
                 * See http://stackoverflow.com/questions/14842803/low-latency-audio-playback-on-android
                 */

                int frequency     = DEFAULT_FREQUENCY;
                int updateSize    = DEFAULT_UPDATE_SIZE;
                int updateBuffers = DEFAULT_UPDATE_BUFFER_COUNT;
                if (Android.OS.Build.VERSION.SdkInt >= Android.OS.BuildVersionCodes.JellyBeanMr1)
                {
                    Android.Util.Log.Debug("OAL", Game.Activity.PackageManager.HasSystemFeature(PackageManager.FeatureAudioLowLatency) ? "Supports low latency audio playback." : "Does not support low latency audio playback.");

                    var audioManager = Game.Activity.GetSystemService(Context.AudioService) as AudioManager;
                    if (audioManager != null)
                    {
                        var result = audioManager.GetProperty(AudioManager.PropertyOutputSampleRate);
                        if (!string.IsNullOrEmpty(result))
                        {
                            frequency = int.Parse(result, CultureInfo.InvariantCulture);
                        }
                        result = audioManager.GetProperty(AudioManager.PropertyOutputFramesPerBuffer);
                        if (!string.IsNullOrEmpty(result))
                        {
                            updateSize = int.Parse(result, CultureInfo.InvariantCulture);
                        }
                    }

                    // If 4.4 or higher, then we don't need to double buffer on the application side.
                    // See http://stackoverflow.com/a/15006327
                    // Use the explicit value rather than a constant as the 4.2 SDK (the build SDK) does not define a constant for 4.4.
                    if ((int)Android.OS.Build.VERSION.SdkInt >= 19)
                    {
                        updateBuffers = 1;
                    }
                }
                else
                {
                    Android.Util.Log.Debug("OAL", "Android 4.2 or higher required for low latency audio playback.");
                }
                Android.Util.Log.Debug("OAL", "Using sample rate " + frequency + "Hz and " + updateBuffers + " buffers of " + updateSize + " frames.");

                // These are missing and non-standard ALC constants
                const int AlcFrequency     = 0x1007;
                const int AlcUpdateSize    = 0x1014;
                const int AlcUpdateBuffers = 0x1015;

                int[] attribute = new[]
                {
                    AlcFrequency, frequency,
                    AlcUpdateSize, updateSize,
                    AlcUpdateBuffers, updateBuffers,
                    0
                };
#elif IOS
                AudioSession.Initialize();

                AudioSession.Interrupted += (sender, e) => {
                    AudioSession.SetActive(false);
                    Alc.MakeContextCurrent(ContextHandle.Zero);
                    Alc.SuspendContext(_context);
                };
                AudioSession.Resumed += (sender, e) => {
                    AudioSession.SetActive(true);
                    Alc.MakeContextCurrent(_context);
                    Alc.ProcessContext(_context);
                };

                int[] attribute = new int[0];
#elif !DESKTOPGL
                int[] attribute = new int[0];
#endif

#if DESKTOPGL
                _acontext    = new AudioContext();
                _context     = Alc.GetCurrentContext();
                _oggstreamer = new OggStreamer();
#else
                _context = Alc.CreateContext(_device, attribute);
#endif

                if (CheckALError("Could not create AL context"))
                {
                    CleanUpOpenAL();
                    return(false);
                }

                if (_context != ContextHandle.Zero)
                {
                    Alc.MakeContextCurrent(_context);
                    if (CheckALError("Could not make AL context current"))
                    {
                        CleanUpOpenAL();
                        return(false);
                    }
                    return(true);
                }
            }
            return(false);
        }
Example #10
        protected bool init()
        {
            if (myIsInitialized == true)
            {
                return(true);
            }


            Info.print("------------------AUDIO MANAGER----------------");

            try
            {
                //try to get the DirectSound default (OpenAL Soft's target)
                string defaultDevice = AudioContext.DefaultDevice;
                myContext = new AudioContext(defaultDevice);
            }
            catch (AudioException ex)
            {
                Error.print("Exception trying to initialize OpenAL Context.  Verify OpenAL drivers are installed");
                Error.print("Exception: {0}", ex.Message);
                if (ex.InnerException != null)
                {
                    Error.print("Inner Exception: {0}", ex.InnerException.Message);
                }

                return(false);
            }

            //make the context current
            myContext.MakeCurrent();
            myContext.CheckErrors();
            myDevice = Alc.GetContextsDevice(Alc.GetCurrentContext());

            //print out the attributes
            int attributeSize = 0;

            Alc.GetInteger(myDevice, AlcGetInteger.AttributesSize, 1, out attributeSize);
            int[] attBuffer = new int[attributeSize * 2 + 1];
            Alc.GetInteger(myDevice, AlcGetInteger.AllAttributes, attributeSize * 2 + 1, attBuffer);
            int idx = 0;

            while (attBuffer[idx] != 0)
            {
                Info.print(String.Format("Context attribute: {0}:{1}", Audio.enumToString(attBuffer[idx]), attBuffer[idx + 1]));
                idx += 2;
            }

            //print some debug information about the system
            string alExtensions  = AL.Get(ALGetString.Extensions);
            string alcExtensions = Alc.GetString(myDevice, AlcGetString.Extensions);

            Info.print("Opened Audio device {0}", myContext.ToString());
            Info.print("OpenAL Vendor: {0}", AL.Get(ALGetString.Vendor));
            Info.print("OpenAL Version: {0}", AL.Get(ALGetString.Version));
            Info.print("OpenAL Renderer: {0}", AL.Get(ALGetString.Renderer));
            Info.print("OpenAL Extensions: {0}", AL.Get(ALGetString.Extensions));
            Info.print("OpenAL Context Extensions: {0} ", Alc.GetString(myDevice, AlcGetString.Extensions));



            string[] extensions = alcExtensions.Split(' ');
            for (int i = 0; i < extensions.Length; i++)
            {
                if (extensions[i] == "ALC_EXT_EFX")
                {
                    myEnvironmentalProcessingAvailable = true;
                }
            }

            Info.print("Environmental Processing: " + (myEnvironmentalProcessingAvailable ? "available" : "unavailable"));

            createVoices(myMaxVoices);

            Info.print("------------------AUDIO MANAGER----------------");

            return(true);
        }
Example #11
        public static void AlcUnitTestFunc()
        {
            AudioContext context = new AudioContext();

            Trace.WriteLine("Testing AudioContext functions.");
            Trace.Indent();

//            Trace.WriteLine("Suspend()...");
//            context.Suspend();
//            Trace.Assert(!context.IsProcessing);
//
//            Trace.WriteLine("Process()...");
//            context.Process();
//            Trace.Assert(context.IsProcessing);

            //Trace.WriteLine("MakeCurrent()...");
            //context.MakeCurrent();
            //Trace.Assert(context.IsCurrent);

            //Trace.WriteLine("IsCurrent = false...");
            //context.IsCurrent = false;
            //Trace.Assert(!context.IsCurrent);

            //Trace.WriteLine("IsCurrent = true...");
            //context.IsCurrent = true;
            //Trace.Assert(context.IsCurrent);

            Trace.WriteLine("AudioContext.CurrentContext...");
            Trace.Assert(AudioContext.CurrentContext == context);

            #region Get Attribs

            //int AttribCount;
            //Alc.GetInteger(context.Device, AlcGetInteger.AttributesSize, sizeof(int), out AttribCount);
            //Trace.WriteLine("AttributeSize: " + AttribCount);

            //if (AttribCount > 0)
            //{
            //    int[] Attribs = new int[AttribCount];
            //    Alc.GetInteger(context.Device, AlcGetInteger.AllAttributes, AttribCount, out Attribs[0]);
            //    for (int i = 0; i < Attribs.Length; i++)
            //    {
            //        Trace.Write(Attribs[i]);
            //        Trace.Write(" ");
            //    }
            //    Trace.WriteLine();
            //}

            #endregion Get Attribs

#if false
            AlDevice  MyDevice;
            AlContext MyContext;

            // Initialize Open AL
            MyDevice = Alc.OpenDevice(null);  // open default device
            if (MyDevice != Al.Null)
            {
                Trace.WriteLine("Device allocation succeeded.");
                MyContext = Alc.CreateContext(MyDevice, Al.Null);   // create context
                if (MyContext != Al.Null)
                {
                    Trace.WriteLine("Context allocation succeeded.");
                    GetOpenALErrors(MyDevice);

                    Alc.SuspendContext(MyContext);                      // disable context
                    Alc.ProcessContext(MyContext);                      // enable context. The default state of a context created by alcCreateContext is that it is processing.
                    Al.Bool result = Alc.MakeContextCurrent(MyContext); // set active context
                    Trace.WriteLine("MakeContextCurrent succeeded? " + result);
                    GetOpenALErrors(MyDevice);

                    Trace.WriteLine("Default: " + Alc.GetString(MyDevice, Enums.AlcGetString.DefaultDeviceSpecifier));
                    Trace.WriteLine("Device: " + Alc.GetString(MyDevice, Enums.AlcGetString.DeviceSpecifier));
                    Trace.WriteLine("Extensions: " + Alc.GetString(MyDevice, Enums.AlcGetString.Extensions));
                    GetOpenALErrors(MyDevice);

                    #region Get Attribs
                    int AttribCount;
                    Alc.GetInteger(MyDevice, Enums.AlcGetInteger.AttributesSize, sizeof(int), out AttribCount);
                    Trace.WriteLine("AttributeSize: " + AttribCount);

                    if (AttribCount > 0)
                    {
                        int[] Attribs = new int[AttribCount];
                        Alc.GetInteger(MyDevice, Enums.AlcGetInteger.AttributesSize, AttribCount, out Attribs[0]);
                        for (int i = 0; i < Attribs.Length; i++)
                        {
                            Trace.Write(", " + Attribs[i]);
                        }
                        Trace.WriteLine( );
                    }
                    #endregion Get Attribs
                    GetOpenALErrors(MyDevice);

                    AlDevice  currdev = Alc.GetContextsDevice(MyContext);
                    AlContext currcon = Alc.GetCurrentContext( );

                    if (MyDevice == currdev)
                    {
                        Trace.WriteLine("Devices match.");
                    }
                    else
                    {
                        Trace.WriteLine("Error: Devices do not match.");
                    }

                    if (MyContext == currcon)
                    {
                        Trace.WriteLine("Context match.");
                    }
                    else
                    {
                        Trace.WriteLine("Error: Contexts do not match.");
                    }

                    // exit
                    Alc.MakeContextCurrent(Al.Null);   // results in no context being current
                    Alc.DestroyContext(MyContext);
                    result = Alc.CloseDevice(MyDevice);
                    Trace.WriteLine("Result: " + result);
                    Trace.ReadLine( );
                }
                else
                {
                    Trace.WriteLine("Context creation failed.");
                }
            }
            else
            {
                Trace.WriteLine("Failed to find suitable Device.");
            }
#endif

            /*
             * #include <stdlib.h>
             * #include <AL/alut.h>
             *
             * int
             * main (int argc, char **argv)
             * {
             * ALuint helloBuffer, helloSource;
             * alutInit (&argc, argv);
             * helloBuffer = alutCreateBufferHelloWorld ();
             * alGenSources (1, &helloSource);
             * alSourcei (helloSource, AL_BUFFER, helloBuffer);
             * alSourcePlay (helloSource);
             * alutSleep (1);
             * alutExit ();
             * return EXIT_SUCCESS;
             * }*/

            /*
             *
             * Processing Loop Example:
             * // PlaceCamera - places OpenGL camera & updates OpenAL listener buffer
             * void AVEnvironment::PlaceCamera()
             * {
             * // update OpenGL camera position
             * glMatrixMode(GL_PROJECTION);
             * glLoadIdentity();
             * glFrustum(-0.1333, 0.1333, -0.1, 0.1, 0.2, 50.0);
             * gluLookAt(listenerPos[0], listenerPos[1], listenerPos[2],
             * (listenerPos[0] + sin(listenerAngle)), listenerPos[1],
             * (listenerPos[2] - cos(listenerAngle)),
             * 0.0, 1.0, 0.0);
             * // update OpenAL
             * // place listener at camera
             * alListener3f(AL_POSITION, listenerPos[0], listenerPos[1], listenerPos[2]);
             * float directionvect[6];
             * directionvect[0] = (float) sin(listenerAngle);
             * directionvect[1] = 0;
             * directionvect[2] = (float) cos(listenerAngle);
             * directionvect[3] = 0;
             * directionvect[4] = 1;
             * directionvect[5] = 0;
             * alListenerfv(AL_ORIENTATION, directionvect);
             * }
             *
             */
        }