Exemplo n.º 1
0
    /// <summary>
    /// Renders one audio buffer by stepping a note pattern: for each frame
    /// the bar position is read from the clock cache, the pattern is sampled
    /// for the active note, and an enveloped oscillator sample is written to
    /// every channel. Each Vector4 packs (sample, amp envelope, pitch, loop phase).
    /// </summary>
    /// <param name="ptn">Note pattern to play.</param>
    /// <param name="stepLength">NOTE(review): currently unused in this overload — confirm whether Pattern.Read should receive it.</param>
    /// <param name="ac">Audio context providing buffer length, channel count and clock.</param>
    public Vector4[] Read(List <Note> ptn, float stepLength, AudioContext ac)
    {
        Vector4[] data = new Vector4[ac.length];
        //float[] beatphase = beatPh.Read(ac.clock.BPM/60.0,ac);
        //float[] oscphase = oscPh.Read((double)freq, ac);



        // One iteration per frame; the same sample is duplicated across all
        // interleaved channels below.
        for (int n = 0; n < data.Length; n += ac.channels)
        {
            float barpos    = (float)ac.clock.readCache(n);
            // Phase within a 2-bar loop, packed into the output's w component.
            float loopphase = (float)Clock.Phase(barpos, 2.0);



            // Sample the pattern at the current bar position to get the
            // active note and its trigger phase.
            var ptnphase = Pattern.Read(ptn, barpos);
            var trigP    = ptnphase.phase;
            var note     = ptnphase.note;
            var scale    = Note.NotenumToScale(note.noteNum, ac);
            var pitch    = Note.ScaleToFreq(scale, ac);


            var osc = Read((float)pitch, (float)trigP, ac);
            // float s = Sine(oscPh.read(freq, ac), ac) * amp * sawsq;
            // Amplitude envelope retriggered by the pattern trigger phase.
            float envA = (float)DSP.Env(0.01f, 0.99f, 1, trigP, ac);
            float o    = ((float)osc) * (envA);

            for (int i = 0; i < ac.channels; i++)
            {
                data[n + i] = new Vector4(o, envA, (float)pitch, loopphase);
            }
        }
        return(data);
    }
Exemplo n.º 2
0
 /// <summary>
 /// Creates the OpenAL-backed audio plugin; no context is created when
 /// audio has been disabled.
 /// </summary>
 public OpenTKAudioPlugin()
 {
     // Nothing to set up when audio is disabled.
     if (disabled)
     {
         return;
     }

     Context = new AudioContext();
 }
Exemplo n.º 3
0
 /// <summary>
 /// Sets up the Gorillas game scene: window title, input hook, camera,
 /// render/update delegates, entities and systems, the OpenAL audio
 /// context, all gameplay state variables, and finally the persisted
 /// settings and audio files.
 /// </summary>
 public GameScene(SceneManager sceneManager, SystemManager systemManager, EntityManager entityManager) : base(sceneManager, systemManager, entityManager)
 {
     // Set the title of the window
     sceneManager.Title             = "Gorillas";
     sceneManager.Keyboard.KeyDown += Keyboard_KeyDown;
     sceneManager.camera.Position   = new Vector3(1.5f, 6.0f, 15.0f);
     sceneManager.camera.Rotate(0.0f, 0.0f, 0.0f, 1.0f);
     // Set the Render and Update delegates to the Update and Render methods of this class
     sceneManager.renderer = Render;
     sceneManager.updater  = Update;
     GL.ClearColor(0.0f, 0.0f, 0.0f, 1.0f);
     // Load entities and systems
     CreateEntities();
     CreateSystems();
     // Variable initialisation
     audioContext = new AudioContext();
     // Game Variables
     isAI           = true;                       // Default settings have AI active - may change when settings are loaded in
     isPlayer1      = true;                       // True = player1 turn and false = player2 turn
     hasHitSun      = false;                      // Used to see if banana has hit the sun so texture can be changed
     Input          = ""; Angle = ""; Power = ""; // Used to allow player to type in their inputs
     InputID        = 0;                          // Tracks which input is currently entered (angle/power/ready to shoot)
     ProjectileTime = 0.0f;                       // Handles how long the banana is in flight for
     AiInputTimer   = 0.0f;                       // Used to space out AI inputs so they are not instant once p1 has finished
     sunHitTimer    = 0.0f;                       // Used to time how long the suns texture has changed for when hit
     WindSpeed      = GenerateWindValue(3, 6);    // Wind speeds works on scale of -1 to 1 to manipulate player power input
     // Load in game settings (names, Sounds, etc.)
     LoadGameSettings();
     LoadAudioFiles();
 }
Exemplo n.º 4
0
    /// <summary>
    /// Converts a 1-based note number to a frequency value using a 16-step
    /// just-intonation scale (harmonics 16/16 .. 31/16). Note 1 maps to the
    /// first scale degree three octaves down; every 16 notes raise the octave.
    /// </summary>
    /// <param name="note">1-based note number; values &lt;= 0 mean "no note" and return 0.</param>
    /// <param name="ac">Audio context (unused here; kept for signature consistency with the other Note helpers).</param>
    /// <returns>Frequency multiplier (relative to the implicit base), or 0 for "no note".</returns>
    public static double NotenumToFreq(int note, AudioContext ac)
    {
        // Fix: the original only rejected 0; a negative note would have
        // produced a negative array index below.
        if (note <= 0)
        {
            return(0);
        }
        var scale = new double[] {
            16.0 / 16.0,  // C    1
            17.0 / 16.0,  // C# 2
            18.0 / 16.0,  // D   3
            19.0 / 16.0,  // D#
            20.0 / 16.0,  // E 3rd 5
            21.0 / 16.0,  // F 4th 6
            22.0 / 16.0,  // F#
            23.0 / 16.0,  // Gb

            24.0 / 16.0,  // G  9
            25.0 / 16.0,  // G#
            26.0 / 16.0,  // Ab

            27.0 / 16.0,  // A  12
            28.0 / 16.0,  // A#
            29.0 / 16.0,  // Bb
            30.0 / 16.0,  // B 15
            31.0 / 16.0,  // B#
        };
        // Integer division: 16 notes per octave, starting 3 octaves down.
        var oct = -3 + (note - 1) / scale.Length;

        // Fix: System.Math.Pow keeps the whole computation in double
        // precision (Mathf.Pow rounded through float).
        return(System.Math.Pow(2.0, oct) * scale[(note - 1) % scale.Length]);
    }
Exemplo n.º 5
0
    /// <summary>
    /// Processes one sample through a resonant low-pass filter implemented as
    /// a second-order IIR (biquad, direct form I). Coefficients are recomputed
    /// every call from the normalized cutoff and resonance, so both can be
    /// modulated per-sample. State (in0..in2, out0..out2, coefficients) lives
    /// in instance fields, so one instance filters one mono stream.
    /// </summary>
    /// <param name="inval">Input sample.</param>
    /// <param name="cutoff">Normalized cutoff; mapped to [10 Hz .. Nyquist+10).</param>
    /// <param name="reso">Resonance amount; values approaching 1.01 remove damping.</param>
    /// <param name="ac">Audio context supplying the sample rate.</param>
    /// <returns>The filtered sample.</returns>
    public double read(double inval, double cutoff, double reso, AudioContext ac)
    {
        var sample_rate = ac.SampleRate;
        // Damping term: sqrt(2) at reso == 0.01 gives a maximally flat
        // response; larger reso reduces damping (more resonance).
        var r           = Math.Sqrt(2) * (1.01 - reso);

        // Map normalized cutoff to Hz; the +10 keeps f strictly positive.
        var f = (sample_rate / 2.0) * (cutoff) + 10;

        c = 1.0 / Math.Tan(Math.PI * f / sample_rate);

        a1 = 1.0 / (1.0 + r * c + c * c);
        a2 = 2 * a1;
        a3 = a1;
        b1 = 2.0 * (1.0 - c * c) * a1;
        b2 = (1.0 - r * c + c * c) * a1;

        // Shift the input history: x[n-2] <- x[n-1] <- x[n].
        in2 = in1;
        in1 = in0;
        in0 = inval;

        // Shift the output history and compute y[n].
        out2 = out1;
        out1 = out0;
        out0 = a1 * in0 + a2 * in1 + a3 * in2 - b1 * out1 - b2 * out2;



        return(out0);
    }
Exemplo n.º 6
0
        /// <summary>
        /// Creates an OpenAL output bound to the device named in the host
        /// configuration. If the configured device is not among the available
        /// device names, <c>deviceName</c> is null — NOTE(review): presumably
        /// AudioContext then falls back to the default device; confirm.
        /// </summary>
        public OpenALSoundOutput(IHostAudioManager sound)
        {
            _sound = sound;
            string deviceName = GetDeviceNames().FirstOrDefault(n => n == _sound.ConfigDevice);

            _context = new AudioContext(deviceName, _sound.SampleRate);
        }
Exemplo n.º 7
0
        /// <summary>
        /// Creates an OpenAL output using the sound device selected in the
        /// global configuration (null when that device is not available).
        /// </summary>
        public OpenALSoundOutput(Sound sound)
        {
            _sound = sound;

            // Find the configured device among the available ones, if present.
            string configuredDevice = null;
            foreach (string name in GetDeviceNames())
            {
                if (name == GlobalWin.Config.SoundDevice)
                {
                    configuredDevice = name;
                    break;
                }
            }

            _context = new AudioContext(configuredDevice, Sound.SampleRate);
        }
Exemplo n.º 8
0
        /// <summary>
        /// Starts the next echo-cancellation test: after base setup, lowers
        /// the player/recorder visualization rates, then on a thread pool
        /// thread builds the audio pipeline (resampler, DTX, echo-cancel
        /// filter, G.711 µ-law encoder) and pushes paired source/speaker
        /// frames through it until the frames run out or a stop is requested.
        /// Stopping the recorder and player then triggers the next test.
        /// </summary>
        protected override void StartNextTest()
        {
            // Set everything up.
            base.StartNextTest();

            // Tweak a few parameters.
            mPlayer.VisualizationRate   = 10;
            mRecorder.VisualizationRate = 10;

            // Actually run the test. Since the media element in the player is null, this is how to trigger the echo cancellation and all the rest.
            ThreadPool.QueueUserWorkItem(o =>
            {
                var resampler = new ResampleFilter(audioFormat, audioFormat);
                var dtx       = new DtxFilter(audioFormat);
                var encoder   = new G711MuLawEncoder(audioFormat);
                var ctx       = new AudioContext(audioFormat, resampler, dtx, mEchoCancelFilter, encoder);
                for (int i = 0; i < SourceFrames.Count && i < SpeakerFrames.Count; i++)
                {
                    if (mStopRequested)
                    {
                        mStopRequested = false;
                        return;
                    }
                    // Copy the loop variable so the lambda below captures a stable index.
                    int index = i;

                    // This has the (necessary) side-effect of registering the (virtually) played frame.
                    mPlayer.GetNextAudioFrame(ms => mRecorder.SubmitRecordedFrame(ctx, SpeakerFrames[index]));
                }

                // Stopping everything has the (necessary) side-effect of starting the next test, if there is one.
                mRecorder.StopRecording();
                mPlayer.StopPlaying();
            });
        }
Exemplo n.º 9
0
    // ReSharper disable once InconsistentNaming
    /// <summary>
    /// Console entry point: initializes audio, plays a pew on [space],
    /// exits on [escape], and disposes the audio context on the way out.
    /// </summary>
    private static void Main()
    {
        // Audio context always has to be initialized before any audio code.
        AudioContext.Initialize();

        Console.WriteLine("Press [space] to play pew. Press [escape] to exit.");

        // Poll the keyboard until escape is pressed.
        var running = true;
        while (running)
        {
            switch (Console.ReadKey().Key)
            {
                case ConsoleKey.Escape:
                    running = false;
                    break;
                case ConsoleKey.Spacebar:
                    playPewSound();
                    break;
            }
        }

        // Dispose audio resources.
        AudioContext.Instance.Dispose();
    }
Exemplo n.º 10
0
        /// <summary>
        /// Creates an OpenAL buffer/source pair for this cue, applies the
        /// current volume as the source gain, and hooks the volume, balance
        /// and fade change events. The actual sample upload is not
        /// implemented yet (see the #warning below).
        /// </summary>
        internal OpenTKAudioCue(Stream data, AudioContext ac)
        {
            this.ac = ac;

            buffer = AL.GenBuffer();
            ac.CheckErrors();

            source = AL.GenSource();
            ac.CheckErrors();

            // Apply the cue's current volume as the source gain.
            AL.Source(source, ALSourcef.Gain, (float)this.Volume);
            ac.CheckErrors();

#warning OpenTK Audio is not implemented.
            //using (AudioReader ar = new AudioReader(data))
            //{
            //    SoundData d = ar.ReadToEnd();
            //    AL.BufferData(source, d);
            //    ac.CheckErrors();
            //}

            // NOTE(review): with the upload commented out above, this
            // attaches a buffer that has no data in it.
            AL.Source(source, ALSourcei.Buffer, buffer);
            ac.CheckErrors();

            this.VolumeChanged  += new VolumeChangedEventHandler(OpenTKAudioCue_VolumeChanged);
            this.BalanceChanged += new BalanceChangedEventHandler(OpenTKAudioCue_BalanceChanged);
            this.FadeChanged    += new FadeChangedEventHandler(OpenTKAudioCue_FadeChanged);
        }
Exemplo n.º 11
0
 /// <summary>
 /// Lazily creates the shared OpenAL context; repeated calls are no-ops.
 /// (NOTE(review): method name has a typo, kept for compatibility.)
 /// </summary>
 private static void InitilizeSoundServices()
 {
     // Already initialized — nothing to do.
     if (context != null)
     {
         return;
     }

     context = new AudioContext();
 }
Exemplo n.º 12
0
    /// <summary>
    /// Samples a looping step pattern at a bar position: the pattern of
    /// <c>ptn.Length</c> steps, each <paramref name="stepLength"/> bars long,
    /// repeats forever, and the step under <paramref name="barpos"/> is
    /// looked up together with the normalized phase inside that step.
    /// </summary>
    /// <param name="ptn">Per-step values (e.g. pitches/triggers); may be null.</param>
    /// <param name="stepLength">Length of one step, in bars.</param>
    /// <param name="barpos">Current position, in bars.</param>
    /// <param name="ac">Audio context (unused here; kept for signature consistency).</param>
    /// <returns>x = pattern value at the current step, y = phase (0..1) within the step; zero vector for null/empty/degenerate input.</returns>
    public static Vector2 Read(int[] ptn, float stepLength, float barpos, AudioContext ac)
    {
        // Fix: also reject empty patterns and non-positive step lengths,
        // which would otherwise divide by zero below.
        if (ptn == null || ptn.Length == 0 || stepLength <= 0)
        {
            return(new Vector2());
        }
        var stepNum = ptn.Length;

        double loopLength = stepNum * stepLength;
        double loopphase  = Clock.Phase(barpos, loopLength);
        double loopPos    = loopphase * loopLength;
        // Clamp guards against loopphase rounding up to exactly 1.0, which
        // would index one past the end of the pattern.
        int    step       = Math.Min((int)(loopPos / stepLength), stepNum - 1);
        var    pitch      = ptn[step];

        // Phase within the current step, normalized to 0..1.
        // (The original re-assigned this in a no-op branch, removed here.)
        var trigPhase = (loopPos % stepLength) / stepLength;

        return(new Vector2(pitch, (float)trigPhase));
    }
Exemplo n.º 13
0
        /// <summary>
        /// Loads a wave file into one OpenAL buffer, plays it on a single
        /// source, blocks (polling every 250 ms) until playback finishes,
        /// then tears the source, buffer and context down.
        /// </summary>
        public static void Main()
        {
            using (AudioContext context = new AudioContext())
            {
                int buffer = AL.GenBuffer();
                int source = AL.GenSource();
                int state;

                int    channels, bits_per_sample, sample_rate;
                byte[] sound_data;
                // Fix: dispose the file stream once LoadWave has consumed it
                // (the original leaked the FileStream).
                using (var file = File.Open(filename, FileMode.Open))
                {
                    sound_data = LoadWave(file, out channels, out bits_per_sample, out sample_rate);
                }
                AL.BufferData(buffer, GetSoundFormat(channels, bits_per_sample), sound_data, sound_data.Length, sample_rate);

                AL.Source(source, ALSourcei.Buffer, buffer);
                AL.SourcePlay(source);

                Trace.Write("Playing");

                // Query the source to find out when it stops playing.
                do
                {
                    Thread.Sleep(250);
                    Trace.Write(".");
                    AL.GetSource(source, ALGetSourcei.SourceState, out state);
                }while ((ALSourceState)state == ALSourceState.Playing);

                Trace.WriteLine("");

                AL.SourceStop(source);
                AL.DeleteSource(source);
                AL.DeleteBuffer(buffer);
            }
        }
Exemplo n.º 14
0
        /// <summary>
        /// Shuts the audio backend down: stops the streaming worker thread
        /// (waiting up to one second before aborting it), clears the global
        /// active-instance reference, deletes every pooled OpenAL source and
        /// finally disposes the OpenAL context.
        /// </summary>
        void IDualityBackend.Shutdown()
        {
            // Shut down the streaming thread
            if (this.streamWorker != null)
            {
                this.streamWorkerEnd = true;
                // Give the worker one second to exit cooperatively, then
                // force-terminate it. NOTE(review): Thread.Abort is obsolete
                // on modern .NET — confirm the target framework supports it.
                if (!this.streamWorker.Join(1000))
                {
                    this.streamWorker.Abort();
                }
                this.streamWorkerQueueEvent.Dispose();
                this.streamWorkerEnd        = false;
                this.streamWorkerQueueEvent = null;
                this.streamWorkerQueue      = null;
                this.streamWorker           = null;
            }

            if (activeInstance == this)
            {
                activeInstance = null;
            }

            // Clear OpenAL source pool
            foreach (int alSource in this.sourcePool)
            {
                AL.DeleteSource(alSource);
            }

            // Shut down OpenAL context
            if (this.context != null)
            {
                this.context.Dispose();
                this.context = null;
            }
        }
Exemplo n.º 15
0
 /// <summary>
 /// Wraps an OpenAL object id together with its owning (non-null) context.
 /// </summary>
 internal AudioResource(AudioContext context, int alId)
 {
     this.context = context ?? throw new ArgumentNullException("context");
     this.id = alId;
 }
Exemplo n.º 16
0
 /// <summary>
 /// Tries to create the OpenAL context. On any known initialization
 /// failure (missing/corrupt OpenAL Soft dll, no device, context error)
 /// the error is only logged and IsValid stays false, so the application
 /// keeps running without audio.
 /// </summary>
 public ALAudioBackend()
 {
     try
     {
         _context = new AudioContext();
         IsValid  = true;
     }
     catch (AudioContextException e1)
     {
         Debug.WriteLine("Failed to create audio context, audio will not be played.");
         Debug.WriteLine(e1.ToString());
     }
     catch (DllNotFoundException e2)
     {
         Debug.WriteLine("Failed to find OpenAL Soft dll, audio will not be played.");
         Debug.WriteLine(e2.ToString());
     }
     catch (TypeInitializationException e3)
     {
         Debug.WriteLine("Failed to load OpenAL Soft dll (this might because the dll is missing, or corrupt, or another reason), audio will not be played.");
         Debug.WriteLine(e3.ToString());
     }
     catch (AudioDeviceException e4)
     {
         Debug.WriteLine("Failed to load audio device, audio will not be played.");
         Debug.WriteLine(e4.ToString());
     }
 }
Exemplo n.º 17
0
 /// <summary>
 /// Creates the audio subsystem: OpenAL context, Ogg streamer and the
 /// playback worker, which is started immediately.
 /// </summary>
 public Audio()
 {
     context  = new AudioContext();
     streamer = new OggStreamer();
     player   = new AudioPlayer();
     player.Run();
 }
Exemplo n.º 18
0
        /// <summary>
        /// Disposes the base resources plus the OpenAL context, if one exists.
        /// </summary>
        public override void Dispose()
        {
            base.Dispose();

            if (context != null)
            {
                context.Dispose();
                context = null;
            }
        }
Exemplo n.º 19
0
 /// <summary>
 /// Creates an EFX effect object of the given type in the context and
 /// exposes a read-only view of the sources attached to it.
 /// </summary>
 public AudioEffect(AudioContext context, AudioEffectType type)
     : base(context, Create(context))
 {
     // Bind the context only while issuing the native EFX call.
     using (Context.Bind())
         Context.AlEffecti(Id, AlEfxEnums.EffectType, (int)type);
     sourcesReadOnly = new ReadOnlyObservableCollection<AudioSource>(sources);
 }
Exemplo n.º 20
0
        /// <summary>
        /// (Re)creates the playback context on the requested device, falling
        /// back to the default device when the name is empty or unknown. On
        /// failure the partially created context is disposed and a
        /// ModelException(AudioNotEnabled) is thrown with the device name.
        /// </summary>
        /// <param name="deviceName">Preferred output device name; may be null or empty.</param>
        private void Initialize(string deviceName)
        {
            try
            {
                lock (_syncObject)
                {
                    _sources = new Dictionary <string, SourceDescription>();

                    if (string.IsNullOrEmpty(deviceName))
                    {
                        deviceName = AudioContext.DefaultDevice;
                    }

                    // Unknown device names also fall back to the default.
                    if (!AudioContext.AvailableDevices.Contains(deviceName))
                    {
                        deviceName = AudioContext.DefaultDevice;
                    }

                    _context = new AudioContext(deviceName);
                }
            }
            catch (Exception e)
            {
                if (_context != null)
                {
                    _context.Dispose();
                }

                _context = null;

                ClientModel.Logger.Write(e);
                throw new ModelException(ErrorCode.AudioNotEnabled, "Audio player do not initialized.", e, deviceName);
            }
        }
Exemplo n.º 21
0
        /// <summary>
        /// Loads a wave file into a new OpenAL buffer/source pair and starts
        /// playing it, lazily creating the shared audio context on first use.
        /// The entry is tracked in <c>soundEntries</c> for later management.
        /// </summary>
        /// <param name="fileName">Path of the wave file to play.</param>
        /// <param name="volume">Source gain.</param>
        /// <param name="loop">Whether playback loops.</param>
        public static void PlaySound(string fileName, float volume, bool loop)
        {
            if (context == null)
            {
                context = new AudioContext();
            }

            SoundEntry entry = new SoundEntry
            {
                Buffer = AL.GenBuffer(),
                Source = AL.GenSource()
            };

            AL.Source(entry.Source, ALSourcef.Gain, volume);
            AL.Source(entry.Source, ALSourceb.Looping, loop);

            int channels, bits_per_sample, sample_rate;

            // Fix: dispose the file stream once LoadWave has consumed it
            // (the original leaked the FileStream).
            byte[] sound_data;
            using (var file = File.Open(fileName, FileMode.Open))
            {
                sound_data = LoadWave(file, out channels, out bits_per_sample, out sample_rate);
            }
            AL.BufferData(entry.Buffer, GetSoundFormat(channels, bits_per_sample), sound_data, sound_data.Length, sample_rate);

            AL.Source(entry.Source, ALSourcei.Buffer, entry.Buffer);
            AL.SourcePlay(entry.Source);

            soundEntries.Add(entry);
        }
Exemplo n.º 22
0
 /// <summary>
 /// (Re)initializes the sound engine: replaces any existing OpenAL
 /// context, restarts microphone handling (failures there are logged,
 /// not fatal), invalidates previously loaded sound effects and reloads
 /// the built-in noise sound.
 /// </summary>
 public void Init(Client tclient, ClientCVar cvar)
 {
     if (Context != null)
     {
         Context.Dispose();
     }
     TheClient = tclient;
     CVars     = cvar;
     Context   = new AudioContext(AudioContext.DefaultDevice, 0, 0, false, true);
     Context.MakeCurrent();
     try
     {
         if (Microphone != null)
         {
             Microphone.StopEcho();
         }
         Microphone = new MicrophoneHandler(this);
     }
     catch (Exception ex)
     {
         // Microphone support is optional; log and continue without it.
         SysConsole.Output("Loading microphone handling", ex);
     }
     if (Effects != null)
     {
         // Mark old effects with -2 (appears to be an "invalid/reload"
         // sentinel — TODO confirm against SoundEffect.Internal usage).
         foreach (SoundEffect sfx in Effects.Values)
         {
             sfx.Internal = -2;
         }
     }
     Effects    = new Dictionary <string, SoundEffect>();
     PlayingNow = new List <ActiveSound>();
     Noise      = LoadSound(new DataStream(Convert.FromBase64String(NoiseDefault.NoiseB64)), "noise");
 }
Exemplo n.º 23
0
    /// <summary>
    /// Renders one audio buffer of a beat-synced voice: on each beat phase
    /// (from Clock.Beat4th — presumably quarter-note phase, confirm) an
    /// amplitude envelope and a faster-decaying envelope restart, and a sine
    /// oscillator whose frequency is swept by the second envelope is written
    /// to all channels. Each Vector4 packs (sample, beat phase, amp envelope,
    /// loop phase).
    /// </summary>
    public Vector4[] Read(AudioContext ac)
    {
        var data = new Vector4[ac.length];

        //float[] beatphase = beatPh.Read(ac.clock.BPM/60.0,ac);
        //float[] oscphase = oscPh.Read((double)freq, ac);



        for (int n = 0; n < data.Length; n += ac.channels)
        {
            //float[] barphase = phase;
            // Phase within a 1-bar loop, packed into the output's w component.
            float loopphase = (float)Clock.Phase(ac.clock.BarPosition, 1.0);
            var   phase     = ac.clock.readCache(n);

            // Beat phase drives both envelopes.
            float beatphase = (float)Clock.Beat4th(phase);
            float envA      = (float)DSP.Env(0.001f, 0.99f, 1, beatphase, ac);
            float envP      = (float)DSP.Env(0.001f, 0.8f, 6, beatphase, ac);
            // Oscillator frequency sweeps downward with envP.
            var   osc       = oscPh.read(0.5 * ac.baseFreq * envP, ac);
            // float s = Sine(oscPh.read(freq, ac), ac) * amp * sawsq;
            float o = DSP.Sine((float)osc, ac) * envA;

            for (int i = 0; i < ac.channels; i++)
            {
                data[n + i] = new Vector4(o, beatphase, envA, loopphase);
            }
        }
        return(data);
    }
Exemplo n.º 24
0
        /// <summary>
        /// Plays a looping "pew" sample: decodes the wav, queues its chunks
        /// onto one OpenAL source, waits for a key press to start playback
        /// and another to exit, then frees the source and buffers.
        /// </summary>
        static void Main(string[] args)
        {
            using (var audioContext = new AudioContext()) {
                var(bytes, alFormat, sampleRate) = WavReader.ReadWav(File.OpenRead("assets/pew.wav"));

                // One OpenAL buffer per decoded chunk.
                var bufferIds = AL.GenBuffers(bytes.Count);
                for (var chunk = 0; chunk < bufferIds.Length; chunk++)
                {
                    AL.BufferData(bufferIds[chunk], alFormat, bytes[chunk], bytes[chunk].Length, sampleRate);
                }

                var sourceId = AL.GenSource();
                AL.SourceQueueBuffers(sourceId, bufferIds.Length, bufferIds);
                AL.Source(sourceId, ALSourceb.Looping, true);

                Console.WriteLine("Press button to play sound");
                Console.ReadKey();

                AL.SourcePlay(sourceId);

                Console.WriteLine("Sound playing. Press button to exit");
                Console.ReadKey();

                AL.DeleteSource(sourceId);
                AL.DeleteBuffers(bufferIds);
            }
        }
Exemplo n.º 25
0
    /// <summary>
    /// Renders one buffer of a noise voice driven by a fixed 16-step trigger
    /// pattern (1/8-bar steps): each step's trigger phase restarts a short
    /// amplitude envelope applied to noise shaped by the oscillator phase.
    /// Each Vector4 packs (sample, trigger phase, amp envelope, loop phase).
    /// </summary>
    public Vector4[] Read(AudioContext ac)
    {
        Vector4[] data = new Vector4[ac.length];

        // Perf fix: the trigger pattern is constant, so allocate it once per
        // buffer instead of once per output frame as before.
        var ptn = new int[] { 1, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1 };

        for (int n = 0; n < data.Length; n += ac.channels)
        {
            float barpos    = (float)ac.clock.readCache(n);
            // Position within one full pass of the pattern, in bars.
            float loopPhase = barpos % (ptn.Length * 1.0f / 8);
            var   ptnphase  = Pattern.Read(ptn, 1.0f / 8, barpos, ac);
            var   trigP     = ptnphase.y;   // phase within the current step

            float envA = (float)DSP.Env(0.001f, 0.1f, 1, trigP, ac);
            // NOTE(review): envP is computed but never used — confirm intent.
            float envP = (float)DSP.Env(0.001f, 0.99f, 9, trigP, ac);
            var   osc  = oscPh.read(ac.baseFreq, ac);
            float o = DSP.noise((float)osc) * envA;

            for (int i = 0; i < ac.channels; i++)
            {
                data[n + i] = new Vector4(o, trigP, envA, loopPhase);
            }
        }
        return(data);
    }
Exemplo n.º 26
0
		/// <summary>
		/// Creates the sound subsystem. Audio stays off when the runtime
		/// disables sound or the platform is not Windows; otherwise an OpenAL
		/// context is created, with initialization failures logged and
		/// flagged (_initError) rather than thrown.
		/// </summary>
		public Sounds(Runtime runtime)
		{
			_runtime = runtime;
			if (NoSound)
			{
				Log($"Sound is disabled by runtime setting.");
				return;
			}

			if (Native.Platform != Platform.Windows)
			{
				_initError = true;
				Log($"Sound is not supported on {Native.Platform}.");
				return;
			}
			
			try
			{
				_audioContext = new AudioContext();
			}
			catch(TypeInitializationException)
			{
				// Typically means the OpenAL native library failed to load.
				_initError = true;
				Log("Could not initialize audio, is OpenAL installed?");
			}
			catch
			{
				// Deliberate best-effort: any other failure just disables audio.
				_initError = true;
				Log("Could not initialize audio device.");
			}
		}
Exemplo n.º 27
0
        /// <summary>
        /// Static initialization of the audio device: creates and activates
        /// the OpenAL context, sets the default listener state (full volume,
        /// origin position, looking down -Z with +Y up) and registers cleanup
        /// on process exit.
        /// </summary>
        static AudioDevice()
        {
            // Create audio context (which create the ALCdevice and ALCcontext)
            _context = new AudioContext();
            _context.MakeCurrent();

            // Configure default state of listener
            _listenerVolume    = 100f;
            _listenerPosition  = new Vector3(0f, 0f, 0f);
            _listenerDirection = new Vector3(0f, 0f, -1f);
            _listenerUpVector  = new Vector3(0f, 1f, 0f);


            // Apply the listener properties the user might have set
            // (OpenAL orientation is a 6-float "at" + "up" vector pair).
            float[] orientation = { _listenerDirection.X,
                                    _listenerDirection.Y,
                                    _listenerDirection.Z,
                                    _listenerUpVector.X,
                                    _listenerUpVector.Y,
                                    _listenerUpVector.Z };

            // Gain is normalized from the 0-100 volume scale.
            ALChecker.Check(() => AL.Listener(ALListenerf.Gain, _listenerVolume * 0.01f));
            ALChecker.Check(() => AL.Listener(ALListener3f.Position, _listenerPosition.X, _listenerPosition.Y, _listenerPosition.Z));
            ALChecker.Check(() => AL.Listener(ALListenerfv.Orientation, ref orientation));

            // Dispose Audio Device when exiting application
            AppDomain.CurrentDomain.ProcessExit += (s, e) => Free();
        }
Exemplo n.º 28
0
        /// <summary>
        /// Streaming worker: creates this thread's OpenAL context, pre-fills
        /// a ring of 10 buffers, then loops forever refilling and re-queueing
        /// any buffer the source has finished playing.
        /// </summary>
        private void StreamingThread()
        {
            audiocontext = new AudioContext();
            source       = AL.GenSource();
            buffers      = AL.GenBuffers(10);
            AL.Source(source, ALSourceb.SourceRelative, true);

            // Pre-fill every buffer before starting playback.
            for (int i = 0; i < 10; i++)
            {
                Stream(buffers[i]);
            }

            // Fix: queue ALL pre-filled buffers — the original queued only 2
            // of the 10, leaving 8 filled buffers permanently unused.
            AL.SourceQueueBuffers(source, buffers.Length, buffers);
            AL.SourcePlay(source);

            while (true)
            {
                int processed;
                AL.GetSource(source, ALGetSourcei.BuffersProcessed, out processed);

                if (processed != 0)
                {
                    // Recycle one finished buffer per iteration.
                    int bufferid = 0;
                    AL.SourceUnqueueBuffers(source, 1, ref bufferid);
                    Stream(bufferid);
                    AL.SourceQueueBuffer(source, bufferid);
                }
                else
                {
                    // Fix: yield briefly instead of busy-spinning at 100% CPU
                    // while waiting for a buffer to finish.
                    System.Threading.Thread.Sleep(1);
                }
            }
        }
Exemplo n.º 29
0
        /// <summary>
        /// Dedicated sound thread: creates the OpenAL context, binds the AL
        /// helper thread, places the listener at the origin, then loops while
        /// the engine runs — updating each sound layer, draining queued AL
        /// actions and pausing ~1 ms per iteration.
        /// </summary>
        private void SoundThreadLoop()
        {
            _audioContext = new AudioContext();
            ALThread.BindThread();

            // Setup listener.
            AL.Listener(ALListener3f.Position, 0, 0, 0);
            AL.Listener(ALListener3f.Velocity, 0, 0, 0);

            while (Context.IsRunning)
            {
                // Update running playbacks.
                lock (_layers)
                {
                    foreach (KeyValuePair <string, SoundLayer> layer in _layers)
                    {
                        layer.Value.Update();
                    }
                }

                // Run queued actions.
                ALThread.Run();

                Helpers.CheckErrorAL("loop end");

                // Synchronous ~1 ms pause to avoid busy-spinning this thread.
                Task.Delay(1).Wait();
            }
        }
Exemplo n.º 30
0
    /// <summary>
    /// Advances the oscillator phase at <paramref name="freq"/> and shapes
    /// it with the supplied waveform function, returning one sample.
    /// </summary>
    public float Read(Func <float, AudioContext, float> f, float freq, AudioContext ac)
    {
        return f((float)oscPh.read(freq, ac), ac);
    }
Exemplo n.º 31
0
        /// <summary>
        /// Form startup: creates the audio context and two Sylenth VST nodes
        /// (melody and bass), embeds their editors in the form panels, wires
        /// them to the context destination, then starts the UI timer and
        /// opens the key-change device.
        /// </summary>
        private void Form1_Load(object sender, EventArgs e)
        {
            ac = new AudioContext();

            vst = new VstNode(ac, pathSylenth);
            vst.OpenEditor(panel1.Handle);
            vst.Connect(ac.Destination);
            try
            {
                vst.LoadProgram("mel_state.fxp");
            }
            catch (Exception)
            {
                // Best effort: a missing/invalid preset just leaves the VST
                // at its default program. (Fix: removed the unused exception
                // variable that triggered a CS0168 warning.)
            }
            vstBass = new VstNode(ac, pathSylenth);
            vstBass.OpenEditor(panel2.Handle);
            vstBass.Connect(ac.Destination);

            try
            {
                vstBass.LoadProgram("bass_state.fxp");
            }
            catch (Exception)
            {
                // Best effort: missing/invalid bass preset is tolerated too.
            }

            timer.Tick += Timer_Tick;
            timer.Start();

            stmDevice.KeyChange += StmDevice_KeyChange;
            stmDevice.Open();
        }
Exemplo n.º 32
0
        /// <summary>
        /// Creates the OpenAL audio manager: validates arguments, destroys
        /// the pre-existing ALC context (MonoGame creates one), creates and
        /// activates a fresh context, and initializes the EFX extension —
        /// registering a low-pass filter effect when EFX is available.
        /// </summary>
        /// <param name="content">Content manager used to load audio assets.</param>
        /// <param name="audioDirectoryName">Directory (under content) holding the audio files.</param>
        /// <exception cref="ArgumentNullException">Either argument is null.</exception>
        public OpenAlAudioManager(ContentManager content, string audioDirectoryName)
        {
            if (content == null)
            {
                throw new ArgumentNullException("content");
            }
            if (audioDirectoryName == null)
            {
                throw new ArgumentNullException("audioDirectoryName");
            }

            _content            = content;
            _audioDirectoryName = audioDirectoryName;

            // destroy previous context, specifically because monogame creates one
            var context = Alc.GetCurrentContext();

            Alc.DestroyContext(context);

            _context = new AudioContext();
            _context.MakeCurrent();

            _efx     = new EffectsExtension();
            _sounds  = new Dictionary <string, Sound>();
            _effects = new Dictionary <string, AudioEffect>();
            if (_efx.IsInitialized)
            {
                _effects.Add("lowpassfilter", new LowPassFilterAudioEffect(_efx));
            }
        }
Exemplo n.º 33
0
 /// <summary>
 /// Allocates one EFX filter object in the given context and returns its id.
 /// </summary>
 static int Create(AudioContext context)
 {
     unsafe
     {
         int output;
         // Bind the context only for the duration of the native call.
         using (context.Bind())
             context.AlGenFilters(1, &output);
         return output;
     }
 }
Exemplo n.º 34
0
 /// <summary>
 /// Allocates one EFX effect object in the given context and returns its id.
 /// </summary>
 static int Create(AudioContext context)
 {
     unsafe
     {
         int effectId;
         // Bind the context only for the duration of the native call.
         using (context.Bind())
         {
             context.AlGenEffects(1, &effectId);
         }
         return effectId;
     }
 }
Exemplo n.º 35
0
    /// <summary>
    /// Changes the music to a new track by crossfading: fades the current
    /// track out, swaps in the clip mapped to <paramref name="destination"/>
    /// (unknown contexts fall back to the title-screen track), then fades
    /// back in to the configured music volume.
    /// </summary>
    /// <param name="destination">Screen/state whose music should play.</param>
    public void ChangeMusic( AudioContext destination )
    {
        //Store new track
        AudioClip track;

        //Check destination
        switch ( destination )
        {
            case AudioContext.MainMenu:
                track = titleScreen;
                break;
            case AudioContext.HowToPlay:
                track = howToPlay;
                break;
            case AudioContext.AbilitySelection:
                track = selectAbilities;
                break;
            case AudioContext.Gameplay:
                // NOTE(review): gameplay currently reuses the how-to-play
                // track; the random gameplay selection is commented out.
                track = howToPlay;//gameplayTracks [ Random.Range ( 0, gameplayTracks.Length ) ];
                break;
            case AudioContext.Results:
                track = results;
                break;
            default:
                track = titleScreen;
                break;
        }

        //Fade music in and out: fade out, swap clip, fade back in.
        Sequence fade = DOTween.Sequence ( )
            .Append ( GetComponent<AudioSource>().DOFade ( 0, FADE_TIME ) )
            .AppendCallback ( () =>
            {
                music.Stop ( );
                music.clip = track;
                music.Play ( );
            } )
            .Append ( GetComponent<AudioSource>().DOFade ( Settings.MusicVolume, FADE_TIME ) )
            .SetRecyclable ( )
            .Play ( );
    }
Exemplo n.º 36
0
 // TODO
 /// <summary>Creates an EFX effect object of type Flanger in the given context.</summary>
 public Flanger(AudioContext context)
     : base(context, AudioEffectType.Flanger)
 {
 }
Exemplo n.º 37
0
 // TODO
 /// <summary>Creates an EFX effect object of type Equalizer in the given context.</summary>
 public Equalizer(AudioContext context)
     : base(context, AudioEffectType.Equalizer)
 {
 }
Exemplo n.º 38
0
 // TODO
 /// <summary>Creates an EFX effect object of type Echo in the given context.</summary>
 public Echo(AudioContext context)
     : base(context, AudioEffectType.Echo)
 {
 }
Exemplo n.º 39
0
 // TODO
 /// <summary>Creates an EFX effect object of type Distortion in the given context.</summary>
 public Distortion(AudioContext context)
     : base(context, AudioEffectType.Distortion)
 {
 }
Exemplo n.º 40
0
 // TODO
 /// <summary>Creates an EFX effect object of type Compressor in the given context.</summary>
 public Compressor(AudioContext context)
     : base(context, AudioEffectType.Compressor)
 {
 }
Exemplo n.º 41
0
 /// <summary>Creates the default OpenAL context for this audio backend.</summary>
 public OpenTkAudio()
 {
     context = new AudioContext();
 }
Exemplo n.º 42
0
 /// <summary>Creates an EFX effect object of type Chorus in the given context.</summary>
 public Chorus(AudioContext context)
     : base(context, AudioEffectType.Chorus)
 {
 }
Exemplo n.º 43
0
 // TODO
 /// <summary>Creates an EFX effect object of type AutoWah in the given context.</summary>
 public AutoWah(AudioContext context)
     : base(context, AudioEffectType.AutoWah)
 {
 }
Exemplo n.º 44
0
    /// <summary>
    /// (Re)creates the playback context on the requested device, falling back
    /// to the default device when the name is empty or unknown. On failure
    /// the partially created context is disposed and a
    /// ModelException(AudioNotEnabled) is thrown with the device name.
    /// </summary>
    /// <param name="deviceName">Preferred output device name; may be null or empty.</param>
    private void Initialize(string deviceName)
    {
      try
      {
        lock (syncObject)
        {
          sources = new Dictionary<string, SourceDescription>();

          if (string.IsNullOrEmpty(deviceName))
            deviceName = AudioContext.DefaultDevice;

          // Unknown device names also fall back to the default.
          if (!AudioContext.AvailableDevices.Contains(deviceName))
            deviceName = AudioContext.DefaultDevice;

          context = new AudioContext(deviceName);
        }
      }
      catch (Exception e)
      {
        if (context != null)
          context.Dispose();

        context = null;

        ClientModel.Logger.Write(e);
        throw new ModelException(ErrorCode.AudioNotEnabled, "Audio player do not initialized.", e, deviceName);
      }
    }
Exemplo n.º 45
0
 /// <summary>Reads the audio data into a newly allocated buffer owned by <paramref name="context"/>.</summary>
 public AudioBuffer ReadToBuffer(AudioContext context) => ReadToBuffer(new AudioBuffer(context));
Exemplo n.º 46
0
 // TODO
 /// <summary>Creates an EFX effect object of type FrequencyShifter in the given context.</summary>
 public FrequencyShifter(AudioContext context)
     : base(context, AudioEffectType.FrequencyShifter)
 {
 }
Exemplo n.º 47
0
 // TODO
 /// <summary>Creates an EFX effect object of type PitchShifter in the given context.</summary>
 public PitchShifter(AudioContext context)
     : base(context, AudioEffectType.PitchShifter)
 {
 }
Exemplo n.º 48
0
 /// <summary>Creates an EFX effect object of type Null in the given context.</summary>
 public Null(AudioContext context)
     : base(context, AudioEffectType.Null)
 {
 }
Exemplo n.º 49
0
 // TODO
 /// <summary>Creates an EFX effect object of type VocalMorpher in the given context.</summary>
 public VocalMorpher(AudioContext context)
     : base(context, AudioEffectType.VocalMorpher)
 {
 }
Exemplo n.º 50
0
 // TODO
 /// <summary>Creates an EFX effect object of type RingModulator in the given context.</summary>
 public RingModulator(AudioContext context)
     : base(context, AudioEffectType.RingModulator)
 {
 }
Exemplo n.º 51
0
 /// <summary>Associates this listener with its owning audio context.</summary>
 internal AudioListener(AudioContext context)
 {
     this.context = context;
 }
Exemplo n.º 52
0
 /// <summary>Creates an EFX effect object of type EaxReverb in the given context.</summary>
 public EaxReverb(AudioContext context)
     : base(context, AudioEffectType.EaxReverb)
 {
 }
Exemplo n.º 53
0
 /// <summary>Creates an EFX filter object of type Null in the given context.</summary>
 public Null(AudioContext context)
     : base(context, AudioFilterType.Null)
 {
 }
Exemplo n.º 54
0
 /// <summary>Creates an EFX filter object of type HighPass in the given context.</summary>
 public HighPass(AudioContext context)
     : base(context, AudioFilterType.HighPass)
 {
 }
Exemplo n.º 55
0
 /// <summary>
 /// Creates an EFX filter object of the given type in the context.
 /// </summary>
 public AudioFilter(AudioContext context, AudioFilterType type)
     : base(context, Create(context))
 {
     // Bind the context only while issuing the native EFX call.
     using (context.Bind())
         context.AlFilteri(Id, AlEfxEnums.FilterType, (int)type);
 }
Exemplo n.º 56
0
 /// <summary>Creates an EFX filter object of type BandPass in the given context.</summary>
 public BandPass(AudioContext context)
     : base(context, AudioFilterType.BandPass)
 {
 }
Exemplo n.º 57
0
 /// <summary>Creates an EFX filter object of type LowPass in the given context.</summary>
 public LowPass(AudioContext context)
     : base(context, AudioFilterType.LowPass)
 {
 }