Inheritance: MonoBehaviour
Example #1
    /// Initializes the audio system with the current audio configuration.
    /// @note This should only be called from the main Unity thread.
    public static void Initialize(CardboardAudioListener listener, Quality quality)
    {
        if (!initialized)
        {
            // Initialize the audio system.
#if UNITY_4_5 || UNITY_4_6 || UNITY_4_7
            sampleRate  = AudioSettings.outputSampleRate;
            numChannels = (int)AudioSettings.speakerMode;
            int numBuffers = -1;
            AudioSettings.GetDSPBufferSize(out framesPerBuffer, out numBuffers);
#else
            AudioConfiguration config = AudioSettings.GetConfiguration();
            sampleRate      = config.sampleRate;
            numChannels     = (int)config.speakerMode;
            framesPerBuffer = config.dspBufferSize;
#endif
            if (numChannels != (int)AudioSpeakerMode.Stereo)
            {
                Debug.LogError("Only 'Stereo' speaker mode is supported by Cardboard.");
                return;
            }
            Initialize(quality, sampleRate, numChannels, framesPerBuffer);
            listenerTransform = listener.transform;
            initialized       = true;

            Debug.Log("Cardboard audio system is initialized (Quality: " + quality + ", Sample Rate: " +
                      sampleRate + ", Channels: " + numChannels + ", Frames Per Buffer: " +
                      framesPerBuffer + ").");
        }
        else if (listener.transform != listenerTransform)
        {
            Debug.LogError("Only one CardboardAudioListener component is allowed in the scene.");
            CardboardAudioListener.Destroy(listener);
        }
    }
Example #2
 private void Awake()
 {
     audioSource = this.GetComponent <AudioSource>();
     if (!audioSource)
     {
         audioSource = this.gameObject.AddComponent <AudioSource>();
     }
     audioSource.spatialBlend = 1;
     audioSource.playOnAwake  = false;
     // GetSpatializerPluginName returns an empty string (not null) when no spatializer plugin is selected.
     if (!string.IsNullOrEmpty(AudioSettings.GetSpatializerPluginName()))
     {
         audioSource.spatialize = true;
     }
     lowPassFilter = this.GetComponent <AudioLowPassFilter>();
     if (!lowPassFilter)
     {
         lowPassFilter = this.gameObject.AddComponent <AudioLowPassFilter>();
     }
     lowPassFilter.enabled = false;
     highPassFilter        = this.GetComponent <AudioHighPassFilter>();
     if (!highPassFilter)
     {
         highPassFilter = this.gameObject.AddComponent <AudioHighPassFilter>();
     }
     highPassFilter.enabled = false;
     reverbFilter           = this.GetComponent <AudioReverbFilter>();
     if (!reverbFilter)
     {
         reverbFilter = this.gameObject.AddComponent <AudioReverbFilter>();
     }
     reverbFilter.reverbPreset = AudioReverbPreset.Off;
     reverbFilter.enabled      = false;
 }
Example #3
        /// <summary>
        /// Starts microphone capture on the first available microphone.
        /// </summary>
        /// <returns>True if a microphone was available to capture, otherwise false.</returns>
        public bool StartMicrophone()
        {
            if (microphoneAudioSource == null)
            {
                Debug.LogWarning("No AudioSource for microphone audio was specified");
                return(false);
            }

            if (Microphone.devices.Length == 0)
            {
                Debug.LogWarning("No connected microphones detected");
                return(false);
            }

            int minFreq, maxFreq, reqFreq;

            Microphone.GetDeviceCaps(Microphone.devices[0], out minFreq, out maxFreq);
            reqFreq = Mathf.Clamp(DefaultMicrophoneFreq, minFreq, maxFreq);
            microphoneAudioSource.clip = Microphone.Start(Microphone.devices[0], true, 1, reqFreq);
            microphoneAudioSource.loop = true;

            // don't start playing the AudioSource until we have some data (else we get a weird doubling of the audio)
            StartCoroutine(StartAudioSourceCoroutine());

            AudioConfiguration currentConfiguration = AudioSettings.GetConfiguration();

            tickTime = (float)currentConfiguration.dspBufferSize / currentConfiguration.sampleRate;

            isInitialized = true;

            return(true);
        }
Example #4
        public static void Init()
        {
            if (UniqueInstance != null)
            {
                Debug.LogWarning("GATInfo can only be initialized once!");
                return;
            }

            int nbOfChannels;

            switch (AudioSettings.speakerMode)
            {
            case AudioSpeakerMode.Mono:
                nbOfChannels = 1;
                break;

            case AudioSpeakerMode.Stereo:
                nbOfChannels = 2;
                break;

            case AudioSpeakerMode.Quad:
                nbOfChannels = 4;
                break;

            case AudioSpeakerMode.Surround:
                nbOfChannels = 5;
                break;

            case AudioSpeakerMode.Mode5point1:
                nbOfChannels = 6;
                break;

            case AudioSpeakerMode.Mode7point1:
                nbOfChannels = 8;
                break;

            default:
                nbOfChannels = 2;
                break;
            }

            int bufferSize;
            int numBuffers;

            AudioSettings.GetDSPBufferSize(out bufferSize, out numBuffers);

            double dspBufferDuration = (( double )(bufferSize)) / AudioSettings.outputSampleRate;

            UniqueInstance = new GATInfo(nbOfChannels, bufferSize, dspBufferDuration);

            if (RequestedSampleRate != 0 && OutputSampleRate != RequestedSampleRate)
            {
                Debug.LogWarning("Requested sample rate of " + RequestedSampleRate + " is not available on this platform.");
            }

#if GAT_DEBUG
            Debug.Log("Number of channels: " + nbOfChannels);
            Debug.Log("dsp buffer size: " + bufferSize + " duration: " + dspBufferDuration + " sample rate: " + OutputSampleRate);
#endif
        }
Example #5
        void Awake()
        {
#if UNITY_STANDALONE_OSX && UNITY_PRO_LICENSE
            var sampleRate = AudioSettings.outputSampleRate;

            // Create an audio source.
            audioSource             = gameObject.AddComponent <AudioSource>();
            audioSource.playOnAwake = false;
            audioSource.loop        = true;

            // Initialize the Lasp module.
            Lasp.Initialize();

            // Shrink the DSP buffer to reduce latency.
            AudioSettings.SetDSPBufferSize(bufferSize, 4);

            // Create a null clip and kick it.
            audioSource.clip = AudioClip.Create("Lasp", 1024, 1, sampleRate, false, false);
            audioSource.Play();

            // Estimate the latency.
            estimatedLatency = (float)bufferSize / sampleRate;
#else
            Debug.LogWarning("NativeAudioInput is not supported in the current configuration.");
#endif
        }
Example #6
    void Awake()
    {
        // create a chuck
        myChuckId = Chuck.Manager.InitializeFilter();

        // initialize my buffer
        int numBuffers;

        AudioSettings.GetDSPBufferSize(out myBufferLength, out numBuffers);
        myOutBuffer = new float[myBufferLength * myNumChannels];

        // setup group for reliable ordering
        mySource = GetComponent <AudioSource>();
        mySource.outputAudioMixerGroup = Chuck.FindAudioMixerGroup("ChuckMainInstanceDestination");

        // setup mic
        if (useMicrophone)
        {
            SetupMic();
        }

        // has init
        hasInit = true;

        // when scene is unloaded, check whether we need to clear the chuck
        SceneManager.sceneUnloaded += OnSceneUnloaded;

        // don't delete me?
        if (persistToNextScene)
        {
            DontDestroyOnLoad(this.gameObject);
        }
    }
Example #7
    void OnEnable()
    {
        int bufferLength = 0;
        int numBuffers   = 0;

        AudioSettings.GetDSPBufferSize(out bufferLength, out numBuffers);

#if UNITY_5
        _numChannels = GetNumChannels(AudioSettings.driverCapabilities);
        if (AudioSettings.speakerMode != AudioSpeakerMode.Raw &&
            AudioSettings.speakerMode < AudioSettings.driverCapabilities)
        {
            _numChannels = GetNumChannels(AudioSettings.speakerMode);
        }
        Debug.Log(string.Format("[AVProUnityAudiocapture] SampleRate: {0}hz SpeakerMode: {1} BestDriverMode: {2} (DSP using {3} buffers of {4} bytes using {5} channels)", AudioSettings.outputSampleRate, AudioSettings.speakerMode.ToString(), AudioSettings.driverCapabilities.ToString(), numBuffers, bufferLength, _numChannels));
#else
        _numChannels = GetNumChannels(AudioSettings.driverCaps);
        if (AudioSettings.speakerMode != AudioSpeakerMode.Raw &&
            AudioSettings.speakerMode < AudioSettings.driverCaps)
        {
            _numChannels = GetNumChannels(AudioSettings.speakerMode);
        }

        Debug.Log(string.Format("[AVProUnityAudiocapture] SampleRate: {0}hz SpeakerMode: {1} BestDriverMode: {2} (DSP using {3} buffers of {4} bytes using {5} channels)", AudioSettings.outputSampleRate, AudioSettings.speakerMode.ToString(), AudioSettings.driverCaps.ToString(), numBuffers, bufferLength, _numChannels));
#endif

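        // Allocate capture buffers as BufferSize multiples of a full DSP block (buffer length x number of buffers x channels).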
        _buffer        = new float[bufferLength * _numChannels * numBuffers * BufferSize];
        _readBuffer    = new float[bufferLength * _numChannels * numBuffers * BufferSize];
        _bufferIndex   = 0;
        _bufferHandle  = GCHandle.Alloc(_readBuffer, GCHandleType.Pinned);
        _overflowCount = 0;
    }
Example #8
        private void PaintValues()
        {
            LookSpeedSlider.value = ConfigState.Instance.LookSpeed;
            LookYToggle.isOn      = ConfigState.Instance.LookInvert;

            GraphicsQualitySlider.maxValue     = QualitySettings.names.Length - 1;
            GraphicsQualitySlider.value        = QualitySettings.GetQualityLevel();
            GraphicsQualitySlider.interactable = true;

            AntialiasingQualitySlider.value = ConfigState.Instance.AntialiasingQuality;
            ViewDistanceSlider.value        = ConfigState.Instance.ViewDistance;

            SoundVolumeSlider.value = ConfigState.Instance.SoundVolume;
            MusicVolumeSlider.value = ConfigState.Instance.MusicVolume;

            var cList = new List <string>(Enum.GetNames(typeof(AudioSpeakerMode)));

            ChannelDropdown.ClearOptions();
            ChannelDropdown.AddOptions(cList);
            ChannelDropdown.value = cList.IndexOf(AudioSettings.GetConfiguration().speakerMode.ToString());

            var iList = MappedInput.AvailableMappers.ToList();

            InputDropdown.ClearOptions();
            InputDropdown.AddOptions(iList);
            InputDropdown.value = iList.IndexOf(ConfigState.Instance.InputMapper);

            //handle subpanels
            foreach (var subpanel in PanelContainer.GetComponentsInChildren <ConfigSubpanelController>())
            {
                subpanel.PaintValues();
            }
        }
Example #9
    // private uint ksmpsIndex = 0;

    #endregion PRIVATE_FIELDS


    private void Awake()
    {
        if (csoundUnityGameObject)
        {
            csoundUnity = csoundUnityGameObject.GetComponent <CsoundUnity>();
            if (!csoundUnity)
            {
                Debug.LogError("CsoundUnity was not found?");
            }
        }

        AudioSettings.GetDSPBufferSize(out bufferSize, out numBuffers);

        audioSource = GetComponent <AudioSource>();
        if (!audioSource)
        {
            Debug.LogError("AudioSource was not found?");
        }

        audioSource.velocityUpdateMode = AudioVelocityUpdateMode.Fixed;
        audioSource.spatialBlend       = 1.0f;

        if (selectedAudioChannelIndexByChannel == null)
        {
            selectedAudioChannelIndexByChannel = new int[2];
        }
        // TODO: force doppler level of the AudioSource to 0, to avoid audio artefacts ?
        // audioSource.dopplerLevel = 0;
    }
Example #10
    private void Awake()
    {
        // Load audio settings from PlayerPrefs into an AudioSettings object.
        AudioSettings myAudioSettings = new AudioSettings();

        myAudioSettings = SaveHandler <AudioSettings> .Load(SaveHandler <AudioSettings> .SaveFileName.audioSettings);


        // Check whether any audio settings exist in PlayerPrefs; if not, set all volumes to 100% and save that to PlayerPrefs.

        if (myAudioSettings == null)
        {
            myAudioSettings = new AudioSettings {
                vfxVolume = 1f
            };



            SaveHandler <AudioSettings> .Save(myAudioSettings, SaveHandler <AudioSettings> .SaveFileName.audioSettings);
        }



        //Debug.Log("vfx volume " + myAudioSettings.vfxVolume);

        foreach (Sound s in sounds)
        {
            s.source        = gameObject.AddComponent <AudioSource>();
            s.source.clip   = s.clip;
            s.source.volume = s.volume * myAudioSettings.vfxVolume;
            s.source.pitch  = s.pitch;
            s.source.loop   = s.loop;
        }
    }
Example #11
    // Use this for initialization
    void Start()
    {
        AudioConfiguration audio_config = AudioSettings.GetConfiguration();

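        // AudioSettings.Reset reinitializes the audio output with the given configuration and stops anything currently playing, so the AudioSource is (re)started afterwards.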
        AudioSettings.Reset(audio_config);
        this.c.Play();
    }
Example #12
    //~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    //	* Derived Method: Start
    //~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    void Start()
    {
        // Set DSP Buffer Size
#if !UNITY_EDITOR && !UNITY_STANDALONE_WIN && !UNITY_STANDALONE_OSX
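        // On non-desktop builds, halve the DSP buffer size to reduce audio latency.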
        var audioSettings = AudioSettings.GetConfiguration();
        audioSettings.dspBufferSize /= 2;
        AudioSettings.Reset(audioSettings);
#endif
        Instance = this;

        // Grab all Audio Sources already existing on this object
        sm_aAudioSource.Clear();
        AudioSource[] acquiredAudioSources = this.gameObject.GetComponents <AudioSource>();
        if (acquiredAudioSources.Length > m_iTotalAudioSourcesCount)
        {
            m_iTotalAudioSourcesCount = acquiredAudioSources.Length;
        }

        // Add Existing To List
        for (int i = 0; i < acquiredAudioSources.Length; ++i)
        {
            sm_aAudioSource.Add(new AudioHandlerManager(acquiredAudioSources[i]));
        }

        // Create the rest of the AudioSources up until the desired amount exist. Or stop if we already have more than required.
        for (int i = sm_aAudioSource.Count; i < m_iTotalAudioSourcesCount; ++i)
        {
            sm_aAudioSource.Add(new AudioHandlerManager(CreateAudioSource()));
        }
    }
Example #13
    /// <summary>
    /// Call this function to create geometry handle
    /// </summary>
    void CreatePropagationGeometry()
    {
        AudioConfiguration config = AudioSettings.GetConfiguration();

        // Create Geometry
        if (PropIFace.CreateAudioGeometry(out geometryHandle) != OSPSuccess)
        {
            throw new Exception("Unable to create geometry handle");
        }

        // Upload Mesh
        if (filePath != null && filePath.Length != 0 && fileEnabled && Application.isPlaying)
        {
            if (!ReadFile())
            {
                Debug.Log("Failed to read file, attempting to regenerate audio geometry");

                // We should not try to upload data dynamically if data already exists
                UploadGeometry();
            }
        }
        else
        {
            UploadGeometry();
        }
    }
Example #14
        /// <summary>
        /// Apply the current ConfigState configuration to the game
        /// </summary>
        public void ApplyConfiguration()
        {
            //AUDIO CONFIG
            AudioListener.volume = ConfigState.Instance.SoundVolume;
            var ac = AudioSettings.GetConfiguration();

#if UNITY_WSA
            if (ConfigState.Instance.SpeakerMode == AudioSpeakerMode.Raw)
            {
                ConfigState.Instance.SpeakerMode = AudioSpeakerMode.Stereo;
            }
#endif
            ac.speakerMode = ConfigState.Instance.SpeakerMode;
            AudioSettings.Reset(ac);

            //VIDEO CONFIG
            if (ConfigState.Instance.UseCustomVideoSettings)
            {
                ApplyExtendedGraphicsConfiguration();
            }

            if (!CoreParams.IsEditor)
            {
                Screen.SetResolution(ConfigState.Instance.Resolution.x, ConfigState.Instance.Resolution.y, ConfigState.Instance.FullScreen, ConfigState.Instance.RefreshRate);
            }

            QualitySettings.vSyncCount  = ConfigState.Instance.VsyncCount;
            Application.targetFrameRate = ConfigState.Instance.MaxFrames;

            //INPUT CONFIG
            MappedInput.SetMapper(ConfigState.Instance.InputMapper); //safe?

            //let other things handle it on their own
            QdmsMessageBus.Instance.PushBroadcast(new ConfigChangedMessage());
        }
Example #15
    /// Initializes the audio system with the current audio configuration.
    /// @note This should only be called from the main Unity thread.
    public static void Initialize(GvrAudioListener listener, Quality quality)
    {
        if (!initialized)
        {
            // Initialize the audio system.
            AudioConfiguration config = AudioSettings.GetConfiguration();
            sampleRate      = config.sampleRate;
            numChannels     = (int)config.speakerMode;
            framesPerBuffer = config.dspBufferSize;
            if (numChannels != (int)AudioSpeakerMode.Stereo)
            {
                Debug.LogError("Only 'Stereo' speaker mode is supported by GVR Audio.");
                return;
            }
            Initialize((int)quality, sampleRate, numChannels, framesPerBuffer);
            listenerTransform = listener.transform;

            initialized = true;
        }
        else if (listener.transform != listenerTransform)
        {
            Debug.LogError("Only one GvrAudioListener component is allowed in the scene.");
            GvrAudioListener.Destroy(listener);
        }
    }
Example #16
        private void PaintValues()
        {
            InputMappers = MappedInput.AvailableMappers.ToList();
            InputDropdown.ClearOptions();
            InputDropdown.AddOptions(InputMappers.Select(m => Sub.Replace(m, "CFG_MAPPERS")).ToList());
            int iIndex = InputMappers.IndexOf(ConfigState.Instance.InputMapper);

            InputDropdown.value = iIndex >= 0 ? iIndex : 0;
            ConfigureInputButton.interactable = iIndex >= 0; //enable configure button

            LookSpeedSlider.value = ConfigState.Instance.LookSpeed;
            LookYToggle.isOn      = ConfigState.Instance.LookInvert;

            //Resolutions = new List<Resolution>(Screen.resolutions);
            Resolutions = GetDeduplicatedResolutionList(Screen.resolutions);
            ResolutionDropdown.ClearOptions();
            ResolutionDropdown.AddOptions(Resolutions.Select(r => $"{r.x} x {r.y}").ToList());
            int rIndex = Resolutions.IndexOf(ConfigState.Instance.Resolution);

            ResolutionDropdown.value = rIndex > 0 ? rIndex : Resolutions.Count - 1;

            FullscreenToggle.isOn = ConfigState.Instance.FullScreen;
            FramerateSlider.value = Math.Max(0, ConfigState.Instance.MaxFrames);
            VsyncSlider.value     = ConfigState.Instance.VsyncCount;

            GraphicsQualitySlider.maxValue     = QualitySettings.names.Length - 1;
            GraphicsQualitySlider.value        = QualitySettings.GetQualityLevel();
            GraphicsQualitySlider.interactable = true;

            AntialiasingQualitySlider.value = ConfigState.Instance.AntialiasingQuality;
            ViewDistanceSlider.value        = ConfigState.Instance.ViewDistance;
            FovSlider.value         = Mathf.RoundToInt(ConfigState.Instance.FieldOfView);
            EffectDwellSlider.value = Mathf.RoundToInt(ConfigState.Instance.EffectDwellTime);

            ShowFpsToggle.isOn = ConfigState.Instance.ShowFps;

            SoundVolumeSlider.value = ConfigState.Instance.SoundVolume;
            MusicVolumeSlider.value = ConfigState.Instance.MusicVolume;

            var cList = new List <string>(Enum.GetNames(typeof(AudioSpeakerMode)));

            ChannelDropdown.ClearOptions();
            ChannelDropdown.AddOptions(cList);
            ChannelDropdown.value = cList.IndexOf(AudioSettings.GetConfiguration().speakerMode.ToString());


            //handle subpanels
            foreach (var subpanel in PanelContainer.GetComponentsInChildren <ConfigSubpanelController>())
            {
                try
                {
                    subpanel.PaintValues();
                }
                catch (Exception e)
                {
                    Debug.LogError($"Failed to paint values for subpanel \"{subpanel.name}\"");
                    Debug.LogException(e);
                }
            }
        }
Example #17
 private void load_shared_settings_filter(ConfigNode configNode, AudioSettings audioSettings)
 {
     if (configNode.name == "CHORUS")
     {
         load_shared_settings_filter(configNode, audioSettings.chorus_filter);
     }
     else if (configNode.name == "DISTORTION")
     {
         load_shared_settings_filter(configNode, audioSettings.distortion_filter);
     }
     else if (configNode.name == "ECHO")
     {
         load_shared_settings_filter(configNode, audioSettings.echo_filter);
     }
     else if (configNode.name == "HIGHPASS")
     {
         load_shared_settings_filter(configNode, audioSettings.highpass_filter);
     }
     else if (configNode.name == "LOWPASS")
     {
         load_shared_settings_filter(configNode, audioSettings.lowpass_filter);
     }
     else if (configNode.name == "REVERB")
     {
         load_shared_settings_filter(configNode, audioSettings.reverb_filter);
     }
 }
Example #18
            public bool LoadGame(string gamePath)
            {
                GameInfo gameInfo = LoadGameInfo(gamePath);
                bool     ret      = Libretro.RetroLoadGame(ref gameInfo);

                Console.WriteLine("\nSystem information:");

                _av = new SystemAVInfo();
                Libretro.RetroGetSystemAVInfo(ref _av);

                var audioConfig = AudioSettings.GetConfiguration();

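                // Match Unity's output sample rate to the core's native audio sample rate.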
                audioConfig.sampleRate = (int)_av.timing.sample_rate;
                AudioSettings.Reset(audioConfig);

                Debug.Log("Geometry:");
                Debug.Log("Base width: " + _av.geometry.base_width);
                Debug.Log("Base height: " + _av.geometry.base_height);
                Debug.Log("Max width: " + _av.geometry.max_width);
                Debug.Log("Max height: " + _av.geometry.max_height);
                Debug.Log("Aspect ratio: " + _av.geometry.aspect_ratio);
                Debug.Log("Geometry:");
                Debug.Log("Target fps: " + _av.timing.fps);
                Debug.Log("Sample rate " + _av.timing.sample_rate);
                return(ret);
            }
Example #19
    private void SetupMic()
    {
        // default device
        myMicDevice = "";
        // try to find one that matches identifier
        if (microphoneIdentifier != "")
        {
            foreach (string device in Microphone.devices)
            {
                if (device.Contains(microphoneIdentifier))
                {
                    myMicDevice = device;
                }
            }
        }

        // make a clip that loops recording when it reaches the end, is 10 seconds long, and uses the project sample rate
        micClip = Microphone.Start(myMicDevice, true, 10, AudioSettings.GetConfiguration().sampleRate);

        mySource.clip = micClip;
        // also loop the audio source
        mySource.loop = true;
        // high priority!
        mySource.priority = 0;
        // wait for mic to start
        while (!(Microphone.GetPosition(myMicDevice) > 0))
        {
        }
        // play audio source!
        mySource.Play();
    }
Example #20
 private void Start()
 {
     playerStats = GetComponent <PlayerStats>();
     gm          = FindObjectOfType <GameManager>();
     gm.UpdateUIAmmoText(weapons[selectedWeapon].GetComponent <WeaponStats>());
     audioSettings = FindObjectOfType <AudioSettings>();
 }
Example #21
    public void changeGlobalVolume(Slider slider)
    {
        AudioConfiguration config = AudioSettings.GetConfiguration();

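        // Note: despite the method name, this changes the DSP buffer size; AudioSettings.Reset applies the new configuration.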
        config.dspBufferSize = (int)slider.value;
        AudioSettings.Reset(config);
    }
Example #22
    public static AudioSource CreateOneShot(AudioClip clip, Vector3 position, Transform parent, float volume, float pitch, float minDistance, float maxDistance, bool localSpace)
    {
        // Create source
        AudioSource source = CreateSource();

        source.clip            = clip;
        source.volume          = volume;
        source.pitch           = pitch;
        source.maxDistance     = maxDistance;
        source.minDistance     = minDistance;
        source.gameObject.name = clip.name;
        source.spatialBlend    = 1.0f;

        source.transform.parent = parent;
        if (localSpace)
        {
            source.transform.localPosition = position;
        }
        else
        {
            source.transform.position = position;
        }

        source.Play();

        // Register it
        AudioSettings.RegisterOneShot(source);

        return(source);
    }
Example #23
        // Message sent by PhotonVoiceRecorder
        void PhotonVoiceCreated(Recorder.PhotonVoiceCreatedParams p)
        {
            var localVoice = p.Voice;

            if (localVoice.Info.Channels != 1)
            {
                throw new Exception("WebRTCAudioProcessor: only mono audio signals supported.");
            }
            if (!(localVoice is Voice.LocalVoiceAudioShort))
            {
                throw new Exception("WebRTCAudioProcessor: only short audio voice supported (Set PhotonVoiceRecorder.TypeConvert option).");
            }
            var v = (Voice.LocalVoiceAudioShort)localVoice;

            // can't access the AudioSettings properties in InitAEC if it's called from not main thread
            this.reverseChannels = new Dictionary <AudioSpeakerMode, int>()
            {
                { AudioSpeakerMode.Raw, 0 },
                { AudioSpeakerMode.Mono, 1 },
                { AudioSpeakerMode.Stereo, 2 },
                { AudioSpeakerMode.Quad, 4 },
                { AudioSpeakerMode.Surround, 5 },
                { AudioSpeakerMode.Mode5point1, 6 },
                { AudioSpeakerMode.Mode7point1, 8 },
                { AudioSpeakerMode.Prologic, 0 },
            }[AudioSettings.speakerMode];
            int playBufSize;
            int playBufNum;

            AudioSettings.GetDSPBufferSize(out playBufSize, out playBufNum);
            proc = new Voice.WebRTCAudioProcessor(new Voice.Unity.Logger(), localVoice.Info.FrameSize, localVoice.Info.SamplingRate, localVoice.Info.Channels, AudioSettings.outputSampleRate, this.reverseChannels);
            v.AddPostProcessor(proc);
            Debug.Log("WebRTCAudioDSP initialized.");
        }
Example #24
    void Update()
    {
        if (useMicrophone != prevUseMicrophone)
        {
            prevUseMicrophone = useMicrophone;
            if (useMicrophone)
            {
                foreach (string m in Microphone.devices)
                {
                    device = m;
                    break;
                }

                source   = GetComponent <AudioSource>();
                prevClip = source.clip;
                source.Stop();
                source.clip = Microphone.Start(null, true, 1, AudioSettings.outputSampleRate);
                source.Play();

                int dspBufferSize, dspNumBuffers;
                AudioSettings.GetDSPBufferSize(out dspBufferSize, out dspNumBuffers);

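                // Start playback three full DSP blocks behind the microphone write position, wrapped to the one-second clip length.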
                source.timeSamples = (Microphone.GetPosition(device) + AudioSettings.outputSampleRate - 3 * dspBufferSize * dspNumBuffers) % AudioSettings.outputSampleRate;
            }
            else
            {
                Microphone.End(device);
                source.clip = prevClip;
                source.Play();
            }
        }
    }
Example #25
 void _SetVolume(string key, float value)
 {
     if (AudioSettings.ContainsTypeKey(key))
     {
         audioMixer.SetFloat(key, value);
     }
 }
Example #26
    /// <summary>
    /// Change default audio settings
    /// </summary>
    private static void AudioSettingsSetup()
    {
        // Change default speaker mode from Stereo to Quad
        AudioConfiguration audioConfiguration = AudioSettings.GetConfiguration();

        audioConfiguration.speakerMode = AudioSpeakerMode.Quad;
        AudioSettings.Reset(audioConfiguration);
    }
Example #27
        public void ClAppMidiBundleBuild(RpcCallback callback)
        {
            var audioConfig = AudioSettings.GetConfiguration();

            Rpc("ClAppMidiBundleBuild", new JsonObj()
            {
            }, callback);
        }
Example #28
    private int getBufferSize()
    {
        int bufferlength,
            numbuffers;

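        // GetDSPBufferSize returns the length of a single DSP buffer in samples and the number of buffers.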
        AudioSettings.GetDSPBufferSize(out bufferlength, out numbuffers);
        return(bufferlength);
    }
Example #29
        private void SetAudioDSP()
        {
            //Pull DSP setting from config
            var configuration = AudioSettings.GetConfiguration();

            configuration.dspBufferSize = NSSettings.config.audioDSP;
            AudioSettings.Reset(configuration);
        }
Example #30
 void Start()
 {
     layerMask     = ~layerMask;
     clickEvents   = GetComponent <ClickEvents>();
     m_Raycaster   = GetComponent <GraphicRaycaster>();
     m_EventSystem = GetComponent <EventSystem>();
     audioSettings = FindObjectOfType <AudioSettings>();
 }
Example #31
		bool PrepareAudioRecording ()
		{
			audioFilePath = CreateOutputUrl ();

			var audioSettings = new AudioSettings {
				SampleRate = 44100,
				Format = AudioToolbox.AudioFormatType.MPEG4AAC,
				NumberChannels = 1,
				AudioQuality = AVAudioQuality.High
			};

			//Set recorder parameters
			NSError error;
			recorder = AVAudioRecorder.Create (audioFilePath, audioSettings, out error);
			if (error != null) {
				Console.WriteLine (error);
				return false;
			}

			//Set Recorder to Prepare To Record
			if (!recorder.PrepareToRecord ()) {
				recorder.Dispose ();
				recorder = null;
				return false;
			}

			recorder.FinishedRecording += OnFinishedRecording;

			return true;
		}
Example #32
        bool PrepareAudioRecording()
        {
            //Declare string for application temp path and tack on the file extension
            string fileName = string.Format("Myfile{0}.aac", DateTime.Now.ToString("yyyyMMddHHmmss"));
            string tempRecording = NSBundle.MainBundle.BundlePath + "/../tmp/" + fileName;

            Console.WriteLine(tempRecording);
            this.audioFilePath = NSUrl.FromFilename(tempRecording);

            var audioSettings = new AudioSettings()
            {
                SampleRate = 44100.0f,
                Format = MonoTouch.AudioToolbox.AudioFormatType.MPEG4AAC,
                NumberChannels = 1,
                AudioQuality = AVAudioQuality.High
            };

            //Set recorder parameters
            NSError error;
            recorder = AVAudioRecorder.Create(this.audioFilePath, audioSettings, out error);
            if ((recorder == null) || (error != null))
            {
                Console.WriteLine(error);
                return false;
            }

            //Set Recorder to Prepare To Record
            if (!recorder.PrepareToRecord())
            {
                recorder.Dispose();
                recorder = null;
                return false;
            }

            recorder.FinishedRecording += delegate (object sender, AVStatusEventArgs e)
            {
                recorder.Dispose();
                recorder = null;
                Console.WriteLine("Done Recording (status: {0})", e.Status);
            };

            return true;
        }
Example #33
 public void ParseAudio(AudioSettings au)
 {
     var aud = statedyn["audio"];
     au.MusicVolume = (float?) aud["music_volume"] ?? 0;
     au.SoundVolume = (float?) aud["sound_volume"] ?? 0;
     au.UseEffects = (bool?) aud["use_effects"] ?? false;
     au.ListenerIsPlayer = (bool?) aud["listener_is_player"] ?? false;
     au.StreamBufferSize = (int?) aud["stream_buffer_size"] * 1024 ?? 0;
     if (au.StreamBufferSize <= 0)
         au.StreamBufferSize = 131072; // 128 * 1024
 }
Example #34
		public CustomAudioDevice(Context context)
		{
			if (!InstanceFieldsInitialized)
			{
				InitializeInstanceFields();
				InstanceFieldsInitialized = true;
			}
			this.m_context = context;

			try
			{
				m_playBuffer = ByteBuffer.allocateDirect(MAX_SAMPLES);
				m_recBuffer = ByteBuffer.allocateDirect(MAX_SAMPLES);
			}
			catch (Exception e)
			{
				Log.e(LOG_TAG, e.Message);
			}

			m_tempBufPlay = new sbyte[MAX_SAMPLES];
			m_tempBufRec = new sbyte[MAX_SAMPLES];

			m_captureSettings = new AudioSettings(SAMPLING_RATE, NUM_CHANNELS_CAPTURING);
			m_rendererSettings = new AudioSettings(SAMPLING_RATE, NUM_CHANNELS_RENDERING);

			m_audioManager = (AudioManager) m_context.getSystemService(Context.AUDIO_SERVICE);

			m_audioManager.Mode = AudioManager.MODE_IN_COMMUNICATION;
		}
Example #35
        /// <summary>
        /// Views the did load.
        /// </summary>
        public override void ViewDidLoad()
        {
            base.ViewDidLoad ();

            //
             m_navigationTitle = @"Audio Recorder";

            //
            this.View.TintColor = NormalTintColor;
            mMusicFlowView.BackgroundColor = this.View.BackgroundColor;
            mMusicFlowView.IdleAmplitude = 0;

            //Unique recording URL
            var fileName = NSProcessInfo.ProcessInfo.GloballyUniqueString;

            var documents = Environment.GetFolderPath (Environment.SpecialFolder.MyDocuments);
            var tmp = Path.Combine (documents, "..", "tmp");

            m_recordingFilePath = Path.Combine(tmp,String.Format("{0}.m4a",fileName));
             {

                m_flexItem1 = new UIBarButtonItem(UIBarButtonSystemItem.FlexibleSpace,null,null);
                m_flexItem2 = new UIBarButtonItem(UIBarButtonSystemItem.FlexibleSpace,null,null);

                var img = UIImage.FromBundle("audio_record");

                m_recordButton = new UIBarButtonItem(img,UIBarButtonItemStyle.Plain,RecordingButtonAction);
                m_playButton = new UIBarButtonItem(UIBarButtonSystemItem.Play,PlayAction);
                m_pauseButton = new UIBarButtonItem(UIBarButtonSystemItem.Pause,PauseAction);
                m_trashButton = new UIBarButtonItem(UIBarButtonSystemItem.Trash,DeleteAction);

                this.SetToolbarItems (new UIBarButtonItem[]{ m_playButton, m_flexItem1, m_recordButton, m_flexItem2, m_trashButton}, false);

                 m_playButton.Enabled = false;
                 m_trashButton.Enabled = false;
             }

             // Define the recorder setting
             {
                var audioSettings = new AudioSettings () {
                    Format = AudioFormatType.MPEG4AAC,
                    SampleRate = 44100.0f,
                    NumberChannels = 2,
                };

                NSError err = null;

                m_audioRecorder = AVAudioRecorder.Create (NSUrl.FromFilename (m_recordingFilePath), audioSettings,out err);

                // Initiate and prepare the recorder
                m_audioRecorder.WeakDelegate = this;
                m_audioRecorder.MeteringEnabled = true;

                mMusicFlowView.PrimaryWaveLineWidth = 3.0f;
                mMusicFlowView.SecondaryWaveLineWidth = 1.0f;
             }

             //Navigation Bar Settings
             {
                this.NavigationItem.Title = @"Audio Recorder";
                m_cancelButton = new UIBarButtonItem(UIBarButtonSystemItem.Cancel,CancelAction);
                this.NavigationItem.LeftBarButtonItem = m_cancelButton;

                m_doneButton = new UIBarButtonItem(UIBarButtonSystemItem.Done, DoneAction);
             }

             //Player Duration View
            {
                m_viewPlayerDuration = new UIView ();
                m_viewPlayerDuration.AutoresizingMask = UIViewAutoresizing.FlexibleWidth | UIViewAutoresizing.FlexibleHeight;
                m_viewPlayerDuration.BackgroundColor = UIColor.Clear;

                m_labelCurrentTime = new UILabel ();
                m_labelCurrentTime.Text = NSStringExtensions.TimeStringForTimeInterval (0);
                m_labelCurrentTime.Font =  UIFont.BoldSystemFontOfSize(14.0f);
                m_labelCurrentTime.TextColor = NormalTintColor;
                m_labelCurrentTime.TranslatesAutoresizingMaskIntoConstraints = false;

                m_playerSlider = new UISlider(new CGRect(0, 0, this.View.Bounds.Size.Width, 64));
                 m_playerSlider.MinimumTrackTintColor = PlayingTintColor;
                 m_playerSlider.Value = 0;

                m_playerSlider.TouchDown += SliderStart;
                m_playerSlider.ValueChanged += SliderMoved;
                m_playerSlider.TouchUpInside += SliderEnd;
                m_playerSlider.TouchUpOutside += SliderEnd;
                 m_playerSlider.TranslatesAutoresizingMaskIntoConstraints = false;

                m_labelRemainingTime = new UILabel();
                m_labelCurrentTime.Text = NSStringExtensions.TimeStringForTimeInterval (0);
                 m_labelRemainingTime.UserInteractionEnabled = true;
                m_labelRemainingTime.AddGestureRecognizer (new UITapGestureRecognizer(TapRecognizer));
                m_labelRemainingTime.Font = m_labelCurrentTime.Font;
                 m_labelRemainingTime.TextColor = m_labelCurrentTime.TextColor;
                 m_labelRemainingTime.TranslatesAutoresizingMaskIntoConstraints = false;

                m_viewPlayerDuration.Add (m_labelCurrentTime);
                m_viewPlayerDuration.Add (m_playerSlider);
                m_viewPlayerDuration.Add (m_labelRemainingTime);

                NSLayoutConstraint constraintCurrentTimeLeading = NSLayoutConstraint.Create (m_labelCurrentTime,NSLayoutAttribute.Leading,NSLayoutRelation.Equal,m_viewPlayerDuration,NSLayoutAttribute.Leading,1.0f, 10.0f);
                NSLayoutConstraint constraintCurrentTimeTrailing =  NSLayoutConstraint.Create (m_playerSlider,NSLayoutAttribute.Leading,NSLayoutRelation.Equal,m_labelCurrentTime,NSLayoutAttribute.Trailing,1.0f,10);

                NSLayoutConstraint constraintRemainingTimeLeading =  NSLayoutConstraint.Create (m_labelRemainingTime,NSLayoutAttribute.Leading,NSLayoutRelation.Equal,m_playerSlider,NSLayoutAttribute.Trailing,1.0f, 10.0f);
                NSLayoutConstraint constraintRemainingTimeTrailing =  NSLayoutConstraint.Create (m_viewPlayerDuration,NSLayoutAttribute.Trailing,NSLayoutRelation.Equal,m_labelRemainingTime,NSLayoutAttribute.Trailing,1.0f,10.0f);

                NSLayoutConstraint constraintCurrentTimeCenter = NSLayoutConstraint.Create (m_labelCurrentTime,NSLayoutAttribute.CenterY,NSLayoutRelation.Equal,m_viewPlayerDuration,NSLayoutAttribute.CenterY,1.0f,0.0f);

                NSLayoutConstraint constraintSliderCenter = NSLayoutConstraint.Create (m_playerSlider,NSLayoutAttribute.CenterY,NSLayoutRelation.Equal,m_viewPlayerDuration,NSLayoutAttribute.CenterY,1.0f,0.0f);

                NSLayoutConstraint constraintRemainingTimeCenter = NSLayoutConstraint.Create (m_labelRemainingTime,NSLayoutAttribute.CenterY,NSLayoutRelation.Equal,m_viewPlayerDuration,NSLayoutAttribute.CenterY,1.0f,0.0f);

                m_viewPlayerDuration.AddConstraints(new NSLayoutConstraint[]{constraintCurrentTimeLeading,constraintCurrentTimeTrailing,constraintRemainingTimeLeading,constraintRemainingTimeTrailing,constraintCurrentTimeCenter,constraintSliderCenter,constraintRemainingTimeCenter});

            }
        }