/// <summary>
/// Loads an audio configuration from an asset file name.
/// </summary>
/// <param name="assetFileName">Asset name, relative to the loader root directory, and including the file extension.</param>
/// <returns>An <see cref="AudioConfiguration"/> instance deserialized from the asset file.</returns>
/// <exception cref="ContentLoadException">
/// Thrown when the name has no 3-character extension, or when the extension is not in the supported-format set.
/// </exception>
public override object Load(string assetFileName)
{
    // Need at least 5 chars ("x.xxx") with a dot immediately before the 3-char format suffix.
    if (assetFileName.Length <= 4 || assetFileName[assetFileName.Length - 4] != '.')
    {
        throw new ContentLoadException(typeof(AudioCfgContentResolver).FullName,
            string.Format(Pulsar.Resources.Exceptions.ContentManager.FileNameIncorrect, assetFileName));
    }

    // The last 3 characters are the format suffix; _supportFormat holds upper-case entries.
    string fileFormat = assetFileName.Substring(assetFileName.Length - 3, 3).ToUpper();
    if (!this._supportFormat.Contains(fileFormat))
    {
        throw new ContentLoadException(typeof(AudioCfgContentResolver).FullName,
            string.Format(Pulsar.Resources.Exceptions.ContentManager.FormatNotSupportByResolver,
                assetFileName, typeof(AudioCfgContentResolver).FullName));
    }

    // The original wrapped this call in a redundant "catch { throw; }"; letting the
    // exception propagate naturally is equivalent and keeps the stack trace intact.
    return SerializerHelper.Load<AudioConfiguration>(assetFileName);
}
/// <summary>
/// Completes application startup: runs initial mod setup, loads the persisted
/// audio configuration, optionally plays the startup music, and queues the
/// remaining startup work on the UI dispatcher.
/// </summary>
void FinishStartup()
{
    if (_log.IsDebugEnabled)
    {
        _log.DebugFormat("Starting {0}", MethodBase.GetCurrentMethod().ToString());
    }

    ModManagement.DoInitialSetup();

    // Load the persisted audio settings from the INI file in the data folder.
    string path = System.IO.Path.Combine(Locations.DataPath, "audio.ini");
    AudioConfiguration audioConfig = INIConverter.ToObject(path, typeof(AudioConfiguration)) as AudioConfiguration;

    // Guard against a missing/corrupt ini file: the original dereferenced
    // audioConfig unconditionally and would throw a NullReferenceException
    // because the "as" cast yields null on failure.
    if (audioConfig != null && audioConfig.StartupMusic)
    {
        string playfile = Path.Combine(Locations.ArtemisCopyPath, "dat", "Artemis Main Screen.ogg");
        if (File.Exists(playfile))
        {
            RussLibraryAudio.AudioServer.Current.PlayAsync(playfile);
        }
    }

    // Queue the remaining startup steps on the dispatcher so the UI stays responsive.
    this.Dispatcher.BeginInvoke(new Action(ModManagement.UpdateCheckProcess), System.Windows.Threading.DispatcherPriority.Loaded);
    this.Dispatcher.BeginInvoke(new Action<AudioConfiguration>(LoadAudioData), audioConfig);
    this.Dispatcher.BeginInvoke(new Action(DoStartup));

    if (_log.IsDebugEnabled)
    {
        _log.DebugFormat("Ending {0}", MethodBase.GetCurrentMethod().ToString());
    }
}
/// Initializes the audio system with the current audio configuration.
/// @note This should only be called from the main Unity thread.
public static void Initialize(CardboardAudioListener listener, Quality quality)
{
    if (!initialized)
    {
        // Initialize the audio system.
#if UNITY_4_5 || UNITY_4_6 || UNITY_4_7
        // Pre-5.x Unity has no AudioSettings.GetConfiguration(); read the
        // individual settings instead.
        sampleRate = AudioSettings.outputSampleRate;
        numChannels = (int)AudioSettings.speakerMode;
        int numBuffers = -1;
        AudioSettings.GetDSPBufferSize(out framesPerBuffer, out numBuffers);
#else
        AudioConfiguration config = AudioSettings.GetConfiguration();
        sampleRate = config.sampleRate;
        numChannels = (int)config.speakerMode;
        framesPerBuffer = config.dspBufferSize;
#endif
        // Spatialization only supports stereo output; refuse to initialize otherwise.
        if (numChannels != (int)AudioSpeakerMode.Stereo)
        {
            Debug.LogError("Only 'Stereo' speaker mode is supported by Cardboard.");
            return;
        }
        Initialize(quality, sampleRate, numChannels, framesPerBuffer);
        // Remember which listener owns the audio system so duplicates can be detected.
        listenerTransform = listener.transform;
        initialized = true;
        Debug.Log("Cardboard audio system is initialized (Quality: " + quality + ", Sample Rate: " + sampleRate + ", Channels: " + numChannels + ", Frames Per Buffer: " + framesPerBuffer + ").");
    }
    else if (listener.transform != listenerTransform)
    {
        // A second listener exists somewhere in the scene.
        Debug.LogError("Only one CardboardAudioListener component is allowed in the scene.");
        //CardboardAudioListener.Destroy(listener);
    }
}
//JAVA TO C# CONVERTER WARNING: Method 'throws' clauses are not available in .NET:
//ORIGINAL LINE: @Override public TrackStartRequestMessage decode(java.io.DataInput in, int version) throws java.io.IOException
/// Decodes a TrackStartRequestMessage from the given data input.
/// NOTE: the read order must exactly mirror the encoder's write order;
/// do not reorder these reads.
public TrackStartRequestMessage decode(DataInput @in, int version)
{
    long executorId = @in.readLong();
    // Positional arguments match the AudioTrackInfo constructor; presumably
    // title, author, length, identifier, stream flag — confirm against AudioTrackInfo.
    AudioTrackInfo trackInfo = new AudioTrackInfo(@in.readUTF(), @in.readUTF(), @in.readLong(), @in.readUTF(), @in.readBoolean(), null);
    // Length-prefixed opaque track payload.
    sbyte[] encodedTrack = new sbyte[@in.readInt()];
    @in.readFully(encodedTrack);
    int volume = @in.readInt();
    AudioConfiguration configuration = new AudioConfiguration();
    configuration.ResamplingQuality = AudioConfiguration.ResamplingQuality.valueOf(@in.readUTF());
    configuration.OpusEncodingQuality = @in.readInt();
    // Newer protocol versions additionally carry an explicit output format.
    if (version >= VERSION_WITH_FORMAT)
    {
        AudioDataFormat format = new AudioDataFormat(@in.readInt(), @in.readInt(), @in.readInt(), AudioDataFormat.Codec.valueOf(@in.readUTF()));
        configuration.OutputFormat = format;
    }
    // Start position defaults to 0 for protocol versions that do not send it.
    long position = 0;
    if (version >= VERSION_WITH_POSITION)
    {
        position = @in.readLong();
    }
    return (new TrackStartRequestMessage(executorId, trackInfo, encodedTrack, volume, configuration, position));
}
/// <summary>
/// Creates a Windows text-to-speech processor backed by the given configuration manager.
/// </summary>
/// <param name="configurationManager">Source of the persisted <see cref="AudioConfiguration"/>; must not be null.</param>
/// <exception cref="System.ArgumentNullException">Thrown when <paramref name="configurationManager"/> is null.</exception>
public WindowsTextToSpeechProcessor(IConfigurationManager configurationManager)
{
    // Fail fast on a null dependency instead of throwing a NullReferenceException
    // later when LoadConfiguration is called.
    if (configurationManager == null)
    {
        throw new System.ArgumentNullException("configurationManager");
    }
    this.configurationManager = configurationManager;
    RegisterAvailableLanguages();
    audioConfiguration = configurationManager.LoadConfiguration<AudioConfiguration>();
}
// Use this for initialization
void Start()
{
    // Constrain the mixer to 6 virtual voices, of which 2 may be audible at once.
    AudioConfiguration audioSetup = AudioSettings.GetConfiguration();
    audioSetup.numVirtualVoices = 6;
    audioSetup.numRealVoices = 2;
    AudioSettings.Reset(audioSetup);

    controller = FindObjectOfType<FrameworkController>();
    source = GetComponent<AudioSource>();

    if (controller == null)
    {
        Debug.Log("Framework controller not found. Are you starting from MainMenu Scene?");
    }
    else
    {
        source.Play();
    }

    // Re-evaluate voice priorities whenever either slider moves.
    SoundSlider.onValueChanged.AddListener(delegate { UpdatePriorities(); });
    NoiseSlider.onValueChanged.AddListener(delegate { UpdatePriorities(); });

    UpdatePriorities();
    PlayAll();
}
/// Switches the project-wide audio output to 5.1 surround and restarts
/// the audio system with the updated configuration.
private void SetupSurroundSound()
{
    AudioConfiguration current = AudioSettings.GetConfiguration();
    current.speakerMode = AudioSpeakerMode.Mode5point1;
    AudioSettings.Reset(current);
}
// public AudioSource c;

// Use this for initialization
void Start()
{
    // Re-apply the current audio configuration unchanged; this restarts the
    // audio system with the same settings.
    AudioConfiguration currentConfig = AudioSettings.GetConfiguration();
    AudioSettings.Reset(currentConfig);
    // this.c.Play();
}
/// <summary>
/// Call this function to create geometry handle
/// </summary>
/// <exception cref="Exception">Thrown when the native geometry handle cannot be created.</exception>
void CreatePropagationGeometry()
{
    // NOTE: the original fetched AudioSettings.GetConfiguration() into an
    // unused local here; that dead code has been removed.

    // Create Geometry
    if (PropIFace.CreateAudioGeometry(out geometryHandle) != OSPSuccess)
    {
        throw new Exception("Unable to create geometry handle");
    }

    // Upload Mesh: prefer the pre-baked geometry file when one is configured
    // and we are in play mode; otherwise (or when the file read fails)
    // build the geometry from the scene.
    if (filePath != null && filePath.Length != 0 && fileEnabled && Application.isPlaying)
    {
        if (!ReadFile())
        {
            Debug.Log("Failed to read file, attempting to regenerate audio geometry");
            // We should not try to upload data dynamically if data already exists
            UploadGeometry();
        }
    }
    else
    {
        UploadGeometry();
    }
}
/// Verifies that starting a job registers it in the driver's JobBag, moves it
/// to the Started state, and that the job remains in the bag after the VLC
/// instance exits.
public void TestVlcJobGetsAddedToCollection()
{
    // Arrange: a short wav input and an mp3 conversion configuration.
    var file = TestUtilities.GetTestFile("NeedinYou2SecWav.wav");
    var audioConfiguration = new AudioConfiguration { Format = AudioConfiguration.ConversionFormats.Mp3 };
    var portAllocator = MockRepository.GenerateMock<IPortAllocator>();
    portAllocator.Expect(x => x.NewPort()).Return(42);
    // NOTE: "TimeSouce" is the project's actual (misspelled) type name.
    var job = new VlcAudioJob(audioConfiguration, portAllocator, MockRepository.GenerateMock<IStatusParser>(), MockRepository.GenerateMock<IVlcStatusSource>(), new TimeSouce(), MockRepository.GenerateMock<ILogger>());
    Assert.AreEqual(VlcJob.JobState.NotStarted, job.State);
    job.InputFile = file;
    var expectedOutputFile = Path.Combine(TestUtilities.GetTestOutputDir(), "output.mp3");
    job.OutputFile = new FileInfo(expectedOutputFile);
    // Stub the starter so no real VLC process is launched.
    var starter = MockRepository.GenerateMock<IVlcStarter>();
    var instance = MockRepository.GenerateMock<IVlcInstance>();
    starter.Expect(x => x.Start(Arg<string>.Is.Anything, Arg<FileInfo>.Is.Anything)).Return(instance);
    var driver = new VlcDriver(starter);
    TestUtilities.SetVlcExeLocationOnNonStandardWindowsEnvironments(driver);
    Assert.AreEqual(0, driver.JobBag.Count);
    Assert.AreEqual(VlcJob.JobState.NotStarted, job.State);
    // Act: start the job.
    driver.StartJob(job);
    // Assert: job is tracked and running.
    Assert.AreEqual(1, driver.JobBag.Count);
    Assert.AreEqual(VlcJob.JobState.Started, job.State);
    // Simulate VLC exiting; the job should stay in the bag.
    instance.Raise(x => x.OnExited += null, instance, new EventArgs());
    Assert.AreEqual(1, driver.JobBag.Count);
}
/// Draws a runtime panel for editing the audio configuration, applying the
/// changes, and starting/stopping the attached AudioSource.
void OnGUI()
{
    AudioSource source = GetComponent<AudioSource>();

    bool dirty = false;
    AudioConfiguration config = AudioSettings.GetConfiguration();

    // One GUI row per tunable value; each row sets 'dirty' when its value changes.
    config.speakerMode = (AudioSpeakerMode)GUIRow("speakerMode", validSpeakerModes, (int)config.speakerMode, ref dirty);
    config.dspBufferSize = GUIRow("dspBufferSize", validDSPBufferSizes, config.dspBufferSize, ref dirty);
    config.sampleRate = GUIRow("sampleRate", validSampleRates, config.sampleRate, ref dirty);
    config.numRealVoices = GUIRow("RealVoices", validNumRealVoices, config.numRealVoices, ref dirty);
    config.numVirtualVoices = GUIRow("numVirtualVoices", validNumVirtualVoices, config.numVirtualVoices, ref dirty);

    // Only restart the audio system when something actually changed.
    if (dirty)
    {
        AudioSettings.Reset(config);
    }

    if (GUILayout.Button("Start"))
    {
        source.Play();
    }

    if (GUILayout.Button("Stop"))
    {
        source.Stop();
    }
}
/// Boots the emulator core: selects the host OS build, wires the core's
/// callbacks, loads the ROM, and matches Unity's timing/audio settings to
/// the core before starting frame timing.
public void Start()
{
#if UNITY_STANDALONE_OSX
    _core = new Core(OS.OSX, CorePath, SystemDirectory);
#else
    _core = new Core(OS.Windows, CorePath, SystemDirectory);
#endif

    // Hook up all core callbacks before loading the ROM.
    _core.AudioSampleBatchHandler += audioSampleBatchHandler;
    _core.LogHandler += logHandler;
    _core.VideoFrameHandler += videoFrameHandler;
    _core.Load(ROMPath);

    // Lock the physics step to the core's frame rate.
    Time.fixedDeltaTime = (float)1 / (float)_core.FrameRate;

    // Match Unity's output sample rate to the core's audio sample rate.
    AudioConfiguration unityAudio = AudioSettings.GetConfiguration();
    unityAudio.sampleRate = (int)_core.AudioSampleRate;
    AudioSettings.Reset(unityAudio);

    // this is required for OnAudioFilterRead to work and needs to be done after setting the AudioSettings.outputSampleRate
    gameObject.AddComponent<AudioSource>();

    _core.StartFrameTiming();
}
/// Applies the slider value to the audio DSP buffer size and restarts the
/// audio system with the new configuration.
/// NOTE(review): despite the name, this changes dspBufferSize (latency), not
/// any volume setting — confirm this is the intended behavior, and that the
/// slider's range only produces valid power-of-two buffer sizes.
public void changeGlobalVolume(Slider slider)
{
    AudioConfiguration config = AudioSettings.GetConfiguration();
    config.dspBufferSize = (int)slider.value;
    AudioSettings.Reset(config);
}
/// <summary>
/// Starts microphone capture on the first available microphone.
/// </summary>
/// <returns>True if a microphone was available to capture, otherwise false.</returns>
public bool StartMicrophone()
{
    if (microphoneAudioSource == null)
    {
        Debug.LogWarning("No AudioSource for microphone audio was specified");
        return (false);
    }

    if (Microphone.devices.Length == 0)
    {
        Debug.LogWarning("No connected microphones detected");
        return (false);
    }

    // Pick a capture frequency within the device's capabilities. Unity reports
    // minFreq == maxFreq == 0 when the device supports any frequency; the
    // original clamped into [0, 0] in that case, requesting a 0 Hz capture.
    int minFreq, maxFreq, reqFreq;
    Microphone.GetDeviceCaps(Microphone.devices[0], out minFreq, out maxFreq);
    if (minFreq == 0 && maxFreq == 0)
    {
        reqFreq = DefaultMicrophoneFreq;
    }
    else
    {
        reqFreq = Mathf.Clamp(DefaultMicrophoneFreq, minFreq, maxFreq);
    }

    microphoneAudioSource.clip = Microphone.Start(Microphone.devices[0], true, 1, reqFreq);
    microphoneAudioSource.loop = true;

    // don't start playing the AudioSource until we have some data (else we get a weird doubleling of audio)
    StartCoroutine(StartAudioSourceCoroutine());

    // One tick per DSP buffer at the current output sample rate.
    AudioConfiguration currentConfiguration = AudioSettings.GetConfiguration();
    tickTime = (float)currentConfiguration.dspBufferSize / currentConfiguration.sampleRate;
    isInitialized = true;
    return (true);
}
/// Initializes the audio system with the current audio configuration.
/// @note This should only be called from the main Unity thread.
public static void Initialize(GvrAudioListener listener, Quality quality)
{
    if (!initialized)
    {
#if !UNITY_EDITOR && UNITY_ANDROID
        SetApplicationState();
#endif
        // Initialize the audio system.
        AudioConfiguration config = AudioSettings.GetConfiguration();
        sampleRate = config.sampleRate;
        numChannels = (int)config.speakerMode;
        framesPerBuffer = config.dspBufferSize;
        // Spatialization only supports stereo output; refuse to initialize otherwise.
        if (numChannels != (int)AudioSpeakerMode.Stereo)
        {
            Debug.LogError("Only 'Stereo' speaker mode is supported by GVR Audio.");
            return;
        }
        Initialize(quality, sampleRate, numChannels, framesPerBuffer);
        // Remember which listener owns the audio system so duplicates can be detected.
        listenerTransform = listener.transform;
        initialized = true;
    }
    else if (listener.transform != listenerTransform)
    {
        // A second listener exists somewhere in the scene; destroy the newcomer.
        Debug.LogError("Only one GvrAudioListener component is allowed in the scene.");
        GvrAudioListener.Destroy(listener);
    }
}
/// Initializes the audio system with the current audio configuration.
/// @note This should only be called from the main Unity thread.
public static void Initialize(GvrAudioListener listener, Quality quality)
{
    if (!initialized)
    {
        // Initialize the audio system.
        AudioConfiguration config = AudioSettings.GetConfiguration();
        sampleRate = config.sampleRate;
        numChannels = (int)config.speakerMode;
        framesPerBuffer = config.dspBufferSize;
        // Spatialization only supports stereo output; refuse to initialize otherwise.
        if (numChannels != (int)AudioSpeakerMode.Stereo)
        {
            Debug.LogError("Only 'Stereo' speaker mode is supported by GVR Audio.");
            return;
        }
        // On Android the native initialization is skipped entirely and
        // 'initialized' deliberately stays false (see early return below).
        if (Application.platform != RuntimePlatform.Android)
        { // TODO: GvrAudio bug on Android with Unity 2017
            Initialize((int)quality, sampleRate, numChannels, framesPerBuffer);
        }
        // Remember which listener owns the audio system so duplicates can be detected.
        listenerTransform = listener.transform;
        if (Application.platform == RuntimePlatform.Android)
        { // TODO: GvrAudio bug on Android with Unity 2017
            return;
        }
        initialized = true;
    }
    else if (listener.transform != listenerTransform)
    {
        // A second listener exists somewhere in the scene; destroy the newcomer.
        Debug.LogError("Only one GvrAudioListener component is allowed in the scene.");
        GvrAudioListener.Destroy(listener);
    }
}
//TODO Current changing AudioMode in runtime make crash sound. Waiting for fixed this by Unity team.
/// <summary>
/// Sets the general audio mode.
/// </summary>
/// <param name="value">Value of audio speaker mode (1 = Mono … 6 = 7.1); any other value leaves the mode unchanged.</param>
public static void SetGeneralAudioMode(int value)
{
    AudioConfiguration config = AudioSettings.GetConfiguration();

    // Map the numeric setting onto a speaker mode; out-of-range values
    // keep the current mode.
    AudioSpeakerMode mode = config.speakerMode;
    if (value == 1)
    {
        mode = AudioSpeakerMode.Mono;
    }
    else if (value == 2)
    {
        mode = AudioSpeakerMode.Stereo;
    }
    else if (value == 3)
    {
        mode = AudioSpeakerMode.Quad;
    }
    else if (value == 4)
    {
        mode = AudioSpeakerMode.Surround;
    }
    else if (value == 5)
    {
        mode = AudioSpeakerMode.Mode5point1;
    }
    else if (value == 6)
    {
        mode = AudioSpeakerMode.Mode7point1;
    }

    config.speakerMode = mode;
    AudioSettings.Reset(config);
}
// Returns true, if everything is done:
private bool ManageAudioSettings()
{
    bool surroundConfigured = true;

    EditorGUILayout.BeginHorizontal();
    EditorGUILayout.LabelField("Speaker Mode", EditorStyles.label, GUILayout.MaxWidth(_leftLabelMaxWidth));

    AudioConfiguration audioConfig = AudioSettings.GetConfiguration();
    if (audioConfig.speakerMode != AudioSpeakerMode.Mode5point1)
    {
        // Not configured yet: offer a one-click fix.
        surroundConfigured = false;
        if (GUILayout.Button("Set to Surround 5.1"))
        {
            SetupSurroundSound();
        }
    }
    else
    {
        EditorGUILayout.LabelField("(Surround 5.1)", EditorStyles.label);
    }
    EditorGUILayout.EndHorizontal();

    return surroundConfigured;
}
/// Applies the saved "audioQuality" preference to the DSP buffer size.
/// All audio sources are disabled around the reset so the audio system can
/// restart cleanly, then re-enabled afterwards.
void setupAudioBuffer()
{
    AudioConfiguration config = AudioSettings.GetConfiguration();

    // 0/1/2 select increasing buffer sizes; any other value keeps the current size.
    int quality = PlayerPrefs.GetInt("audioQuality");
    if (quality == 0)
    {
        config.dspBufferSize = 256;
    }
    else if (quality == 1)
    {
        config.dspBufferSize = 512;
    }
    else if (quality == 2)
    {
        config.dspBufferSize = 1024;
    }

    AudioSource[] sources = FindObjectsOfType<AudioSource>();
    foreach (AudioSource audioSource in sources)
    {
        audioSource.enabled = false;
    }

    AudioSettings.Reset(config);

    foreach (AudioSource audioSource in sources)
    {
        audioSource.enabled = true;
    }
}
/// <param name="configuration"> Audio encoding or filtering related configuration </param>
/// <param name="frameConsumer"> Consumer for the produced audio frames </param>
/// <param name="volumeLevel"> Mutable volume level for the audio </param>
/// <param name="outputFormat"> Output format to use throughout this processing cycle </param>
public AudioProcessingContext(AudioConfiguration configuration, AudioFrameConsumer frameConsumer, AtomicInteger volumeLevel, AudioDataFormat outputFormat)
{
    // Plain field capture; all collaborators are supplied by the caller.
    this.outputFormat = outputFormat;
    this.volumeLevel = volumeLevel;
    this.frameConsumer = frameConsumer;
    this.configuration = configuration;
}
/// <summary>
/// Change default audio settings
/// </summary>
private static void AudioSettingsSetup()
{
    // Change default speaker mode from Stereo to Quad
    AudioConfiguration quadConfig = AudioSettings.GetConfiguration();
    quadConfig.speakerMode = AudioSpeakerMode.Quad;
    AudioSettings.Reset(quadConfig);
}
/// Initializes the native vraudio context with the engine's current
/// sample rate and DSP buffer size.
public static void Init()
{
    AudioConfiguration engineConfig = AudioSettings.GetConfiguration();
    uint rate = (uint)engineConfig.sampleRate;
    IntPtr framesPerBuffer = new IntPtr(engineConfig.dspBufferSize);
    vraudio_unity_context_init(rate, framesPerBuffer);
}
/// <param name="audioTrack"> The audio track that this executor executes </param>
/// <param name="configuration"> Configuration to use for audio processing </param>
/// <param name="volumeLevel"> Mutable volume level to use when executing the track </param>
/// <param name="useSeekGhosting"> Whether to keep providing old frames continuing from the previous position during a seek
/// until frames from the new position arrive. </param>
/// <param name="bufferDuration"> The size of the frame buffer in milliseconds </param>
public LocalAudioTrackExecutor(InternalAudioTrack audioTrack, AudioConfiguration configuration, AtomicInteger volumeLevel, bool useSeekGhosting, int bufferDuration)
{
    this.audioTrack = audioTrack;
    this.useSeekGhosting = useSeekGhosting;

    // The frame buffer and the processing context share the configuration's output format.
    AudioDataFormat outputFormat = configuration.OutputFormat;
    this.frameBuffer = new AudioFrameBuffer(bufferDuration, outputFormat, isStopping);
    this.processingContext = new AudioProcessingContext(configuration, frameBuffer, volumeLevel, outputFormat);
}
/// Toggles the output between stereo and mono and persists the choice
/// in player preferences.
void ChangeMonoStereo(bool value)
{
    this.stereoActivated = value;
    PlayerPrefs.SetInt("stereo_mode", value ? 1 : 0);

    AudioConfiguration cfg = AudioSettings.GetConfiguration();
    if (stereoActivated)
    {
        cfg.speakerMode = AudioSpeakerMode.Stereo;
    }
    else
    {
        cfg.speakerMode = AudioSpeakerMode.Mono;
    }
    AudioSettings.Reset(cfg);
}
/// <param name="executorId"> The ID for the track executor </param>
/// <param name="trackInfo"> Generic track information </param>
/// <param name="encodedTrack"> Track specific extra information that is required to initialise the track object </param>
/// <param name="volume"> Initial volume of the track </param>
/// <param name="configuration"> Configuration to use for audio processing </param>
/// <param name="position"> Position to start playing at in milliseconds </param>
public TrackStartRequestMessage(long executorId, AudioTrackInfo trackInfo, sbyte[] encodedTrack, int volume, AudioConfiguration configuration, long position)
{
    // Straight field capture, in parameter declaration order.
    this.executorId = executorId;
    this.trackInfo = trackInfo;
    this.encodedTrack = encodedTrack;
    this.volume = volume;
    this.configuration = configuration;
    this.position = position;
}
/// <summary>
/// Detaches this stream from the given player: runs the base detach logic,
/// releases the audio/video configurations' association with the player,
/// and clears the cached player reference.
/// </summary>
internal override void OnDetached(Player player)
{
    base.OnDetached(player);
    // Configurations may be null; null-conditional keeps detach tolerant.
    AudioConfiguration?.OnPlayerUnset(player);
    VideoConfiguration?.OnPlayerUnset(player);
    _player = null;
}
/// Applies the recommended audio settings: the Meta speaker mode, with
/// dspBufferSize and sampleRate set to 0 (presumably "platform default" —
/// confirm against Unity's AudioSettings.Reset documentation).
public static void SetRecommendedAudioSettings()
{
    AudioConfiguration recommended = AudioSettings.GetConfiguration();
    recommended.speakerMode = MetaSpeakerMode;
    recommended.sampleRate = 0;
    recommended.dspBufferSize = 0;
    AudioSettings.Reset(recommended);
}
/// Caches the engine's output sample rate and resets the gain state.
private void Start()
{
    AudioConfiguration audioConfig = AudioSettings.GetConfiguration();
    sampling_frequency = audioConfig.sampleRate;

    // Reset both the current and previous gain values.
    gain = 0;
    gain_old = 0;
}
/// Unity callback fired when the audio device or configuration changes;
/// keeps the audio reader's channel mode in sync with the engine's
/// current speaker mode.
void OnAudioConfigurationChanged(bool deviceWasChanged)
{
    if (audioReader == null)
    {
        return;
    }
    AudioConfiguration latest = AudioSettings.GetConfiguration();
    audioReader.unityChannelMode = latest.speakerMode;
}
/// <summary>
/// Detaches this media-packet source from the given WebRTC session: removes
/// the native media source, releases the audio/video configuration bindings,
/// and clears the back-reference to the session.
/// </summary>
internal override void OnDetached(WebRTC webRtc)
{
    // Remove the native source first; a failure aborts the detach.
    NativeWebRTC.RemoveMediaSource(webRtc.Handle, SourceId.Value).
    ThrowIfFailed("Failed to remove MediaPacketSource.");
    // Configurations may be null; null-conditional keeps detach tolerant.
    AudioConfiguration?.OnWebRTCUnset();
    VideoConfiguration?.OnWebRTCUnset();
    WebRtc = null;
}