private void SoundTrackActions(AudioJob job, AudioTrack track)
{
    currentTrack = job.type;
    switch (job.action)
    {
        case AudioAction.Start:
            track.source.Play();
            Invoke(nameof(PlayNextTrack), track.source.clip.length + job.delay);
            break;
        case AudioAction.Stop:
            if (!job.fade)
            {
                CancelInvoke();
                track.source.Stop();
                PlayNextTrack();
            }
            break;
        case AudioAction.Restart:
            CancelInvoke();
            track.source.Stop();
            track.source.Play();
            Invoke(nameof(PlayNextTrack), track.source.clip.length + job.delay);
            break;
    }
}
private IEnumerator RunAudioJob(AudioJob _job)
{
    AudioTrack _track = (AudioTrack)m_AudioTable[_job.type];
    _track.source.clip = GetAudioClipFromAudioTrack(_job.type, _track);

    switch (_job.action)
    {
        case AudioAction.START:
            _track.source.Play();
            _track.source.volume = 1;
            break;
        case AudioAction.STOP:
            _track.source.Stop();
            break;
        case AudioAction.RESTART:
            _track.source.Stop();
            _track.source.Play();
            _track.source.volume = 1;
            break;
        case AudioAction.PAUSE:
            _track.source.Pause();
            break;
    }

    m_JobTable.Remove(_job.type);
    Log("Job count: " + m_JobTable.Count);
    yield return null;
}
public async Task<byte[]> WebAPICall(AudioJob auj)
{
    //return Encoding.ASCII.GetBytes("test");
    byte[] byteRes = null;
    AudioRepository audioJobRepo = new AudioRepository();
    Guid jobID = await audioJobRepo.SubmitWorkItem(auj);

    // poll while the job is still pending (status 2)
    while (audioJobRepo.GetWorkStatus(jobID) == 2)
    {
        await Task.Delay(200);
    }

    if (audioJobRepo.GetWorkStatus(jobID) == 1)
    {
        // success
        FileResult result = audioJobRepo.GetResultContents(jobID);
        byteRes = result.getFileContents();
    }
    else
    {
        // failure
        throw new Exception("Task with job ID: " + jobID + " failed");
    }

    return byteRes;
}
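// Hypothetical usage sketch (an assumption, not part of the original source): awaits WebAPICall
// and writes the returned bytes to disk. The method name and output path are illustrative only.
public async Task SaveResultToDiskAsync(AudioJob auj, string outputPath)
{
    byte[] audioBytes = await WebAPICall(auj);
    System.IO.File.WriteAllBytes(outputPath, audioBytes);   // persist the synthesized audio
}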
public string verifyAudioSettings()
{
    AudioJob stream = AudioJob;
    if (stream == null)
    {
        return "Audio input, audio output, and audio settings must all be configured";
    }

    string fileErr = MainForm.verifyInputFile(this.AudioInput);
    if (fileErr != null)
    {
        return "Problem with audio input filename:\n" + fileErr;
    }

    fileErr = MainForm.verifyOutputFile(this.AudioOutput);
    if (fileErr != null)
    {
        return "Problem with audio output filename:\n" + fileErr;
    }

    AudioType aot = this.audioContainer.SelectedItem as AudioType;
    // test output file extension
    if (!Path.GetExtension(this.AudioOutput).Replace(".", "").Equals(aot.Extension, StringComparison.InvariantCultureIgnoreCase))
    {
        return "Audio output filename does not have the correct extension.\nBased on current settings, it should be " + aot.Extension;
    }

    return null;
}
private IEnumerator RunAudioJob(AudioJob job)
{
    AudioTrack track = (AudioTrack)audioTable[job.type];
    track.source.clip = GetAudioClipFromAudioTrack(job.type, track);

    switch (job.action)
    {
        case AudioAction.START:
            track.source.Play();
            break;
        case AudioAction.STOP:
            track.source.Stop();
            break;
        case AudioAction.RESTART:
            track.source.Stop();
            track.source.Play();
            break;
    }

    jobTable.Remove(job.type);
    Log("Job Count: " + jobTable.Count);
    yield return null;
}
private void RemoveConflictingJobs(AudioJob _job)
{
    // if a job of the same type is already running, cancel it first
    if (m_JobTable.ContainsKey(_job.type))
    {
        RemoveJob(_job.type);
    }

    // look for any other running job that shares this job's audio source
    AudioType _conflictAudio = AudioType.None;
    foreach (DictionaryEntry _entry in m_JobTable)
    {
        AudioType _audioType = (AudioType)_entry.Key;
        AudioTrack _audioTrackInUse = (AudioTrack)m_AudioTable[_audioType];
        AudioTrack _audioTrackNeeded = (AudioTrack)m_AudioTable[_job.type];
        if (_audioTrackNeeded.source == _audioTrackInUse.source)
        {
            // conflict found
            _conflictAudio = _audioType;
        }
    }

    if (_conflictAudio != AudioType.None)
    {
        RemoveJob(_conflictAudio);
    }
}
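// Minimal sketch of the RemoveJob helper this method relies on (an assumption based on the
// surrounding pattern, not the original implementation): it stops the running coroutine for
// the given type and removes it from the job table.
private void RemoveJob(AudioType _type)
{
    if (!m_JobTable.ContainsKey(_type))
    {
        LogWarning("Trying to remove a job that is not running: " + _type);
        return;
    }
    IEnumerator _runningJob = (IEnumerator)m_JobTable[_type];
    StopCoroutine(_runningJob);   // cancel the coroutine before forgetting about it
    m_JobTable.Remove(_type);
}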
private IEnumerator RunAudioJob(AudioJob _job)
{
    AudioTrack _track = (AudioTrack)m_AudioTable[_job.type];
    _track.source.clip = GetAudioClipFromAudioTrack(_job.type, _track);

    switch (_job.action)
    {
        case AudioAction.START:
            _track.source.Play();
            break;
        case AudioAction.STOP:
            // If fade is false, stop the track immediately
            if (!_job.fade)
            {
                _track.source.Stop();
            }
            break;
        case AudioAction.RESTART:
            _track.source.Stop();
            _track.source.Play();
            break;
    }

    if (_job.fade)
    {
        // If the job action is START or RESTART the initial volume is 0 (fade in); otherwise 1 (fade out)
        float _initialVolumeValue = _job.action == AudioAction.START || _job.action == AudioAction.RESTART ? 0.0f : 1.0f;
        // Target value of the volume lerp
        float _target = _initialVolumeValue == 0 ? 1 : 0;
        float _duration = 1.0f;
        float _timer = 0.0f;

        // lerp the volume parameter over the fade duration
        while (_timer < _duration)
        {
            _track.source.volume = Mathf.Lerp(_initialVolumeValue, _target, _timer / _duration);
            _timer += Time.deltaTime;
            yield return null;
        }

        if (_job.action == AudioAction.STOP)
        {
            _track.source.Stop();
        }
    }

    m_JobTable.Remove(_job.type);
    yield return null;
}
private IEnumerator RunAudioJob(AudioJob _job)
{
    yield return new WaitForSeconds(_job.delay);

    AudioTrack _track = (AudioTrack)m_AudioTable[_job.type];
    _track.source.clip = GetAudioClipFromAudioTrack(_job.type, _track);

    switch (_job.action)
    {
        case AudioAction.START:
            _track.source.Play();
            break;
        case AudioAction.STOP:
            if (!_job.fade)
            {
                _track.source.Stop();
            }
            break;
        case AudioAction.RESTART:
            _track.source.Stop();
            _track.source.Play();
            break;
    }

    if (_job.fade && _job.fadeDuration == 0f)
    {
        LogWarning("Fade is enabled but fade duration is 0 for [" + _job.type + "." + _job.action + "]. " +
                   "Set a non-zero fade duration, otherwise there will be no fade effect.");
        _job.fade = false;
    }

    if (_job.fade)
    {
        float _duration = _job.fadeDuration;
        float _initial = _job.action == AudioAction.START || _job.action == AudioAction.RESTART ? 0f : 1f;
        float _target = _initial == 0 ? 1 : 0;
        float _timer = 0f;

        while (_timer <= _duration)
        {
            _track.source.volume = Mathf.Lerp(_initial, _target, _timer / _duration);
            _timer += Time.deltaTime;
            yield return null;
        }

        if (_job.action == AudioAction.STOP)
        {
            _track.source.Stop();
        }
    }

    m_JobTable.Remove(_job.type);
    Log("Job count: " + m_JobTable.Count);
    yield return null;
}
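// Hedged refactoring sketch (not part of the original source): the fade loop above could be
// pulled into a small helper coroutine so the fade-in and fade-out paths share one
// implementation. The method name and signature are assumptions for illustration.
private IEnumerator FadeVolume(AudioSource _source, float _from, float _to, float _duration)
{
    float _timer = 0f;
    while (_timer <= _duration)
    {
        _source.volume = Mathf.Lerp(_from, _to, _timer / _duration);
        _timer += Time.deltaTime;
        yield return null;
    }
    _source.volume = _to;   // snap to the exact target so rounding never leaves the volume slightly off
}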
public void SetAudioJob(AudioJob job)
{
    audio1Bitrate.Value = job.Settings.Bitrate;
    if (job.Type != null && audio1Type.Items.Contains(job.Type))
    {
        audio1Type.SelectedItem = job.Type;
    }
}
private void AddJob(AudioJob _job)
{
    RemoveConflictingJobs(_job.type);

    IEnumerator _jobRunner = RunAudioJob(_job);
    jobTable.Add(_job.type, _jobRunner);
    StartCoroutine(_jobRunner);
}
void PlayFootstepAtVolume(bool shouldForcePlay, float tempVolume)
{
    AudioJob audioJob = leftFootActive ? audioJobLeft : audioJobRight;

    // temporarily override the volume, play the footstep, then restore the original value
    float tmp = audioJob.audio.volume;
    audioJob.audio.volume = tempVolume;
    PlayFootstep(shouldForcePlay);
    audioJob.audio.volume = tmp;
}
//////////////////////
// Public Interface //
//////////////////////

// Plays an AudioClip; should only be used for global (non-3D) sounds
public void Play(AudioName audioType, string uniqueIdentifier, bool shouldForcePlay = false, Action<AudioJob> settingsOverride = null)
{
    AudioJob audioJob = GetOrCreateJob(audioType, uniqueIdentifier, settingsOverride);
    if (!audioJob.audio.isPlaying || shouldForcePlay)
    {
        settingsOverride?.Invoke(audioJob);
        Play(audioJob);
    }
}
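// Hypothetical call-site sketch (an assumption, not from the original source): plays a global
// sound and tweaks the job's settings through the override callback. The AudioName value,
// identifier string, and volume are illustrative placeholders.
void PlayLandingThump()
{
    AudioManager.instance.Play(AudioName.PlayerJumpLandingThump, "example-id", shouldForcePlay: false, settingsOverride: job =>
    {
        job.audio.volume = 0.5f;   // halve the volume for this one playback
    });
}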
IEnumerator RunAudioJob(AudioJob _job)
{
    yield return new WaitForSeconds(_job.delay);

    AudioTrack _track = (AudioTrack)m_AudioTable[_job.type];
    _track.source.clip = GetAudioClipFromTrack(_job.type, _track);

    switch (_job.action)
    {
        case AudioAction.START:
            _track.source.Play();
            break;
        case AudioAction.STOP:
            if (!_job.fade)
            {
                // No fade out: stop immediately
                _track.source.Stop();
            }
            break;
        case AudioAction.RESTART:
            _track.source.Stop();
            _track.source.Play();
            break;
    }

    if (_job.fade)
    {
        float _initialValue = _job.action == AudioAction.START || _job.action == AudioAction.RESTART ? 0.0f : 1.0f;
        float _target = _initialValue == 0.0f ? 1.0f : 0.0f;
        float _duration = 1.0f;
        float _timer = 0.0f;

        while (_timer <= _duration)
        {
            _track.source.volume = Mathf.Lerp(_initialValue, _target, _timer / _duration);
            _timer += Time.deltaTime;
            yield return null;
        }

        if (_job.action == AudioAction.STOP)
        {
            _track.source.Stop();
        }
    }

    m_JobTable.Remove(_job.type);
    Log("Job count: " + m_JobTable.Count);
    yield return null;
}
private void AddJob(AudioJob newJob)
{
    RemoveConflictJobs(newJob.type);

    IEnumerator jobRunnerCoroutine = RunAudioJob(newJob);
    jobTable.Add(newJob.type, jobRunnerCoroutine);
    StartCoroutine(jobRunnerCoroutine);
    Log($"Starting job on [{newJob.type}] with operation {newJob.action}");
}
protected override void Start()
{
    base.Start();

    playerMovement = GetComponent<PlayerMovement>();
    ruffleSound = AudioManager.instance.GetOrCreateJob(AudioName.PlayerJumpLandingRuffle, ID);
    thumpSound = AudioManager.instance.GetOrCreateJob(AudioName.PlayerJumpLandingThump, ID);
    ruffleStartVolume = ruffleSound.audio.volume;
    thumpStartVolume = thumpSound.audio.volume;
}
private void AddJob(AudioJob _job)
{
    // cancel any job that might be using this job's audio source
    RemoveConflictingJobs(_job.type);

    IEnumerator _jobRunner = RunAudioJob(_job);
    m_JobTable.Add(_job.type, _jobRunner);
    StartCoroutine(_jobRunner);
    Log("Starting job on [" + _job.type + "] with operation: " + _job.action);
}
// Plays an AudioClip at the given position
public void PlayAtLocation(AudioName audioType, string uniqueIdentifier, Vector3 location, bool shouldForcePlay = false, Action<AudioJob> settingsOverride = null)
{
    AudioJob audioJob = GetOrCreateJob(audioType, uniqueIdentifier, settingsOverride);
    if (!audioJob.audio.isPlaying || shouldForcePlay)
    {
        audioJob.audio.transform.position = location;
        settingsOverride?.Invoke(audioJob);
        Play(audioJob);
    }
}
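// Hypothetical call-site sketch (an assumption, not from the original source): plays a
// positional sound where an impact happened. The AudioName value and identifier are
// illustrative placeholders.
void PlayThumpAt(Vector3 hitPoint)
{
    AudioManager.instance.PlayAtLocation(AudioName.PlayerJumpLandingThump, "example-id", hitPoint, shouldForcePlay: true);
}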
private void AddJob(AudioJob job)
{
    // cancel any job that might be using this job's audio source
    RemoveConflictingJobs(job._type);

    var jobRunner = RunAudioJob(job);
    mJobTable.Add(job._type, jobRunner);
    Timing.RunCoroutine(jobRunner);
    Log("Starting job on [" + job._type + "] with operation: " + job._action);
}
private IEnumerator RunAudioJob(AudioJob _job)
{
    yield return new WaitForSeconds(_job.delay);

    AudioTrack _track = (AudioTrack)m_AudioTable[_job.type];
    _track.source.clip = GetAudioClipFromAudioTrack(_job.type, _track);

    switch (_job.action)
    {
        case AudioAction.START:
            _track.source.Play();
            break;
        case AudioAction.STOP:
            if (!_job.fade)
            {
                _track.source.Stop();
            }
            break;
        case AudioAction.RESTART:
            _track.source.Stop();
            _track.source.Play();
            break;
    }

    if (_job.fade)
    {
        float _initial = _job.action == AudioAction.START || _job.action == AudioAction.RESTART ? 0.0f : 1.0f;
        float _target = _job.action == AudioAction.START || _job.action == AudioAction.RESTART ? 1.0f : 0.0f;
        float _duration = 1.0f; // TODO: make this configurable rather than a constant so the fade time can be adjusted
        float _timer = 0.0f;

        while (_timer <= _duration)
        {
            // Lerp moves the volume from the initial value to the target on a normalised 0-1 scale
            _track.source.volume = Mathf.Lerp(_initial, _target, _timer / _duration);
            _timer += Time.deltaTime;
            yield return null;
        }

        if (_job.action == AudioAction.STOP)
        {
            _track.source.Stop();
        }
    }

    m_JobTable.Remove(_job.type);
    Log("Job Count: " + m_JobTable.Count);
    yield return null;
}
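// Hedged sketch for the TODO above (an assumption, not the original code): expose the fade
// length on the job and read it in place of the hard-coded 1-second constant, as the variant
// with fadeDuration elsewhere in this section already does. Field names beyond those used in
// the snippets here are illustrative.
[System.Serializable]
public class AudioJob
{
    public AudioType type;
    public AudioAction action;
    public float delay;
    public bool fade;
    public float fadeDuration = 1.0f;   // would replace the hard-coded _duration constant
}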
private void AddJob(AudioJob _job)
{
    // Remove any conflicting jobs
    RemoveConflictingJobs(_job.type);

    // Add the job and start it
    IEnumerator _jobRunner = RunAudioJob(_job);
    m_JobTable.Add(_job.type, _jobRunner);
    StartCoroutine(_jobRunner);
}
private void AddJob(AudioJob _job)
{
    // Remove conflicting jobs
    RemoveConflictingJobs(_job.type);

    // Start job
    IEnumerator _jobRunner = RunAudioJob(_job);
    m_JobTable.Add(_job.type, _jobRunner);
    StartCoroutine(_jobRunner);
    Log("Starting job on [" + _job.type + "] with operation: " + _job.action);
}
public byte[] TestAudioAPI(string language, List<VoicePropriety> voicepropriety, string age = "", string gender = "", AudioSpeed audioSpeed = AudioSpeed.Normal, AudioFormat audioFormat = AudioFormat.Mp3)
{
    var outputPath = IOController.GetOutputDirectory();
    Language lang = Language.enGB;
    byte[] apiFileContent = null;

    // Resolve the language from the first four characters of the language code (e.g. "enGB")
    if (Enum.IsDefined(typeof(Language), language.Substring(0, 4)))
    {
        apiFileContent = RoboBrailleProcessor.GetEncodingByCountryCode(lang).GetBytes(IOController.GetTestForLanguage(language));
        lang = (Language)Enum.Parse(typeof(Language), language.Substring(0, 4), true);
    }

    // Map the age/gender hints onto voice proprieties and merge in any explicit ones
    var voiceProps = new List<VoicePropriety>();
    if (age.Equals("Young"))
    {
        voiceProps.Add(VoicePropriety.Younger);
    }
    if (age.Equals("Old"))
    {
        voiceProps.Add(VoicePropriety.Older);
    }
    if (gender.Equals("Male"))
    {
        voiceProps.Add(VoicePropriety.Male);
    }
    if (gender.Equals("Female"))
    {
        voiceProps.Add(VoicePropriety.Female);
    }
    if (voicepropriety != null)
    {
        voiceProps.AddRange(voicepropriety);
    }

    AudioJob auj = new AudioJob()
    {
        Id = Guid.NewGuid(),
        FileContent = apiFileContent,
        UserId = Guid.Parse("d2b97532-e8c5-e411-8270-f0def103cfd0"),
        FileExtension = ".mp3",
        FileName = "testAudio",
        MimeType = "plain/text",
        Status = JobStatus.Started,
        SubmitTime = DateTime.Now,
        DownloadCounter = 0,
        InputFileHash = RoboBrailleProcessor.GetMD5Hash(apiFileContent),
        AudioLanguage = lang,
        FormatOptions = audioFormat,
        SpeedOptions = audioSpeed,
        VoicePropriety = voiceProps.ToArray()
    };

    return WebAPICall(auj).Result;
}
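// Hypothetical call-site sketch (an assumption, not from the original source): requests a
// British English test synthesis and saves the returned MP3 bytes. The output path is
// illustrative only.
public void SaveTestAudio()
{
    byte[] mp3 = TestAudioAPI("enGB", null, age: "Young", gender: "Female");
    System.IO.File.WriteAllBytes("testAudio.mp3", mp3);
}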
private void AddJob(AudioJob _job)
{
    // Remove conflicting jobs: if a job is already running on, say, the soundtrack and we need to
    // play another on the same track, the running one has to be stopped first. This is an important
    // edge case to handle.
    //RemoveConflictingJobs(_job.type);

    // Start the job
    IEnumerator _jobRunner = RunAudioJob(_job);
    m_JobTable.Add(_job.type, _jobRunner);
    StartCoroutine(_jobRunner);
    Log("Starting Job on [" + _job.type + "] with the operation [" + _job.action + "]");
}
private void AddJob(AudioJob _job, float floatVar)
{
    // remove conflicting jobs
    RemoveConflictingJobs(_job);

    // start job
    IEnumerator _jobRunner = RunAudioJob(_job, floatVar);
    m_JobTable.Add(_job.type, _jobRunner);
    StartCoroutine(_jobRunner);
    Log("Starting job with float on [" + _job.type + "] with operation: " + _job.action + " and float: " + floatVar);
}
private void AddJob(AudioJob _job)
{
    Log($"AudioJob {_job}");

    // remove conflicting jobs
    RemoveConflictingJobs(_job.type);

    // start job
    IEnumerator _jobRunner = RunAudioJob(_job);
    m_JobTable.Add(_job.type, _jobRunner);
    StartCoroutine(_jobRunner);
    Log($"Starting job on [{_job.type}] with operation {_job.action}");
}
private void AddJob(AudioJob job)
{
    // Remove the conflicting jobs
    RemoveConflictingJobs(job.type);

    // start job
    IEnumerator jobRunner = RunAudioJob(job);
    jobTable.Add(job.type, jobRunner);
    StartCoroutine(jobRunner);
    Log("Starting job on [" + job.type + "] with operation " + job.action);
}
private void AddJob(AudioJob job)
{
    RemoveConflictingJobs(job.type);

    IEnumerator jobRunner = RunAudioJob(job);
    if (jobRunner != null)
    {
        _jobTable.Add(job.type, jobRunner);
        StartCoroutine(jobRunner);
    }
    Log("Starting job on - " + job.type + " with operation - " + job.action);
}
private IEnumerator RunAudioJob(AudioJob _job)
{
    yield return new WaitForSeconds(_job.delay);

    AudioTrack _track = GetAudioTrack(_job.type); // track existence should be verified by now
    _track.source.clip = GetAudioClipFromAudioTrack(_job.type, _track);

    switch (_job.action)
    {
        case AudioAction.START:
            _track.source.Play();
            break;
        case AudioAction.STOP:
            if (!_job.fade)
            {
                _track.source.Stop();
            }
            break;
        case AudioAction.RESTART:
            _track.source.Stop();
            _track.source.Play();
            break;
    }

    // fade volume
    if (_job.fade)
    {
        float _initial = _job.action == AudioAction.START || _job.action == AudioAction.RESTART ? 0 : 1;
        float _target = _initial == 0 ? 1 : 0;
        float _duration = 1.0f;
        float _timer = 0.0f;

        while (_timer < _duration)
        {
            _track.source.volume = Mathf.Lerp(_initial, _target, _timer / _duration);
            _timer += Time.deltaTime;
            yield return null;
        }

        if (_job.action == AudioAction.STOP)
        {
            _track.source.Stop();
        }
    }

    m_JobTable.Remove(_job.type);
    Log("Job count: " + m_JobTable.Count);
}
private IEnumerator RunAudioJob(AudioJob _job)
{
    yield return new WaitForSeconds(_job.delay);

    // Which channel does this audio type play on? Update that channel's clip
    AudioChannel _channel = (AudioChannel)audioTable[_job.type];
    _channel.source.clip = GetAudioClipFromChannel(_job.type, _channel);

    switch (_job.action)
    {
        case AudioAction.START:
            _channel.source.Play();
            break;
        case AudioAction.STOP:
            _channel.source.Stop();
            break;
        case AudioAction.RESTART:
            _channel.source.Stop();
            _channel.source.Play();
            break;
    }

    // handle volume fades
    if (_job.fade)
    {
        float _initial = _job.action == AudioAction.START || _job.action == AudioAction.RESTART ? 0 : 1;
        float _target = _initial == 0 ? 1 : 0;
        float _duration = 1.0f;
        float _timer = 0.0f;

        while (_timer < _duration)
        {
            _channel.source.volume = Mathf.Lerp(_initial, _target, _timer / _duration);
            _timer += Time.deltaTime;
            yield return null;
        }

        if (_job.action == AudioAction.STOP)
        {
            _channel.source.Stop();
        }
    }

    jobTable.Remove(_job.type);
}
private IEnumerator<float> RunAudioJob(AudioJob job)
{
    yield return Timing.WaitForSeconds(job._delay);

    var track = GetAudioTrack(job._type); // track existence should be verified by now
    track.source.clip = GetAudioClipFromAudioTrack(job._type, track);

    switch (job._action)
    {
        case AudioAction.Start:
            track.source.Play();
            break;
        case AudioAction.Stop:
            if (!job._fade)
            {
                track.source.Stop();
            }
            break;
        case AudioAction.Restart:
            track.source.Stop();
            track.source.Play();
            break;
    }

    if (job._fade)
    {
        float initial = job._action == AudioAction.Start || job._action == AudioAction.Restart ? 0 : 1;
        float target = Math.Abs(initial) < 0.0001f ? 1 : 0;
        var duration = job._fadeDuration;
        var timer = 0.0f;

        while (timer < duration)
        {
            track.source.volume = Mathf.Lerp(initial, target, timer / duration);
            timer += Time.deltaTime;
            yield return Timing.WaitForOneFrame;
        }

        if (job._action == AudioAction.Stop)
        {
            track.source.Stop();
        }
    }

    mJobTable.Remove(job._type);
    Log("Job count: " + mJobTable.Count);
}
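// Hedged defensive sketch (an assumption, not the original code): in the MEC variant above a
// _fadeDuration of 0 makes the fade loop exit immediately, so a requested fade silently does
// nothing. A small clamp helper, applied when reading the duration, makes that case explicit;
// the name and fallback value are illustrative.
private static float SafeFadeDuration(float requested, float fallback = 1.0f)
{
    // guard against zero or negative durations that would skip the fade entirely
    return requested > 0f ? requested : fallback;
}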