/// <summary>
/// Builds the clip info: resolves the resource path from the audio type (SE vs BGM prefix),
/// then pre-stocks MaxSECount SEInfo slots whose volumes decay geometrically by AttenuateRate.
/// </summary>
/// <param name="type">Selects SEPath or BGMPath as the resource-path prefix.</param>
/// <param name="resourcename">Resource file name appended to the chosen prefix.</param>
/// <param name="systemname">Stored verbatim in SystemName.</param>
/// <param name="initvolume">Volume of the first (index 0) SE slot.</param>
/// <param name="maxSEcount">Number of simultaneous SE slots to pre-allocate.</param>
public AudioClipInfo(AudioType type, string resourcename, string systemname, float initvolume, int maxSEcount = 16 )
{
    ResourceName = resourcename;
    if (type == AudioType.SE) { ResourcePath = SEPath + resourcename; }
    else if (type == AudioType.BGM) { ResourcePath = BGMPath + resourcename; }
    SystemName = systemname;
    MaxSECount = maxSEcount;
    PlayingList = new List<SEInfo>(maxSEcount);
    StockList = new SortedList<int, SEInfo>(maxSEcount);
    InitVolume = initvolume;
    AttenuateRate = calcAttenuateRate();
    //Debug.Log("Att: " + AttenuateRate);
    for (int i = 0; i < MaxSECount; i++)
    {
        //Debug.LogFormat("InitVol:{0}",InitVolume);
        // Slot i plays at InitVolume * AttenuateRate^i, so later stocked slots are quieter.
        SEInfo seinfo = new SEInfo(i, 0f, InitVolume * Mathf.Pow(AttenuateRate, i));
        //SEInfo seinfo = new SEInfo(i, 0f, InitVolume);
        StockList.Add(seinfo.Index, seinfo);
        //Debug.Log("Vol: " + seinfo.Volume);
    }
    Loop = null;
}
/// <summary>Plays every entry in AudioList whose type matches the requested one.</summary>
public void PlayAudio(AudioType type)
{
    foreach (var audio in AudioList)
    {
        if (audio.type == type)
        {
            audio.Play();
        }
    }
}
/// <summary>
/// Initializes a new <see cref="Playlist"/> with an empty audio-file list.
/// </summary>
/// <param name="id">The identifier.</param>
/// <param name="name">The name.</param>
/// <param name="filename">Filename of the playlist file.</param>
/// <param name="type">The audio type.</param>
public Playlist(int id, string name, string filename, AudioType type)
{
    Id = id;
    Name = name;
    Filename = filename;
    Type = type;
    AudioFileList = new List<string>();
}
/// <summary>
/// Initializes a new instance of the <see cref="LocalSong"/> class.
/// </summary>
/// <param name="path">The path of the file.</param>
/// <param name="audioType">The audio type.</param>
/// <param name="duration">The duration of the song.</param>
public LocalSong(string path, AudioType audioType, TimeSpan duration)
    : base(path, audioType, duration)
{
    // A removable song is streamed from its original location.
    if (IsRemovable)
    {
        StreamingPath = OriginalPath;
    }
}
// Plays the source's clip as a one-shot when the per-type cap allows it,
// bumping the active count and scheduling its decrement for when the clip ends.
public void play(AudioSource source, AudioType type)
{
    if (!canPlay(type))
    {
        return;
    }

    source.PlayOneShot(source.clip);
    audiosPlaying[type]++;
    StartCoroutine(decrementAudiosPlaying(type, source.clip.length));
}
/// <summary>
/// Creates a <see cref="Mock{Song}"/> with sensible defaults; a null duration
/// falls back to <see cref="TimeSpan.Zero"/>.
/// </summary>
public static Mock<Song> CreateSongMock(string name = "Song", bool callBase = false, AudioType audioType = AudioType.Mp3, TimeSpan? duration = null)
{
    duration = duration ?? TimeSpan.Zero;

    var mock = new Mock<Song>(name, audioType, duration);
    mock.CallBase = callBase;
    return mock;
}
/// <summary>Captures the immutable description of one downloadable video format.</summary>
private VideoInfo(int formatCode, VideoType videoType, int resolution, bool is3D, AudioType audioType, int audioBitrate)
{
    FormatCode = formatCode;
    VideoType = videoType;
    Resolution = resolution;
    Is3D = is3D;
    AudioType = audioType;
    AudioBitrate = audioBitrate;
}
// Stores the immutable description of one downloadable video format.
private VideoInfo(int formatCode, VideoType videoType, int resolution, bool is3D, AudioType audioType, int audioBitrate)
{
    FormatCode = formatCode;
    VideoType = videoType;
    Resolution = resolution;
    Is3D = is3D;
    AudioType = audioType;
    AudioBitrate = audioBitrate;
}
/// <summary>
/// Wraps raw audio bytes with their type; rejects a null or empty payload.
/// </summary>
public AudioBinary(byte[] audioBytes, AudioType audioType)
{
    bool hasData = audioBytes != null && audioBytes.Length > 0;
    if (!hasData)
    {
        throw new InvalidOperationException("AudioBytes must contain values.");
    }

    AudioBytes = audioBytes;
    AudioType = audioType;
}
/// <summary>
/// Creates an audio entry; length (ms), formatted length, and type are all
/// derived from <paramref name="saveName"/> via AudioHelper.
/// </summary>
public Audio(string name, string saveName, int matchScriptID)
{
    Name = name;
    MatchScriptID = matchScriptID;
    SaveName = saveName;
    // Derived fields follow from the saved file.
    LengthMillis = AudioHelper.GetAudioLength(saveName);
    Length = AudioHelper.ConvertMillisToTime(LengthMillis);
    Type = AudioHelper.GetAudioType(saveName);
}
/// <summary>
/// Generates a new AudioType record named after the name text box, registers it
/// with the data manager, and returns its assigned Id.
/// </summary>
protected override int AddNewData()
{
    m_audioType = DataManager.Generate<AudioType>();
    m_audioType.Name = m_textBox_name.Text;
    DataManager.AudioTypes.Add(m_audioType);
    return m_audioType.Id;
}
/// <summary>
/// Initializes a new instance of the <see cref="YoutubeSong"/> class.
/// </summary>
/// <param name="path">The path of the song.</param>
/// <param name="audioType">The audio type.</param>
/// <param name="duration">The duration of the song.</param>
/// <param name="isStreaming">if set to true, the song streams from YouTube, instead of downloading.</param>
/// <exception cref="ArgumentNullException"><c>path</c> is null.</exception>
public YoutubeSong(string path, AudioType audioType, TimeSpan duration, bool isStreaming)
    : base(path, audioType, duration)
{
    IsStreaming = isStreaming;

    // A streaming song plays straight from its original location.
    if (IsStreaming)
    {
        StreamingPath = OriginalPath;
    }
}
/// <summary>
/// Returns the most recently added AudioSource on the layer for the given type,
/// adding one when the layer has none.
/// </summary>
public static AudioSource GetLatestAudioSource( AudioType audioType )
{
    GameObject audioLayer = GetAudioLayer( audioType );
    // GetComponents<T> already returns AudioSource[]; the old "as AudioSource[]" cast was redundant.
    AudioSource[] audioSources = audioLayer.GetComponents<AudioSource>();
    if( audioSources.Length > 0 )
        return audioSources[ audioSources.Length - 1 ];
    // Fix: the string-based AddComponent("AudioSource") overload is obsolete and was
    // removed in Unity 5+; the generic overload is type-safe and needs no cast.
    return audioLayer.AddComponent<AudioSource>();
}
/// <summary>
/// Adds audio from local computer to collection.
/// </summary>
/// <param name="filename">File to add</param>
/// <param name="artist, title">Tags for file</param>
/// <param name="audioType">If atFile, copies audio to working directory, it's also default behavior.
/// If atLink, saves filename and uses it to access file later.</param>
/// <returns>Returns <c>mpAudio</c> instance that refers to the new element of collection.</returns>
// NOTE(review): the new object is given an Id but is never inserted into any collection in
// this method — presumably the mpLocalAudio/mpLocalAudioLink constructors register it; confirm.
public mpAudio addAudio(string filename, string artist, string title, AudioType audioType = AudioType.atFile)
{
    mpAudio audio;
    if (audioType == AudioType.atLink)
        audio = new mpLocalAudioLink(filename, artist, title);
    else
        audio = new mpLocalAudio(filename, artist, title);
    audio.Id = ++maxId;
    // When the Id space is exhausted, compact/reissue indices.
    if (audio.Id == int.MaxValue)
        OptimizeIndex();
    return audio;
}
/// <summary>
/// Adds audio from vk.com to collection.
/// </summary>
/// <param name="vkAudio">Audio to add.</param>
/// <param name="audioType">Set AudioType.atFile to save locally not only link, but also .mp3 itself.</param>
/// <returns>Returns <c>mpAudio</c> instance that refers to the new element of collection.</returns>
// NOTE(review): the branch mapping looks inverted relative to the doc above — atFile (save
// locally) constructs mpAudioLink while the else branch constructs mpLocalAudio. Verify the
// two classes' semantics; this may be a latent bug.
public mpAudio addAudio(PashaAudio vkAudio, AudioType audioType = AudioType.atLink)
{
    mpAudio audio;
    if (audioType == AudioType.atFile)
        audio = new mpAudioLink(vkAudio);
    else
        audio = new mpLocalAudio(vkAudio);
    audio.Id = ++maxId;
    // When the Id space is exhausted, compact/reissue indices.
    if (audio.Id == int.MaxValue)
        OptimizeIndex();
    return audio;
}
/// <summary>
/// Plays a clip: SFX fire as one-shots; music replaces the looping music track
/// only when the clip differs from what is already assigned.
/// </summary>
public static void Play( AudioClip clip, AudioType type )
{
    if( type == AudioType.SFX )
    {
        instance.audioSFX.PlayOneShot( clip, 1f);
        return;
    }

    bool isNewMusic = type == AudioType.Music && clip != instance.audioMusic.clip;
    if( !isNewMusic )
        return;

    var music = instance.audioMusic;
    music.loop = true;
    music.volume = 0.5f;
    music.clip = clip;
    music.Play();
}
/// <summary>
/// Reports whether the given audio category is muted in settings; unknown
/// categories raise a KeyNotFoundException.
/// </summary>
public static bool IsMuted (AudioType audioType)
{
    if (audioType == AudioType.FX)
    {
        return SettingsUtil.FXMuted;
    }
    if (audioType == AudioType.Music)
    {
        return SettingsUtil.MusicMuted;
    }
    if (audioType == AudioType.VO)
    {
        return SettingsUtil.VOMuted;
    }
    throw new System.Collections.Generic.KeyNotFoundException();
}
/// <summary>
/// Initializes a new <see cref="AudioFile"/> with the supplied metadata.
/// </summary>
/// <param name="id">The identifier.</param>
/// <param name="filename">Filename of the audiofile.</param>
/// <param name="title">The title.</param>
/// <param name="artist">The artist.</param>
/// <param name="album">The album.</param>
/// <param name="year">The year.</param>
/// <param name="label">The label.</param>
/// <param name="duration">The duration.</param>
/// <param name="gender">The gender tag.</param>
/// <param name="audiotype">The audio type, stored in <c>Type</c>.</param>
public AudioFile(int id, string filename, string title, string artist, string album, int year, string label, TimeSpan duration, string gender, AudioType audiotype)
{
    Id = id;
    Filename = filename;
    Title = title;
    Artist = artist;
    Album = album;
    Year = year;
    Label = label;
    Duration = duration;
    Gender = gender;
    Type = audiotype;
}
/// <summary>
/// Returns the stored volume for an audio layer, or -1 for an invalid type or a
/// type with no registered volume (the latter is also logged as an error).
/// </summary>
public static float GetAudioLayerVolume( AudioType audioType )
{
    if( audioType == AudioType.Invalid )
        return -1f;

    // TryGetValue avoids the double dictionary lookup of ContainsKey + indexer.
    float volume;
    if( !InternalAudioLayerVolumes.TryGetValue( audioType, out volume ) )
    {
        Debug.LogError( "Cannot find audio layer volume for " + audioType );
        return -1f;
    }

    return volume;
}
// Switches between NSF and WAV mode: index 0 selects NSF, anything else WAV,
// and the matching settings panel is shown while the other is hidden.
private void typeCombo_SelectedIndexChanged(object sender, EventArgs e)
{
    bool isNsf = typeCombo.SelectedIndex == 0;
    type = isNsf ? AudioType.NSF : AudioType.Wav;
    nsfPanel.Visible = isNsf;
    wavPanel.Visible = !isNsf;
}
/// <summary>
/// Ranks audio formats for selection: Mp3 (4) > Aac (3) > Vorbis (2) > any other
/// recognized format (1) > Unknown (0).
/// </summary>
private int AudioTypePriority(AudioType audioType)
{
    // The original wedged `default` between named cases, which obscured the ordering;
    // behavior is identical with the cases listed in priority order.
    switch (audioType)
    {
        case AudioType.Mp3:
            return 4;
        case AudioType.Aac:
            return 3;
        case AudioType.Vorbis:
            return 2;
        case AudioType.Unknown:
            return 0;
        default:
            // Anything identified is better than unknown.
            return 1;
    }
}
/// <summary>
/// Editor control for one AudioType record: shows "N/A"/empty fields when no data
/// is present, otherwise the record's Id and Name.
/// </summary>
public UserControl_AudioType(AudioType audioType) : base("Audio Type", false)
{
    m_audioType = audioType;
    if (DataIsNull())
    {
        m_textBlock_id.Text = "N/A";
        m_textBox_name.Text = string.Empty;
    }
    else
    {
        m_textBlock_id.Text = m_audioType.Id.ToString();
        m_textBox_name.Text = m_audioType.Name;
    }
}
/// <summary>
/// Plays a random clip from the list as a one-shot, provided the source is non-null
/// and the audio type is currently playable. Logs warnings for a null source or an
/// empty clip list. (Removed long-dead commented-out spatialBlend version guards.)
/// </summary>
public static void PlayOneShotControlled(this AudioSource source, List<AudioClip> clips, AudioType type)
{
    if (source == null)
    {
        Debug.LogWarning("PlayOneShotControlled null audio source");
        return;
    }
    if (!type.IsPlayable())
        return;

    if (clips.Count > 0)
        source.PlayOneShot(clips[Random.Range(0, clips.Count)]);
    else
        Debug.LogWarning("Empty sound list to play");
}
/// <summary>
/// Spawns a temporary child GameObject at <paramref name="point"/> that plays the
/// indexed clip fully 3D through the mixer group for <paramref name="type"/>, and
/// destroys itself when the clip length elapses.
/// </summary>
public AudioSource Play(int clip, Vector3 point, float volume, float pitch, AudioType type)
{
    AudioClip selected = soundManager.clips[clip];

    // Host object for the one-off source.
    GameObject host = new GameObject("Audio: " + selected.name);
    host.transform.position = point;
    host.transform.parent = transform;

    AudioSource source = host.AddComponent<AudioSource>();
    source.spatialBlend = 1f;
    source.outputAudioMixerGroup = GetMixer(type);
    source.clip = selected;
    source.volume = volume;
    source.pitch = pitch;
    source.Play();

    Destroy(host, selected.length);
    return source;
}
// P/Invoke: fills buffer_out with type values matching the filter; SizeParamIndex = 1
// marshals the array length from nb_wanted. The int return's exact meaning (count vs.
// status) is defined by the native library — confirm against its documentation.
public static extern int ReadType([MarshalAs(UnmanagedType.LPArray, SizeParamIndex = 1)] short[] buffer_out, int nb_wanted, AudioType filter);
// Minimum bitrate for the given audio type: always 0 (no lower bound is imposed).
internal int minBitrate(AudioType type)
{
    return 0;
}
/// <summary>
/// Coroutine: loads an audio clip from a locally cached file. Builds a file:// URL
/// (Windows needs the triple-slash form), downloads it via UnityWebRequestMultimedia,
/// names the clip after the file, and invokes onLoad on success or onFail on error.
/// </summary>
public static IEnumerator LoadAudioFromCache(string fileUrl, Action <AudioClip> onLoad, Action onFail = null, AudioType type = AudioType.MPEG) { if (Application.platform == RuntimePlatform.WindowsEditor || Application.platform == RuntimePlatform.WindowsPlayer) { fileUrl = "file:///" + fileUrl; } else { fileUrl = "file://" + fileUrl; } using (var www = UnityWebRequestMultimedia.GetAudioClip(fileUrl, type)) { yield return(www); if (string.IsNullOrEmpty(www.error)) { var aud = DownloadHandlerAudioClip.GetContent(www); aud.name = GetFileName(fileUrl); onLoad.SafeInvoke(aud); } else { onFail.SafeInvoke(); } } }
// Convenience overload: non-streaming, compressed variant of GetAudioClipInternal.
public AudioClip GetAudioClipCompressed(bool threeD, AudioType audioType) { return(this.GetAudioClipInternal(threeD, false, true, audioType)); }
/// <summary>
/// Admin-only: deletes an uploaded VoIP audio file from the "voip" store and, for every
/// number whose settings referenced it (matched per audio type), swaps in the default
/// audio of the same type when one exists, persisting each updated number.
/// </summary>
/// <exception cref="ItemNotFoundException">The file does not exist in the store.</exception>
public VoipUpload DeleteUploadedFile(AudioType audioType, string fileName) { if (!CRMSecurity.IsAdmin) { throw CRMSecurity.CreateSecurityException(); } var store = Global.GetStore(); var path = Path.Combine(audioType.ToString().ToLower(), fileName); var result = new VoipUpload { AudioType = audioType, Name = fileName, Path = CommonLinkUtility.GetFullAbsolutePath(store.GetUri(path).ToString()) }; if (!store.IsFile("voip", path)) { throw new ItemNotFoundException(); } store.Delete("voip", path); var dao = DaoFactory.VoipDao; var numbers = dao.GetNumbers(); var defAudio = StorageFactory.GetStorage("", "crm").ListFiles("voip", "default/" + audioType.ToString().ToLower(), "*.*", true).FirstOrDefault(); if (defAudio == null) { return(result); } foreach (var number in numbers) { switch (audioType) { case AudioType.Greeting: if (number.Settings.GreetingAudio == result.Path) { number.Settings.GreetingAudio = CommonLinkUtility.GetFullAbsolutePath(defAudio.ToString()); } break; case AudioType.HoldUp: if (number.Settings.HoldAudio == result.Path) { number.Settings.HoldAudio = CommonLinkUtility.GetFullAbsolutePath(defAudio.ToString()); } break; case AudioType.Queue: var queue = number.Settings.Queue; if (queue != null && queue.WaitUrl == result.Path) { queue.WaitUrl = CommonLinkUtility.GetFullAbsolutePath(defAudio.ToString()); } break; case AudioType.VoiceMail: if (number.Settings.VoiceMail == result.Path) { number.Settings.VoiceMail = CommonLinkUtility.GetFullAbsolutePath(defAudio.ToString()); } break; } dao.SaveOrUpdateNumber(number); } return(result); }
// Thin wrapper: delegates playback to the configured adapter.
public void Play(AudioType audioType, string fileName) { _adapter.Play(audioType, fileName); }
// Enqueues a STOP job for the given audio type, optionally fading, after an optional delay.
public void StopAudio(AudioType _type, bool _fade = false, float _delay = 0) { AddJob(new AudioJob(AudioAction.STOP, _type, _fade, _delay)); }
// Engine-implemented (extern): creates the native audio-clip object backing this handler.
internal extern void InternalCreateAudioClip(string url, AudioType audioType);
/// <summary>
/// Broadcasts one audio message to several recipients: resolves each recipient to
/// a JID, joins them comma-separated as the destination, and sends the built message
/// when construction succeeds.
/// </summary>
public void SendMessageBroadcastAudio(string[] recipients, byte[] AudioData, AudioType audtype)
{
    List<string> jids = new List<string>();
    foreach (string recipient in recipients)
    {
        jids.Add(GetJID(recipient));
    }
    string to = string.Join(",", jids.ToArray());

    FMessage msg = getFmessageAudio(to, AudioData, audtype);
    if (msg != null)
    {
        SendMessage(msg);
    }
}
// Waits for the clip's length, then frees one playing slot for the given type.
IEnumerator decrementAudiosPlaying(AudioType type, float length)
{
    yield return new WaitForSeconds(length);
    audiosPlaying[type]--;
}
/// <summary>
/// Core SFX entry point. Returns null for an empty name, disabled effect audio, or no
/// usable item. When the clip is cached it is configured and played immediately; otherwise
/// the item is marked loading and the clip is fetched asynchronously via
/// CAssetBundleManager, with the callback validating the item is still assigned to this
/// request (same audioName, isLoading == 1) before configuring and playing it.
/// AttS3D attaches the item to the given transform; S3D positions the source at pos.
/// </summary>
private EffectAudioSourceItem _PlaySFX(string audioName, AudioType aType, Transform point, Vector3 pos, int sortId, bool isLoop) { if (string.IsNullOrEmpty(audioName)) { return(null); } if (_IsEffectAudioEnabled) { CheckAudioListener(); SafeInitSFX(); if (IsClipCached(audioName)) { //PlayAttachAudio(_ClipCache[audioName], false, trans, audioName, sortId, IsLoop);
EffectAudioSourceItem item = GetUsableItem(aType, sortId, pos); if (item != null && item.audioSource != null) { AudioClip clip; LoadCache(audioName, out clip); GetProperSettingOfItem(item, aType, clip, audioName, sortId, isLoop); if (aType == AudioType.AttS3D) { item.attachedPos = point; } if (aType == AudioType.S3D) { item.audioSource.transform.position = pos; } item.audioSource.Play(); return(item); } } else { EffectAudioSourceItem item_arranged = GetUsableItem(aType, sortId, pos); if (item_arranged != null) { Action <UnityEngine.Object> callback = asset => { if (!_IsEffectAudioEnabled || asset == null) { if (item_arranged != null) { item_arranged.isLoading = 0; item_arranged.audioName = ""; } return; } if (item_arranged != null) { if (item_arranged.id != -1 && item_arranged.audioSource != null) { if (item_arranged.isLoading == 1 && audioName == item_arranged.audioName) { //EffectAudioSourceItem item = FindInternalItem(item_arranged);
item_arranged.isLoading = 0; GetProperSettingOfItem(item_arranged, aType, asset as AudioClip, audioName, sortId, isLoop); if (aType == AudioType.AttS3D) { item_arranged.attachedPos = point; } if (aType == AudioType.S3D) { item_arranged.audioSource.transform.position = pos; } if (item_arranged.audioSource != null) { item_arranged.audioSource.Play(); } } } item_arranged = null; } }; item_arranged.isLoading = 1; item_arranged.audioName = audioName; //Debug.Log("Ask " + audioName + " " + item_arranged.audioSource.name);
CAssetBundleManager.AsyncLoadResource(audioName, callback, false); return(item_arranged); } } } return(null); }
// Wraps a UnityWebRequestMultimedia audio download in an IObservable driven by a coroutine.
public static IObservable <AudioClip> GetAudioClipAsObservable(string _url, AudioType _audioType) { return(ObservableUnity.FromCoroutine <AudioClip>((_observer, _cancellation) => FetchAudioRequest(UnityWebRequestMultimedia.GetAudioClip(_url, _audioType), _observer))); }
/// <summary>
/// Coroutine: downloads (or reads via a file URL) a generated speech audio file, assigns
/// the decoded clip to the wrapper's AudioSource, copies the file, and fires the
/// speak/complete/error callbacks. Local files of 1024 bytes or less are rejected as
/// invalid. Unity-version preprocessor guards select the web-request and error APIs.
/// </summary>
protected virtual IEnumerator playAudioFile(Model.Wrapper wrapper, string url, string outputFile, AudioType type = AudioType.WAV, bool isNative = false, bool isLocalFile = true, System.Collections.Generic.Dictionary <string, string> headers = null) { //Debug.LogWarning("playAudioFile: " + wrapper);
if (wrapper != null && wrapper.Source != null) { if (!isLocalFile || (isLocalFile && new System.IO.FileInfo(outputFile).Length > 1024)) {
#if UNITY_2017_1_OR_NEWER
using (UnityWebRequest www = UnityWebRequestMultimedia.GetAudioClip(url.Trim(), type))
#else
using (UnityWebRequest www = UnityWebRequest.GetAudioClip(url.Trim(), type))
#endif
{ if (headers != null) { foreach (System.Collections.Generic.KeyValuePair <string, string> kvp in headers) { www.SetRequestHeader(kvp.Key, kvp.Value); } }
#if UNITY_2017_2_OR_NEWER
yield return(www.SendWebRequest());
#else
yield return(www.Send());
#endif
#if UNITY_2017_1_OR_NEWER
if (!www.isHttpError && !www.isNetworkError)
#else
if (string.IsNullOrEmpty(www.error))
#endif
{ //just for testing!
//string outputFile = Util.Config.AUDIOFILE_PATH + wrapper.Uid + extension;
//System.IO.File.WriteAllBytes(outputFile, www.bytes);
#if UNITY_WEBGL
AudioClip ac = Util.WavMaster.ToAudioClip(www.downloadHandler.data);
#else
AudioClip ac = DownloadHandlerAudioClip.GetContent(www); do { yield return(ac); } while (ac.loadState == AudioDataLoadState.Loading);
#endif
//Debug.Log("ac.loadState: " + ac.loadState + " - " + www.downloadedBytes);
if (ac.loadState == AudioDataLoadState.Loaded) { wrapper.Source.clip = ac; if (Util.Config.DEBUG) { Debug.Log("Text generated: " + wrapper.Text); } copyAudioFile(wrapper, outputFile, isLocalFile, www.downloadHandler.data); if (!isNative) { onSpeakAudioGenerationComplete(wrapper); } if ((isNative || wrapper.SpeakImmediately) && wrapper.Source != null) { wrapper.Source.Play(); onSpeakStart(wrapper); do { yield return(null); } while (!silence && Util.Helper.hasActiveClip(wrapper.Source)); if (Util.Config.DEBUG) { Debug.Log("Text spoken: " + wrapper.Text); } onSpeakComplete(wrapper); if (ac != null) { AudioClip.Destroy(ac); } } } else { string errorMessage = "Could not load the audio file the speech: " + wrapper; Debug.LogError(errorMessage); onErrorInfo(wrapper, errorMessage); } } else { string errorMessage = "Could not generate the speech: " + wrapper + System.Environment.NewLine + "WWW error: " + www.error; Debug.LogError(errorMessage); onErrorInfo(wrapper, errorMessage); } } } else { string errorMessage = "The generated audio file is invalid: " + wrapper; Debug.LogError(errorMessage); onErrorInfo(wrapper, errorMessage); } } else { string errorMessage = "'Source' is null: " + wrapper; Debug.LogError(errorMessage); onErrorInfo(wrapper, errorMessage); } }
// Starts a coroutine that downloads the audio and hands the resulting clip to the callback.
public void LoadAudio(string audio, AudioType type, Action <AudioClip> callback) { StartCoroutine(WebUtil.Download(audio, type, callback)); }
/// <summary>
/// Called when the engine service user gets the proper recording format of the STT engine.
/// The recording format is used for creating the recorder.
/// </summary>
/// <param name="types">The format used by the recorder.</param>
/// <param name="rate">The sample rate used by the recorder.</param>
/// <param name="channels">The number of channels used by the recorder.</param>
/// <returns>
/// The following error codes can be returned:
/// 1. None
/// 2. InvalidState
/// </returns>
/// <remarks>Despite the plural name, <paramref name="types"/> yields a single AudioType value.</remarks>
/// <since_tizen> 4 </since_tizen>
public abstract Error GetRecordingFormat(out AudioType types, out int rate, out int channels);
/// <summary>
/// Parses a Unity AudioClip asset. Pre-5 files use the legacy format/type/3D layout;
/// 5+ files carry load-type/compression metadata instead. With readSwitch set, the raw
/// audio bytes are read — from an external resource (.resS) file when one is referenced
/// (located next to the source file or found recursively, else via the preloaded resource
/// readers), otherwise inline from the asset stream. Without readSwitch, only display
/// info is produced: a file extension and a compression-format description per type.
/// </summary>
public AudioClip(AssetPreloadData preloadData, bool readSwitch) { var sourceFile = preloadData.sourceFile; var reader = preloadData.InitReader(); m_Name = reader.ReadAlignedString(); version5 = sourceFile.version[0] >= 5; if (sourceFile.version[0] < 5) { m_Format = reader.ReadInt32(); //channels?
m_Type = (AudioType)reader.ReadInt32(); m_3D = reader.ReadBoolean(); m_UseHardware = reader.ReadBoolean(); reader.Position += 2; //4 byte alignment
if (sourceFile.version[0] >= 4 || (sourceFile.version[0] == 3 && sourceFile.version[1] >= 2)) //3.2.0 to 5
{ int m_Stream = reader.ReadInt32(); m_Size = reader.ReadInt32(); var tsize = m_Size % 4 != 0 ? m_Size + 4 - m_Size % 4 : m_Size; //TODO: Need more test
if (preloadData.Size + preloadData.Offset - reader.Position != tsize) { m_Offset = reader.ReadInt32(); m_Source = sourceFile.filePath + ".resS"; } } else { m_Size = reader.ReadInt32(); } } else { m_LoadType = reader.ReadInt32(); //Decompress on load, Compressed in memory, Streaming
m_Channels = reader.ReadInt32(); m_Frequency = reader.ReadInt32(); m_BitsPerSample = reader.ReadInt32(); m_Length = reader.ReadSingle(); m_IsTrackerFormat = reader.ReadBoolean(); reader.Position += 3; m_SubsoundIndex = reader.ReadInt32(); m_PreloadAudioData = reader.ReadBoolean(); m_LoadInBackground = reader.ReadBoolean(); m_Legacy3D = reader.ReadBoolean(); reader.Position += 1; m_3D = m_Legacy3D; m_Source = reader.ReadAlignedString(); m_Offset = reader.ReadInt64(); m_Size = reader.ReadInt64(); m_CompressionFormat = (AudioCompressionFormat)reader.ReadInt32(); } if (readSwitch) { if (!string.IsNullOrEmpty(m_Source)) { var resourceFileName = Path.GetFileName(m_Source); var resourceFilePath = Path.GetDirectoryName(sourceFile.filePath) + "\\" + resourceFileName; if (!File.Exists(resourceFilePath)) { var findFiles = Directory.GetFiles(Path.GetDirectoryName(sourceFile.filePath), resourceFileName, SearchOption.AllDirectories); if (findFiles.Length > 0) { resourceFilePath = findFiles[0]; } } if (File.Exists(resourceFilePath)) { using (var resourceReader = new BinaryReader(File.OpenRead(resourceFilePath))) { resourceReader.BaseStream.Position = m_Offset; m_AudioData = resourceReader.ReadBytes((int)m_Size); } } else { if (Studio.resourceFileReaders.TryGetValue(resourceFileName.ToUpper(), out var resourceReader)) { resourceReader.Position = m_Offset; m_AudioData = resourceReader.ReadBytes((int)m_Size); } else { MessageBox.Show($"can't find the resource file {resourceFileName}"); } } } else { if (m_Size > 0) { m_AudioData = reader.ReadBytes((int)m_Size); } } } else { preloadData.InfoText = "Compression format: "; if (sourceFile.version[0] < 5) { switch (m_Type) { case AudioType.ACC: preloadData.extension = ".m4a"; preloadData.InfoText += "Acc"; break; case AudioType.AIFF: preloadData.extension = ".aif"; preloadData.InfoText += "AIFF"; break; case AudioType.IT: preloadData.extension = ".it"; preloadData.InfoText += "Impulse tracker"; break; case AudioType.MOD: preloadData.extension = ".mod"; preloadData.InfoText += "Protracker / Fasttracker MOD"; break; case AudioType.MPEG: preloadData.extension = ".mp3"; preloadData.InfoText += "MP2/MP3 MPEG"; break; case AudioType.OGGVORBIS: preloadData.extension = ".ogg"; preloadData.InfoText += "Ogg vorbis"; break; case AudioType.S3M: preloadData.extension = ".s3m"; preloadData.InfoText += "ScreamTracker 3"; break; case AudioType.WAV: preloadData.extension = ".wav"; preloadData.InfoText += "Microsoft WAV"; break; case AudioType.XM: preloadData.extension = ".xm"; preloadData.InfoText += "FastTracker 2 XM"; break; case AudioType.XMA: preloadData.extension = ".wav"; preloadData.InfoText += "Xbox360 XMA"; break; case AudioType.VAG: preloadData.extension = ".vag"; preloadData.InfoText += "PlayStation Portable ADPCM"; break; case AudioType.AUDIOQUEUE: preloadData.extension = ".fsb"; preloadData.InfoText += "iPhone"; break; } } else { switch (m_CompressionFormat) { case AudioCompressionFormat.PCM: preloadData.extension = ".fsb"; preloadData.InfoText += "PCM"; break; case AudioCompressionFormat.Vorbis: preloadData.extension = ".fsb"; preloadData.InfoText += "Vorbis"; break; case AudioCompressionFormat.ADPCM: preloadData.extension = ".fsb"; preloadData.InfoText += "ADPCM"; break; case AudioCompressionFormat.MP3: preloadData.extension = ".fsb"; preloadData.InfoText += "MP3"; break; case AudioCompressionFormat.VAG: preloadData.extension = ".vag"; preloadData.InfoText += "PlayStation Portable ADPCM"; break; case AudioCompressionFormat.HEVAG: preloadData.extension = ".vag"; preloadData.InfoText += "PSVita ADPCM"; break; case AudioCompressionFormat.XMA: preloadData.extension = ".wav"; preloadData.InfoText += "Xbox360 XMA"; break; case AudioCompressionFormat.AAC: preloadData.extension = ".m4a"; preloadData.InfoText += "AAC"; break; case AudioCompressionFormat.GCADPCM: preloadData.extension = ".fsb"; preloadData.InfoText += "Nintendo 3DS/Wii DSP"; break; case AudioCompressionFormat.ATRAC9: preloadData.extension = ".at9"; preloadData.InfoText += "PSVita ATRAC9"; break; } } if (preloadData.extension == null) { preloadData.extension = ".AudioClip"; preloadData.InfoText += "Unknown"; } preloadData.InfoText += "\n3D: " + m_3D; preloadData.Text = m_Name; if (m_Source != null) { preloadData.fullSize = preloadData.Size + (int)m_Size; } } }
// P/Invoke: allocates the native audio-clip download handler and returns its pointer.
private extern static IntPtr Create(DownloadHandlerAudioClip obj, string url, AudioType audioType);
// Convenience overload: uncompressed (compressed = false) variant of GetAudioClipInternal.
public AudioClip GetAudioClip(bool threeD, bool stream, AudioType audioType) { return(this.GetAudioClipInternal(threeD, stream, false, audioType)); }
// Retunes the source of the track registered for the given audio type.
public void AdjustPitch(AudioType _type, float p)
{
    // Local renamed from "m_track": the m_ prefix is reserved for members.
    AudioTrack track = m_AudioTable[_type];
    track.source.pitch = p;
}
// Engine-implemented (extern): builds the AudioClip from the downloaded data.
private extern AudioClip GetAudioClipInternal(bool threeD, bool stream, bool compressed, AudioType audioType);
// Plays a sound through a pooled AudioInstance.
public AudioInstance PlayAudio(AudioClip audioClip, bool loop = false, float volume = 1.0f, float pitch = 1.0f, AudioType audioType = AudioType.Effect)
{
    // Obtain a usable AudioInstance: recycle one from the pool, or register a newly instantiated prefab.
    var audioInst = _AudioInstancePool.GetRecycledObject() ?? _AudioInstancePool.RegisterRecyclableObject(Instantiate(_AudioInstancePrefabs));

    // Parent the instance under this AudioManager object.
    audioInst.transform.SetParent(transform);

    // Play the sound.
    return(audioInst.Play(audioClip, loop, volume, pitch, audioType));
}
// Creates the native handler and stores its pointer for subsequent interop calls.
private void InternalCreateAudioClip(string url, AudioType audioType) { m_Ptr = Create(this, url, audioType); }
// Enqueues a RESTART job for the given audio type, optionally fading, after an optional delay.
public void RestartAudio(AudioType _type, bool _fade = false, float _delay = 0) { AddJob(new AudioJob(AudioAction.RESTART, _type, _fade, _delay)); }
// Builds a download handler that decodes the response at the URL as the given audio type.
public DownloadHandlerAudioClip(string url, AudioType audioType) { InternalCreateAudioClip(url, audioType); }
// Uri overload: forwards the absolute URI string to the string-based setup path.
public DownloadHandlerAudioClip(Uri uri, AudioType audioType) { InternalCreateAudioClip(uri.AbsoluteUri, audioType); }
// Fills base info, then sets both Type and the AType alias from a guess based on the file name.
public override void fillInfo() { base.fillInfo(); Type = AType = VideoUtil.guessAudioType(Filename); }
/// <summary>
/// Pauses playback on the track registered for the given audio type.
/// </summary>
public void PauseAudio(AudioType _type)
{
    AudioTrack track = m_AudioTable[_type];
    // Fix: use Pause() rather than Stop() so playback can later resume from the
    // current position; Stop() resets the clip, contradicting this method's name.
    track.source.Pause();
}
// True while the number of sounds playing for this type is below its configured cap.
bool canPlay(AudioType type)
{
    int playing = audiosPlaying[type];
    int cap = audiosMaxCap[type];
    return playing < cap;
}
// Bridges the finished UnityWebRequest's download handler into an audio-clip object.
internal Object GetAudioClipInternal(bool threeD, bool stream, bool compressed, AudioType audioType) { return(WebRequestWWW.InternalCreateAudioClipUsingDH(this._uwr.downloadHandler, this._uwr.url, stream, compressed, audioType)); }
// Builds an audio message for a single recipient and sends it when construction succeeds.
public void SendMessageAudio(string to, byte[] audioData, AudioType audtype)
{
    FMessage message = getFmessageAudio(to, audioData, audtype);
    if (message == null)
    {
        return;
    }
    SendMessage(message);
}
// Builds a pre-authorized GET StorageRequest whose response is decoded as an audio clip
// of the given type (WAV by default).
public static StorageRequest GetAuthorizedStorageRequestAudioClip(StorageServiceClient client, string resourcePath = "", Dictionary <string, string> queryParams = null, Dictionary <string, string> headers = null, int contentLength = 0, AudioType audioType = AudioType.WAV) { string requestUrl = RequestUrl(client, queryParams, resourcePath); StorageRequest request = new StorageRequest(UnityWebRequestMultimedia.GetAudioClip(requestUrl, audioType)); request.AuthorizeRequest(client, Method.GET, resourcePath, queryParams, headers, contentLength); return(request); }
/// <summary>
/// Builds an audio FMessage: resolves the JID, picks MIME type/extension from the audio
/// type (mp3 by default), hashes the payload with SHA-256 (base64), uploads it, and wraps
/// the upload response in an FMessage. Returns null when the upload fails.
/// </summary>
// NOTE(review): HashAlgorithm.Create("sha256") depends on the runtime resolving the
// lower-case algorithm name — verify it does not return null on the target framework.
protected FMessage getFmessageAudio(string to, byte[] audioData, AudioType audtype) { to = GetJID(to); string type = string.Empty; string extension = string.Empty; switch (audtype) { case AudioType.WAV: type = "audio/wav"; extension = "wav"; break; case AudioType.OGG: type = "audio/ogg"; extension = "ogg"; break; default: type = "audio/mpeg"; extension = "mp3"; break; } //create hash
string filehash = string.Empty; using (HashAlgorithm sha = HashAlgorithm.Create("sha256")) { byte[] raw = sha.ComputeHash(audioData); filehash = Convert.ToBase64String(raw); } //request upload
WaUploadResponse response = this.UploadFile(filehash, "audio", audioData.Length, audioData, to, type, extension); if (response != null && !String.IsNullOrEmpty(response.url)) { //send message
FMessage msg = new FMessage(to, true) { media_wa_type = FMessage.Type.Audio, media_mime_type = response.mimetype, media_name = response.url.Split('/').Last(), media_size = response.size, media_url = response.url, media_duration_seconds = response.duration }; return msg; } return null; }
// Resolves the output location for this file using the default "_audio" suffix.
public static FileInfo OutputLocation(this FileInfo file, AudioType type)
{
    return OutputLocation(file, type, "_audio");
}
/// <summary>
/// Reads this track's settings from the node's "Audio" child: Filename is required;
/// Volume, Mute, TrackType (parsed as AudioType), Begin, End, FadeIn, FadeOut and Loop
/// are all optional attributes applied only when present.
/// </summary>
public override void InitializeFromXml(System.Xml.XmlNode node) { XmlNode audio = node["Audio"]; filename = audio.Attributes["Filename"].Value; if (audio.Attributes["Volume"] != null) { volume = Convert.ToInt32(audio.Attributes["Volume"].Value); } if (audio.Attributes["Mute"] != null) { mute = Convert.ToBoolean(audio.Attributes["Mute"].Value); } if (audio.Attributes["TrackType"] != null) { trackType = (AudioType)Enum.Parse(typeof(AudioType), audio.Attributes["TrackType"].Value); } if (audio.Attributes["Begin"] != null) { begin = double.Parse(audio.Attributes["Begin"].Value); } if (audio.Attributes["End"] != null) { end = double.Parse(audio.Attributes["End"].Value); } if (audio.Attributes["FadeIn"] != null) { fadeIn = double.Parse(audio.Attributes["FadeIn"].Value); } if (audio.Attributes["FadeOut"] != null) { fadeOut = double.Parse(audio.Attributes["FadeOut"].Value); } if (audio.Attributes["Loop"] != null) { loop = bool.Parse(audio.Attributes["Loop"].Value); } }
// Chains to the primary constructor, defaulting its final parameter to 0 (that
// parameter's meaning is declared at the target constructor — confirm there).
private VideoInfo(int formatCode, VideoType videoType, int resolution, bool is3D, AudioType audioType, int audioBitrate, AdaptiveType adaptiveType) : this(formatCode, videoType, resolution, is3D, audioType, audioBitrate, adaptiveType, 0) { }