// Tears down the current outgoing voice: unhooks system-change notifications,
// snapshots the detector threshold, removes the voice from the client, releases
// the input source, and optionally notifies components on this GameObject.
private void RemoveVoice(bool sendUnityMsg)
{
    // Stop listening for Unity audio configuration changes while no voice exists.
    if (this.subscribedToSystemChanges)
    {
        AudioSettings.OnAudioConfigurationChanged -= this.OnAudioConfigChanged;
        this.subscribedToSystemChanges = false;
    }

    // Preserve the detector's current threshold so a re-created voice can restore it.
    var detector = this.VoiceDetector;
    if (detector != null)
    {
        this.voiceDetectionThreshold = detector.Threshold;
    }

    // Unregister the outgoing stream unless it is the shared no-op dummy.
    bool hasRealVoice = this.voice != LocalVoiceAudioDummy.Dummy;
    if (hasRealVoice)
    {
        this.voice.RemoveSelf();
    }

    // Release the audio input (microphone, clip or factory-provided source).
    var source = this.inputSource;
    if (source != null)
    {
        source.Dispose();
        this.inputSource = null;
    }

    if (sendUnityMsg)
    {
        // Broadcast to sibling components; a receiver is optional.
        this.gameObject.SendMessage("PhotonVoiceRemoved", SendMessageOptions.DontRequireReceiver);
    }

    this.isRecording = false;
    this.RequiresRestart = false;
}
// Tears down the current outgoing voice: unsubscribes from system changes,
// snapshots detector state, remembers the interest group, removes the voice,
// releases the input source and optionally notifies the GameObject.
private void RemoveVoice(bool sendUnityMsg)
{
    if (this.subscribedToSystemChanges)
    {
        this.UnsubscribeFromSystemChanges();
    }

    // Snapshot detector settings so they survive voice re-creation.
    this.GetThresholdFromDetector();
    this.GetStatusFromDetector();
    this.GetActivityDelayFromDetector();

    var currentVoice = this.voice;
    if (currentVoice != LocalVoiceAudioDummy.Dummy)
    {
        this.interestGroup = currentVoice.InterestGroup;
        // Debug echo only makes sense for interest group 0; drop it otherwise.
        if (this.debugEchoMode && this.interestGroup != 0)
        {
            this.debugEchoMode = false;
        }
        currentVoice.RemoveSelf();
        this.voice = LocalVoiceAudioDummy.Dummy;
    }

    var source = this.inputSource;
    if (source != null)
    {
        source.Dispose();
        this.inputSource = null;
    }

    if (sendUnityMsg)
    {
        // Receiver is optional — do not require one.
        this.gameObject.SendMessage("PhotonVoiceRemoved", SendMessageOptions.DontRequireReceiver);
    }

    this.isRecording = false;
    this.RequiresRestart = false;
}
/// <summary>
/// Creates outgoing audio stream of type automatically assigned and adds procedures (callback or serviceable) for consuming given audio source data.
/// Adds audio specific features (e.g. resampling, level meter) to processing pipeline and to returning stream handler.
/// </summary>
/// <param name="voiceInfo">Outgoing audio stream parameters. Set applicable fields to read them by encoder and by receiving client when voice created.</param>
/// <param name="source">Streaming audio source.</param>
/// <param name="forceShort">For audio sources producing buffers of 'float' type, creates stream of 'short' type and adds converter.</param>
/// <param name="channelId">Transport channel specific to transport.</param>
/// <param name="encoder">Audio encoder. Set to null to use default Opus encoder.</param>
/// <returns>Outgoing stream handler, or <c>LocalVoiceAudioDummy.Dummy</c> for unsupported source types.</returns>
/// <remarks>
/// audioSourceDesc.SamplingRate and voiceInfo.SamplingRate may not match. Automatic resampling will occur in this case.
/// </remarks>
public LocalVoice CreateLocalVoiceAudioFromSource(VoiceInfo voiceInfo, IAudioDesc source, bool forceShort = false, int channelId = 0, IEncoder encoder = null)
{
    if (source is IAudioPusher<float>)
    {
        if (forceShort)
        {
            var localVoice = CreateLocalVoiceAudio<short>(voiceInfo, source, channelId, encoder);
            // FIX: this declaration was commented out while 'bufferFactory' was still
            // passed to SetCallback below, which does not compile. We can safely reuse
            // the same buffer in callbacks from native code, hence the reusable-array factory.
            var bufferFactory = new FactoryReusableArray<float>(0);
            ((IAudioPusher<float>)source).SetCallback(buf =>
            {
                // Convert the float buffer to short before pushing to the voice.
                var shortBuf = localVoice.BufferFactory.New(buf.Length);
                AudioUtil.Convert(buf, shortBuf, buf.Length);
                localVoice.PushDataAsync(shortBuf);
            }, bufferFactory);
            return localVoice;
        }
        else
        {
            var localVoice = CreateLocalVoiceAudio<float>(voiceInfo, source, channelId, encoder);
            // No conversion needed: push float buffers straight through.
            ((IAudioPusher<float>)source).SetCallback(buf => localVoice.PushDataAsync(buf), localVoice.BufferFactory);
            return localVoice;
        }
    }
    else if (source is IAudioPusher<short>)
    {
        var localVoice = CreateLocalVoiceAudio<short>(voiceInfo, source, channelId, encoder);
        ((IAudioPusher<short>)source).SetCallback(buf => localVoice.PushDataAsync(buf), localVoice.BufferFactory);
        return localVoice;
    }
    else if (source is IAudioReader<float>)
    {
        if (forceShort)
        {
            transport.LogInfo("[PV] Creating local voice with source samples type conversion from float to short.");
            var localVoice = CreateLocalVoiceAudio<short>(voiceInfo, source, channelId, encoder);
            // Reader sources are polled via a serviceable adapter instead of a callback.
            localVoice.LocalUserServiceable = new BufferReaderPushAdapterAsyncPoolFloatToShort(localVoice, source as IAudioReader<float>);
            return localVoice;
        }
        else
        {
            var localVoice = CreateLocalVoiceAudio<float>(voiceInfo, source, channelId, encoder);
            localVoice.LocalUserServiceable = new BufferReaderPushAdapterAsyncPool<float>(localVoice, source as IAudioReader<float>);
            return localVoice;
        }
    }
    else if (source is IAudioReader<short>)
    {
        var localVoice = CreateLocalVoiceAudio<short>(voiceInfo, source, channelId, encoder);
        localVoice.LocalUserServiceable = new BufferReaderPushAdapterAsyncPool<short>(localVoice, source as IAudioReader<short>);
        return localVoice;
    }
    else
    {
        transport.LogError("[PV] CreateLocalVoiceAudioFromSource does not support Voice.IAudioDesc of type {0}", source.GetType());
        return LocalVoiceAudioDummy.Dummy;
    }
}
// Removes the outgoing voice stream, disposes the input source and optionally
// notifies components on this GameObject; clears the initialized flag.
private void RemoveVoice(bool sendUnityMsg)
{
    var currentVoice = this.voice;
    if (currentVoice != LocalVoiceAudioDummy.Dummy)
    {
        // Only a real (non-dummy) voice needs unregistering.
        currentVoice.RemoveSelf();
    }

    var source = this.inputSource;
    if (source != null)
    {
        source.Dispose();
        this.inputSource = null;
    }

    if (sendUnityMsg)
    {
        // Receiver is optional — do not require one.
        this.gameObject.SendMessage("PhotonVoiceRemoved", SendMessageOptions.DontRequireReceiver);
    }

    this.IsInitialized = false;
}
// Creates the audio input source selected by SourceType (Photon/Unity microphone,
// AudioClip, or user-supplied factory) and wraps it into an outgoing local voice.
// Returns LocalVoiceAudioDummy.Dummy when no usable source could be created.
// NOTE: platform-specific branches below are compile-time (#if); only one Photon
// microphone implementation exists per build target.
private LocalVoice CreateLocalVoiceAudioAndSource()
{
    switch (SourceType)
    {
        case InputSourceType.Microphone:
        {
            if (this.MicrophoneType == MicType.Photon)
            {
#if UNITY_STANDALONE_WIN || UNITY_STANDALONE_OSX || UNITY_EDITOR_OSX || UNITY_EDITOR_WIN
                // Photon enumerator device index is only meaningful on desktop platforms.
                var hwMicDev = this.PhotonMicrophoneDeviceId;
                if (this.Logger.IsInfoEnabled) { this.Logger.LogInfo("Setting recorder's source to Photon microphone device [{0}] \"{1}\"", hwMicDev, PhotonMicrophoneEnumerator.NameAtIndex(hwMicDev)); }
#else
                if (this.Logger.IsInfoEnabled) { this.Logger.LogInfo("Setting recorder's source to Photon microphone device"); }
#endif
#if UNITY_STANDALONE_WIN && !UNITY_EDITOR || UNITY_EDITOR_WIN
                if (this.Logger.IsInfoEnabled) { this.Logger.LogInfo("Setting recorder's source to WindowsAudioInPusher"); }
                inputSource = new Windows.WindowsAudioInPusher(hwMicDev, this.Logger);
#elif UNITY_IOS && !UNITY_EDITOR
                if (this.Logger.IsInfoEnabled) { this.Logger.LogInfo("Setting recorder's source to IOS.AudioInPusher with session {0}", audioSessionParameters); }
                inputSource = new IOS.AudioInPusher(audioSessionParameters, this.Logger);
#elif UNITY_STANDALONE_OSX && !UNITY_EDITOR || UNITY_EDITOR_OSX
                if (this.Logger.IsInfoEnabled) { this.Logger.LogInfo("Setting recorder's source to MacOS.AudioInPusher"); }
                inputSource = new MacOS.AudioInPusher(hwMicDev, this.Logger);
#elif UNITY_ANDROID && !UNITY_EDITOR
                if (this.Logger.IsInfoEnabled) { this.Logger.LogInfo("Setting recorder's source to UnityAndroidAudioInAEC"); }
                inputSource = new UnityAndroidAudioInAEC(this.Logger);
#else
                // Unsupported platform: AudioDesc carries the error string for the check below.
                inputSource = new AudioDesc(0, 0, "Photon microphone type is not supported for the current platform.");
#endif
                // On success we are done with source creation; otherwise fall through
                // to the Unity microphone below.
                if (inputSource.Error == null) { break; }
                if (this.Logger.IsErrorEnabled) { this.Logger.LogError("Photon microphone input source creation failure: {0}. Falling back to Unity microphone", inputSource.Error); }
            }
            // Unity microphone path (also the fallback for a failed Photon mic).
            if (Microphone.devices.Length < 1) { if (this.Logger.IsInfoEnabled) { this.Logger.LogInfo("No Microphone"); } return(LocalVoiceAudioDummy.Dummy); }
            var micDev = this.UnityMicrophoneDevice;
            if (this.Logger.IsInfoEnabled) { this.Logger.LogInfo("Setting recorder's source to Unity microphone device {0}", micDev); }
            // mic can ignore passed sampling rate and set its own
            inputSource = new MicWrapper(micDev, (int)SamplingRate, this.Logger);
            if (inputSource.Error != null && this.Logger.IsErrorEnabled) { this.Logger.LogError("Unity microphone input source creation failure: {0}.", inputSource.Error); }
        }
        break;
        case InputSourceType.AudioClip:
        {
            if (AudioClip == null) { if (this.Logger.IsErrorEnabled) { this.Logger.LogError("AudioClip property must be set for AudioClip audio source"); } return(LocalVoiceAudioDummy.Dummy); }
            inputSource = new AudioClipWrapper(AudioClip); // never fails, no need to check Error
            if (this.LoopAudioClip) { ((AudioClipWrapper)inputSource).Loop = true; }
        }
        break;
        case InputSourceType.Factory:
        {
            if (InputFactory == null) { if (this.Logger.IsErrorEnabled) { this.Logger.LogError("Recorder.InputFactory must be specified if Recorder.Source set to Factory"); } return(LocalVoiceAudioDummy.Dummy); }
            inputSource = InputFactory();
            if (inputSource.Error != null && this.Logger.IsErrorEnabled) { this.Logger.LogError("InputFactory creation failure: {0}.", inputSource.Error); }
        }
        break;
        default:
            if (this.Logger.IsErrorEnabled) { this.Logger.LogError("unknown Source value {0}", SourceType); }
            return(LocalVoiceAudioDummy.Dummy);
    }
    // A source that was created but reported an error is unusable.
    if (this.inputSource == null || this.inputSource.Error != null) { return(LocalVoiceAudioDummy.Dummy); }
    VoiceInfo voiceInfo = VoiceInfo.CreateAudioOpus(SamplingRate, inputSource.Channels, FrameDuration, Bitrate, UserData);
    return(client.CreateLocalVoiceAudioFromSource(voiceInfo, inputSource, forceShort));
}
/// <summary>
/// Creates an outgoing audio stream and wires audio-specific pipeline features
/// (e.g. resampling, level meter) into the returned handler.
/// </summary>
/// <typeparam name="T">Element type of audio array buffers.</typeparam>
/// <param name="voiceInfo">Outgoing audio stream parameters. Set applicable fields to read them by encoder and by receiving client when voice created.</param>
/// <param name="audioSourceDesc">Audio source description (sampling rate, channels).</param>
/// <param name="channelId">Transport channel specific to transport.</param>
/// <param name="encoder">Audio encoder. Set to null to use default Opus encoder.</param>
/// <returns>Outgoing stream handler.</returns>
/// <remarks>
/// audioSourceDesc.SamplingRate and voiceInfo.SamplingRate may not match. Automatic resampling will occur in this case.
/// </remarks>
public LocalVoiceAudio<T> CreateLocalVoiceAudio<T>(VoiceInfo voiceInfo, IAudioDesc audioSourceDesc, int channelId = 0, IEncoder encoder = null)
{
    // Delegate voice-id allocation to createLocalVoice; the factory lambda builds
    // the typed audio voice once an id/channel pair has been assigned.
    LocalVoice created = createLocalVoice(voiceInfo, channelId, (vId, chId) => LocalVoiceAudio<T>.Create(this, vId, encoder, voiceInfo, audioSourceDesc, chId));
    return (LocalVoiceAudio<T>)created;
}
// Creates the audio input source selected by SourceType (Photon/Unity microphone,
// AudioClip, or user-supplied factory) and wraps it into an outgoing local voice.
// Returns LocalVoiceAudioDummy.Dummy when no usable source could be created.
// NOTE(review): unlike the newer SDK variant, this version does not check
// inputSource.Error after creation (e.g. for the Factory path) — confirm whether
// failed sources are expected to surface later in the pipeline.
private LocalVoice CreateLocalVoiceAudioAndSource()
{
    switch (SourceType)
    {
        case InputSourceType.Microphone:
        {
            if (this.MicrophoneType == MicType.Photon)
            {
#if UNITY_STANDALONE_WIN || UNITY_STANDALONE_OSX
                // Photon enumerator device index is only meaningful on desktop platforms.
                var hwMicDev = this.PhotonMicrophoneDeviceId;
#else
                var hwMicDev = -1;
#endif
#if UNITY_STANDALONE_WIN && !UNITY_EDITOR || UNITY_EDITOR_WIN
                if (this.Logger.IsInfoEnabled) { this.Logger.LogInfo("Setting recorder's source to Photon microphone device {0}", hwMicDev); }
                inputSource = new Windows.WindowsAudioInPusher(hwMicDev);
                if (this.Logger.IsInfoEnabled) { this.Logger.LogInfo("Setting recorder's source to WindowsAudioInPusher"); }
                // break leaves the Microphone case entirely — Unity mic is skipped.
                break;
#elif (UNITY_IOS || UNITY_STANDALONE_OSX) && !UNITY_EDITOR || UNITY_EDITOR_OSX
                inputSource = new Apple.AppleAudioInPusher(hwMicDev);
                if (this.Logger.IsInfoEnabled) { this.Logger.LogInfo("Setting recorder's source to AppleAudioInPusher"); }
                break;
#elif UNITY_ANDROID && !UNITY_EDITOR
                inputSource = new UnityAndroidAudioInAEC();
                if (this.Logger.IsInfoEnabled) { this.Logger.LogInfo("Setting recorder's source to UnityAndroidAudioInAEC"); }
                break;
#else
                // Unsupported platform: fall through to the Unity microphone below.
                if (this.Logger.IsWarningEnabled) { this.Logger.LogWarning("Photon microphone type is not supported for the current platform. Using Unity microphone."); }
#endif
            }
            // Unity microphone path (also the fallback on unsupported platforms).
            if (Microphone.devices.Length < 1) { if (this.Logger.IsInfoEnabled) { this.Logger.LogInfo("No Microphone"); } return(LocalVoiceAudioDummy.Dummy); }
            var micDev = this.UnityMicrophoneDevice;
            if (this.Logger.IsInfoEnabled) { this.Logger.LogInfo("Setting recorder's source to microphone device {0}", micDev); }
            // mic can ignore passed sampling rate and set it's own
            inputSource = new MicWrapper(micDev, (int)SamplingRate);
        }
        break;
        case InputSourceType.AudioClip:
        {
            if (AudioClip == null) { if (this.Logger.IsErrorEnabled) { this.Logger.LogError("AudioClip property must be set for AudioClip audio source"); } return(LocalVoiceAudioDummy.Dummy); }
            inputSource = new AudioClipWrapper(AudioClip);
            if (this.LoopAudioClip) { ((AudioClipWrapper)inputSource).Loop = true; }
        }
        break;
        case InputSourceType.Factory:
        {
            if (InputFactory == null) { if (this.Logger.IsErrorEnabled) { this.Logger.LogError("Recorder.InputFactory must be specified if Recorder.Source set to Factory"); } return(LocalVoiceAudioDummy.Dummy); }
            inputSource = InputFactory();
        }
        break;
        default:
            if (this.Logger.IsErrorEnabled) { this.Logger.LogError("unknown Source value {0}", SourceType); }
            return(LocalVoiceAudioDummy.Dummy);
    }
    // This overload also passes the source's own sampling rate so the encoder can resample.
    VoiceInfo voiceInfo = VoiceInfo.CreateAudioOpus(SamplingRate, inputSource.SamplingRate, inputSource.Channels, FrameDuration, Bitrate, UserData);
    return(client.CreateLocalVoiceAudioFromSource(voiceInfo, inputSource, forceShort));
}
// Builds the short-sample outgoing audio voice: installs the level meter and the
// voice-activity detector, then wires the built-in processing pipeline.
internal LocalVoiceAudioShort(VoiceClient voiceClient, IEncoder encoder, byte id, VoiceInfo voiceInfo, IAudioDesc audioSourceDesc, int channelId)
    : base(voiceClient, encoder, id, voiceInfo, audioSourceDesc, channelId)
{
    var rate = this.info.SamplingRate;
    var ch = this.info.Channels;
    // These two processors are placed after the resampler stage.
    this.levelMeter = new AudioUtil.LevelMeterShort(rate, ch); // ~1/2 sec window
    this.voiceDetector = new AudioUtil.VoiceDetectorShort(rate, ch);
    initBuiltinProcessors();
}
// Base audio voice constructor. The frame size passed to the base class is scaled
// by the ratio of source to encoder sampling rates (guarding against a zero
// encoder rate), so the source-side buffer matches one encoder frame after resampling.
internal LocalVoiceAudio(VoiceClient voiceClient, IEncoder encoder, byte id, VoiceInfo voiceInfo, IAudioDesc audioSourceDesc, int channelId)
    : base(voiceClient, encoder, id, voiceInfo, channelId,
        voiceInfo.SamplingRate == 0
            ? voiceInfo.FrameSize
            : voiceInfo.FrameSize * audioSourceDesc.SamplingRate / voiceInfo.SamplingRate)
{
    this.channels = voiceInfo.Channels;
    bool ratesDiffer = audioSourceDesc.SamplingRate != voiceInfo.SamplingRate;
    if (ratesDiffer)
    {
        // Enable resampling and warn: source and encoder rates disagree.
        this.resampleSource = true;
        this.voiceClient.transport.LogWarning("[PV] Local voice #" + this.id + " audio source frequency " + audioSourceDesc.SamplingRate + " and encoder sampling rate " + voiceInfo.SamplingRate + " do not match. Resampling will occur before encoding.");
    }
}
/// <summary>Create a new LocalVoiceAudio&lt;T&gt; instance.</summary>
/// <param name="voiceClient">The VoiceClient to use for this outgoing stream.</param>
/// <param name="voiceId">Numeric ID for this voice.</param>
/// <param name="encoder">Encoder to use for this voice.</param>
/// <param name="voiceInfo">Outgoing stream parameters.</param>
/// <param name="audioSourceDesc">Audio source description.</param>
/// <param name="channelId">Voice transport channel ID to use for this voice.</param>
/// <returns>The new LocalVoiceAudio&lt;T&gt; instance.</returns>
/// <exception cref="UnsupportedSampleTypeException">T is neither float nor short.</exception>
public static LocalVoiceAudio<T> Create(VoiceClient voiceClient, byte voiceId, IEncoder encoder, VoiceInfo voiceInfo, IAudioDesc audioSourceDesc, int channelId)
{
    // Dispatch on the sample type; only float and short pipelines exist.
    var sampleType = typeof(T);
    if (sampleType == typeof(float))
    {
        return new LocalVoiceAudioFloat(voiceClient, encoder, voiceId, voiceInfo, audioSourceDesc, channelId) as LocalVoiceAudio<T>;
    }
    if (sampleType == typeof(short))
    {
        return new LocalVoiceAudioShort(voiceClient, encoder, voiceId, voiceInfo, audioSourceDesc, channelId) as LocalVoiceAudio<T>;
    }
    throw new UnsupportedSampleTypeException(typeof(T));
}
/// <summary>Create a new LocalVoiceAudio&lt;T&gt; instance.</summary>
/// <param name="voiceClient">The VoiceClient to use for this outgoing stream.</param>
/// <param name="voiceId">Numeric ID for this voice.</param>
/// <param name="encoder">Encoder to use for this voice; must be null or an IEncoderDirect of the matching sample type.</param>
/// <param name="voiceInfo">Outgoing stream parameters.</param>
/// <param name="audioSourceDesc">Audio source description.</param>
/// <param name="channelId">Voice transport channel ID to use for this voice.</param>
/// <returns>The new LocalVoiceAudio&lt;T&gt; instance.</returns>
/// <exception cref="ArgumentException">The encoder is not an IEncoderDirect of the matching sample type.</exception>
/// <exception cref="UnsupportedSampleTypeException">T is neither float nor short.</exception>
public static LocalVoiceAudio<T> Create(VoiceClient voiceClient, byte voiceId, IEncoder encoder, VoiceInfo voiceInfo, IAudioDesc audioSourceDesc, int channelId)
{
    if (typeof(T) == typeof(float))
    {
        // A null encoder means "use the default"; otherwise it must consume float buffers directly.
        if (encoder == null || encoder is IEncoderDirect<float>)
        {
            return new LocalVoiceAudioFloat(voiceClient, encoder as IEncoderDirect<float>, voiceId, voiceInfo, audioSourceDesc, channelId) as LocalVoiceAudio<T>;
        }
        // FIX: throw a specific exception type instead of bare System.Exception
        // (still caught by existing catch (Exception) handlers).
        throw new ArgumentException("[PV] CreateLocalVoice: encoder for LocalVoiceAudio<float> is not IEncoderDirect<float>: " + encoder.GetType());
    }
    else if (typeof(T) == typeof(short))
    {
        if (encoder == null || encoder is IEncoderDirect<short>)
        {
            return new LocalVoiceAudioShort(voiceClient, encoder as IEncoderDirect<short>, voiceId, voiceInfo, audioSourceDesc, channelId) as LocalVoiceAudio<T>;
        }
        throw new ArgumentException("[PV] CreateLocalVoice: encoder for LocalVoiceAudio<short> is not IEncoderDirect<short>: " + encoder.GetType());
    }
    else
    {
        throw new UnsupportedSampleTypeException(typeof(T));
    }
}
}

// Dummy constructor
// NOTE(review): metadata stub from a decompiled (IL2CPP) dump — the body is empty and
// does not chain to a base constructor; not intended to compile or run as-is.
internal LocalVoiceAudioShort(VoiceClient voiceClient, IEncoder encoder, byte id, VoiceInfo voiceInfo, IAudioDesc audioSourceDesc, int channelId) { } // 0x0000000180F1EE20-0x0000000180F1EFA0
/// <summary>
/// Creates outgoing audio stream of type automatically assigned and adds procedures (callback or serviceable) for consuming given audio source data.
/// Adds audio specific features (e.g. resampling, level meter) to processing pipeline and to returning stream handler.
/// </summary>
/// <param name="voiceInfo">Outgoing audio stream parameters. Set applicable fields to read them by encoder and by receiving client when voice created.</param>
/// <param name="source">Streaming audio source.</param>
/// <param name="sampleType">Voice's audio sample type. If does not match source audio sample type, conversion will occur.</param>
/// <param name="encoder">Audio encoder. Set to null to use default Opus encoder.</param>
/// <param name="channelId">Transport channel specific to transport.</param>
/// <returns>Outgoing stream handler, or <c>LocalVoiceAudioDummy.Dummy</c> for unsupported source types.</returns>
/// <remarks>
/// audioSourceDesc.SamplingRate and voiceInfo.SamplingRate may not match. Automatic resampling will occur in this case.
/// </remarks>
public LocalVoice CreateLocalVoiceAudioFromSource(VoiceInfo voiceInfo, IAudioDesc source, AudioSampleType sampleType, IEncoder encoder = null, int channelId = 0)
{
    // Resolve AudioSampleType.Source to a concrete type for encoder creation.
    if (sampleType == AudioSampleType.Source)
    {
        if (source is IAudioPusher<float> || source is IAudioReader<float>)
        {
            sampleType = AudioSampleType.Float;
        }
        else if (source is IAudioPusher<short> || source is IAudioReader<short>)
        {
            sampleType = AudioSampleType.Short;
        }
    }

    // Fall back to the platform default encoder for the resolved sample type.
    if (encoder == null)
    {
        switch (sampleType)
        {
            case AudioSampleType.Float:
                encoder = Platform.CreateDefaultAudioEncoder<float>(transport, voiceInfo);
                break;
            case AudioSampleType.Short:
                encoder = Platform.CreateDefaultAudioEncoder<short>(transport, voiceInfo);
                break;
        }
    }

    if (source is IAudioPusher<float>)
    {
        if (sampleType == AudioSampleType.Short)
        {
            transport.LogInfo("[PV] Creating local voice with source samples type conversion from IAudioPusher float to short.");
            var localVoice = CreateLocalVoiceAudio<short>(voiceInfo, source, encoder, channelId);
            // FIX: this declaration was commented out while 'bufferFactory' was still
            // passed to SetCallback below, which does not compile. We can safely reuse
            // the same buffer in callbacks from native code, hence the reusable-array factory.
            var bufferFactory = new FactoryReusableArray<float>(0);
            ((IAudioPusher<float>)source).SetCallback(buf =>
            {
                var shortBuf = localVoice.BufferFactory.New(buf.Length);
                AudioUtil.Convert(buf, shortBuf, buf.Length);
                localVoice.PushDataAsync(shortBuf);
            }, bufferFactory);
            return localVoice;
        }
        else
        {
            var localVoice = CreateLocalVoiceAudio<float>(voiceInfo, source, encoder, channelId);
            ((IAudioPusher<float>)source).SetCallback(buf => localVoice.PushDataAsync(buf), localVoice.BufferFactory);
            return localVoice;
        }
    }
    else if (source is IAudioPusher<short>)
    {
        if (sampleType == AudioSampleType.Float)
        {
            transport.LogInfo("[PV] Creating local voice with source samples type conversion from IAudioPusher short to float.");
            var localVoice = CreateLocalVoiceAudio<float>(voiceInfo, source, encoder, channelId);
            // FIX: same defect as above — the factory must actually be declared.
            var bufferFactory = new FactoryReusableArray<short>(0);
            ((IAudioPusher<short>)source).SetCallback(buf =>
            {
                var floatBuf = localVoice.BufferFactory.New(buf.Length);
                AudioUtil.Convert(buf, floatBuf, buf.Length);
                localVoice.PushDataAsync(floatBuf);
            }, bufferFactory);
            return localVoice;
        }
        else
        {
            var localVoice = CreateLocalVoiceAudio<short>(voiceInfo, source, encoder, channelId);
            ((IAudioPusher<short>)source).SetCallback(buf => localVoice.PushDataAsync(buf), localVoice.BufferFactory);
            return localVoice;
        }
    }
    else if (source is IAudioReader<float>)
    {
        if (sampleType == AudioSampleType.Short)
        {
            transport.LogInfo("[PV] Creating local voice with source samples type conversion from IAudioReader float to short.");
            var localVoice = CreateLocalVoiceAudio<short>(voiceInfo, source, encoder, channelId);
            // Reader sources are polled via a serviceable adapter instead of a callback.
            localVoice.LocalUserServiceable = new BufferReaderPushAdapterAsyncPoolFloatToShort(localVoice, source as IAudioReader<float>);
            return localVoice;
        }
        else
        {
            var localVoice = CreateLocalVoiceAudio<float>(voiceInfo, source, encoder, channelId);
            localVoice.LocalUserServiceable = new BufferReaderPushAdapterAsyncPool<float>(localVoice, source as IAudioReader<float>);
            return localVoice;
        }
    }
    else if (source is IAudioReader<short>)
    {
        if (sampleType == AudioSampleType.Float)
        {
            transport.LogInfo("[PV] Creating local voice with source samples type conversion from IAudioReader short to float.");
            var localVoice = CreateLocalVoiceAudio<float>(voiceInfo, source, encoder, channelId);
            localVoice.LocalUserServiceable = new BufferReaderPushAdapterAsyncPoolShortToFloat(localVoice, source as IAudioReader<short>);
            return localVoice;
        }
        else
        {
            var localVoice = CreateLocalVoiceAudio<short>(voiceInfo, source, encoder, channelId);
            localVoice.LocalUserServiceable = new BufferReaderPushAdapterAsyncPool<short>(localVoice, source as IAudioReader<short>);
            return localVoice;
        }
    }
    else
    {
        transport.LogError("[PV] CreateLocalVoiceAudioFromSource does not support Voice.IAudioDesc of type {0}", source.GetType());
        return LocalVoiceAudioDummy.Dummy;
    }
}
// Methods
// NOTE(review): metadata stub from a decompiled (IL2CPP) dump — returns default
// (null for this reference type) instead of constructing a voice; the real
// implementation lives in the shipped assembly, not in this dump.
public static LocalVoiceAudio<T> Create(VoiceClient voiceClient, byte voiceId, IEncoder encoder, VoiceInfo voiceInfo, IAudioDesc audioSourceDesc, int channelId) => default;
}

// Dummy constructor
// NOTE(review): metadata stub from a decompiled (IL2CPP) dump — the body is empty and
// does not chain to a base constructor; not intended to compile or run as-is.
internal LocalVoiceAudio(VoiceClient voiceClient, IEncoder encoder, byte id, VoiceInfo voiceInfo, IAudioDesc audioSourceDesc, int channelId) { }