public override void OnInspectorGUI() { // Custom inspector; does not draw the default property editor VoiceLogger.ExposeLogLevel(serializedObject, connection); EditorGUI.BeginChangeCheck(); EditorGUILayout.PropertyField(updateIntervalSp, new GUIContent("Update Interval (ms)", "time [ms] between consecutive SendOutgoingCommands calls")); EditorGUILayout.PropertyField(enableSupportLoggerSp); #if !UNITY_ANDROID && !UNITY_IOS EditorGUILayout.PropertyField(runInBackground, new GUIContent("Run In Background", "Sets Unity's Application.runInBackground: Should the application keep running when the application is in the background?")); #endif #if !UNITY_IOS EditorGUILayout.PropertyField(keepAliveInBackgroundSp, new GUIContent("Background Timeout (ms)", "Defines for how long the Fallback Thread should keep the connection, before it may time out as usual.")); #endif EditorGUILayout.PropertyField(applyDontDestroyOnLoadSp, new GUIContent("Don't Destroy On Load", "Persists the GameObject across scenes using Unity's GameObject.DontDestroyOnLoad")); EditorGUILayout.PropertyField(primaryRecorderSp, new GUIContent("Primary Recorder", "Main Recorder to be used for transmission by default")); connection.SpeakerPrefab = (GameObject)EditorGUILayout.ObjectField(new GUIContent("Speaker Prefab", "Prefab that contains Speaker component to be instantiated when receiving a new remote audio source info"), connection.SpeakerPrefab, typeof(GameObject), false); this.DisplayAppSettings(); EditorGUILayout.PropertyField(statsResetInterval, new GUIContent("Stats Reset Interval (ms)", "time [ms] between statistics calculations")); if (EditorGUI.EndChangeCheck()) { serializedObject.ApplyModifiedProperties(); } if (Application.isPlaying) { this.DisplayVoiceStats(); this.DisplayDebugInfo(this.connection.Client); this.DisplayCachedVoiceInfo(); this.DisplayTrafficStats(this.connection.Client.LoadBalancingPeer); } }
public override void OnInspectorGUI() { this.serializedObject.UpdateIfRequiredOrScript(); VoiceLogger.ExposeLogLevel(this.serializedObject, this.speaker); EditorGUI.BeginChangeCheck(); if (PhotonVoiceEditorUtils.IsInTheSceneInPlayMode(this.speaker.gameObject)) { this.speaker.PlayDelayMs = EditorGUILayout.IntField(new GUIContent("Playback Delay (ms)", "Remote audio stream playback delay to compensate packets latency variations. Try 100 - 200 if sound is choppy. Default is 200ms"), this.speaker.PlayDelayMs); this.speaker.PlaybackOnlyWhenEnabled = EditorGUILayout.Toggle(new GUIContent("Playback Only When Enabled", "If true, component will work only when enabled and active in hierarchy."), this.speaker.PlaybackOnlyWhenEnabled); } else { EditorGUILayout.PropertyField(this.playDelayMsSp, new GUIContent("Playback Delay (ms)", "Remote audio stream playback delay to compensate packets latency variations. Try 100 - 200 if sound is choppy. Default is 200ms")); EditorGUILayout.PropertyField(this.playbackOnlyWhenEnabledSp, new GUIContent("Playback Only When Enabled", "If true, component will work only when enabled and active in hierarchy.")); } if (EditorGUI.EndChangeCheck()) { this.serializedObject.ApplyModifiedProperties(); } if (this.speaker.IsPlaying) { EditorGUILayout.LabelField(string.Format("Current Buffer Lag: {0}", this.speaker.Lag)); this.DrawAnimationCurve(); } }
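A minimal runtime sketch of the playback tuning this Speaker inspector exposes, assuming the Photon.Voice.Unity namespace; the class name SpeakerTuning is illustrative and not part of the package.

using Photon.Voice.Unity;
using UnityEngine;

// Hypothetical helper: adjusts the remote-stream playback delay at runtime,
// following the tooltip above (try 100 - 200 ms if sound is choppy).
public class SpeakerTuning : MonoBehaviour
{
    private void Start()
    {
        Speaker speaker = this.GetComponent<Speaker>();
        speaker.PlaybackOnlyWhenEnabled = true; // play only while this component is enabled and active
        speaker.PlayDelayMs = 200;              // value suggested by the tooltip above; adjust if playback stutters
    }
}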
public override void OnInspectorGUI() { if (processor.VAD && recorder.VoiceDetection) { EditorGUILayout.HelpBox("You have enabled VAD here and in the associated Recorder. Please use only one Voice Detection algorithm.", MessageType.Warning); } if ((processor.AEC || processor.AECMobile) && recorder.MicrophoneType == Recorder.MicType.Photon) { EditorGUILayout.HelpBox("You have enabled AEC here and are using a Photon Mic as input on the Recorder, which might add its own echo cancellation. Please use only one AEC algorithm.", MessageType.Warning); } VoiceLogger.ExposeLogLevel(this.serializedObject, processor); EditorGUI.BeginChangeCheck(); processor.Bypass = EditorGUILayout.Toggle(new GUIContent("Bypass", "Bypass WebRTC Audio DSP"), processor.Bypass); processor.AEC = EditorGUILayout.Toggle(new GUIContent("AEC", "Acoustic Echo Cancellation"), processor.AEC); processor.AECMobile = EditorGUILayout.Toggle(new GUIContent("AEC Mobile", "Acoustic Echo Cancellation Mobile"), processor.AECMobile); if (processor.AEC || processor.AECMobile) { processor.ReverseStreamDelayMs = EditorGUILayout.IntField(new GUIContent("ReverseStreamDelayMs", "Reverse stream delay (hint for AEC) in Milliseconds"), processor.ReverseStreamDelayMs); } processor.AGC = EditorGUILayout.Toggle(new GUIContent("AGC", "Automatic Gain Control"), processor.AGC); processor.VAD = EditorGUILayout.Toggle(new GUIContent("VAD", "Voice Activity Detection"), processor.VAD); processor.HighPass = EditorGUILayout.Toggle(new GUIContent("HighPass", "High Pass Filter"), processor.HighPass); processor.NoiseSuppression = EditorGUILayout.Toggle(new GUIContent("NoiseSuppression", "Noise Suppression"), processor.NoiseSuppression); if (EditorGUI.EndChangeCheck()) { serializedObject.ApplyModifiedProperties(); } }
public override void OnInspectorGUI() { serializedObject.UpdateIfRequiredOrScript(); VoiceLogger.ExposeLogLevel(serializedObject, connection); EditorGUI.BeginChangeCheck(); EditorGUILayout.PropertyField(updateIntervalSp, new GUIContent("Update Interval (ms)", "time [ms] between consecutive SendOutgoingCommands calls")); EditorGUILayout.PropertyField(enableSupportLoggerSp, new GUIContent("Support Logger", "Logs additional info for debugging.\nUse this when you submit bugs to the Photon Team.")); #if !UNITY_ANDROID && !UNITY_IOS EditorGUILayout.PropertyField(runInBackground, new GUIContent("Run In Background", "Sets Unity's Application.runInBackground: Should the application keep running when the application is in the background?")); #endif #if !UNITY_IOS EditorGUILayout.PropertyField(keepAliveInBackgroundSp, new GUIContent("Background Timeout (ms)", "Defines for how long the Fallback Thread should keep the connection, before it may time out as usual.")); #endif EditorGUILayout.PropertyField(applyDontDestroyOnLoadSp, new GUIContent("Don't Destroy On Load", "Persists the GameObject across scenes using Unity's GameObject.DontDestroyOnLoad")); if (Application.isPlaying) { connection.PrimaryRecorder = EditorGUILayout.ObjectField( new GUIContent("Primary Recorder", "Main Recorder to be used for transmission by default"), connection.PrimaryRecorder, typeof(Recorder), true) as Recorder; EditorGUILayout.HelpBox("Speaker prefab needs to have a Speaker component in the hierarchy.", MessageType.Info); connection.SpeakerPrefab = EditorGUILayout.ObjectField(new GUIContent("Speaker Prefab", "Prefab that contains Speaker component to be instantiated when receiving a new remote audio source info"), connection.SpeakerPrefab, typeof(GameObject), false) as GameObject; } else { EditorGUILayout.PropertyField(this.primaryRecorderSp, new GUIContent("Primary Recorder", "Main Recorder to be used for transmission by default")); EditorGUILayout.HelpBox("Speaker prefab needs to have a Speaker component in the hierarchy.", MessageType.Info); GameObject prefab = this.speakerPrefabSp.objectReferenceValue as GameObject; prefab = EditorGUILayout.ObjectField(new GUIContent("Speaker Prefab", "Prefab that contains Speaker component to be instantiated when receiving a new remote audio source info"), prefab, typeof(GameObject), false) as GameObject; if (prefab == null || prefab.GetComponentInChildren <Speaker>() != null) { this.speakerPrefabSp.objectReferenceValue = prefab; } else { Debug.LogError("SpeakerPrefab must have a component of type Speaker in its hierarchy.", this); } } this.DisplayAppSettings(); EditorGUILayout.PropertyField(statsResetInterval, new GUIContent("Stats Reset Interval (ms)", "time [ms] between statistics calculations")); if (EditorGUI.EndChangeCheck()) { serializedObject.ApplyModifiedProperties(); } if (PhotonVoiceEditorUtils.IsInTheSceneInPlayMode(connection.gameObject)) { this.DisplayVoiceStats(); this.DisplayDebugInfo(this.connection.Client); this.DisplayCachedVoiceInfo(); this.DisplayTrafficStats(this.connection.Client.LoadBalancingPeer); } }
public override void OnInspectorGUI() { // Custom inspector; does not draw the default property editor VoiceLogger.ExposeLogLevel(serializedObject, connection); EditorGUI.BeginChangeCheck(); EditorGUILayout.PropertyField(updateIntervalSp, new GUIContent("Update Interval (ms)", "time [ms] between consecutive SendOutgoingCommands calls")); EditorGUILayout.PropertyField(enableSupportLoggerSp); #if !UNITY_ANDROID && !UNITY_IOS EditorGUILayout.PropertyField(runInBackground, new GUIContent("Run In Background", "Sets Unity's Application.runInBackground: Should the application keep running when the application is in the background?")); #endif #if !UNITY_IOS EditorGUILayout.PropertyField(keepAliveInBackgroundSp, new GUIContent("Background Timeout (ms)", "Defines for how long the Fallback Thread should keep the connection, before it may time out as usual.")); #endif EditorGUILayout.PropertyField(applyDontDestroyOnLoadSp, new GUIContent("Don't Destroy On Load", "Persists the GameObject across scenes using Unity's GameObject.DontDestroyOnLoad")); EditorGUILayout.PropertyField(primaryRecorderSp, new GUIContent("Primary Recorder", "Main Recorder to be used for transmission by default")); connection.SpeakerPrefab = (GameObject)EditorGUILayout.ObjectField(new GUIContent("Speaker Prefab", "Prefab that contains Speaker component to be instantiated when receiving a new remote audio source info"), connection.SpeakerPrefab, typeof(GameObject), false); #if PHOTON_UNITY_NETWORKING EditorGUILayout.PropertyField(usePunSettingsSp); showSettings = !usePunSettingsSp.boolValue && EditorGUILayout.Foldout(showSettings, new GUIContent("Settings", "Settings to be used by this voice connection")); #else showSettings = EditorGUILayout.Foldout(showSettings, new GUIContent("Settings", "Settings to be used by this voice connection")); #endif if (showSettings) { EditorGUI.indentLevel++; EditorGUILayout.PropertyField(settingsSp.FindPropertyRelative("AppIdVoice")); EditorGUILayout.PropertyField(settingsSp.FindPropertyRelative("AppVersion")); EditorGUILayout.PropertyField(settingsSp.FindPropertyRelative("UseNameServer")); EditorGUILayout.PropertyField(settingsSp.FindPropertyRelative("FixedRegion")); EditorGUILayout.PropertyField(settingsSp.FindPropertyRelative("Server")); EditorGUILayout.PropertyField(settingsSp.FindPropertyRelative("Port")); EditorGUILayout.PropertyField(settingsSp.FindPropertyRelative("Protocol")); EditorGUILayout.PropertyField(settingsSp.FindPropertyRelative("EnableLobbyStatistics")); EditorGUILayout.PropertyField(settingsSp.FindPropertyRelative("NetworkLogging")); EditorGUI.indentLevel--; } EditorGUILayout.PropertyField(statsResetInterval, new GUIContent("Stats Reset Interval (ms)", "time [ms] between statistics calculations")); if (EditorGUI.EndChangeCheck()) { serializedObject.ApplyModifiedProperties(); } if (Application.isPlaying) { EditorGUILayout.LabelField(string.Format("Frames Received /s: {0}", connection.FramesReceivedPerSecond)); EditorGUILayout.LabelField(string.Format("Frames Lost /s: {0}", connection.FramesLostPerSecond)); EditorGUILayout.LabelField(string.Format("Frames Lost %: {0}", connection.FramesLostPercent)); } }
public override void OnInspectorGUI() { serializedObject.UpdateIfRequiredOrScript(); VoiceLogger.ExposeLogLevel(serializedObject, speaker); EditorGUI.BeginChangeCheck(); EditorGUILayout.PropertyField(playDelayMsSp, new GUIContent("Playback Delay (ms)", "Remote audio stream playback delay to compensate packets latency variations. Try 100 - 200 if sound is choppy. Default is 200ms")); if (EditorGUI.EndChangeCheck()) { serializedObject.ApplyModifiedProperties(); } if (speaker.IsPlaying) { EditorGUILayout.LabelField(string.Format("Current Buffer Lag: {0}", speaker.Lag)); DrawAnimationCurve(); } }
public override void OnInspectorGUI() { this.serializedObject.UpdateIfRequiredOrScript(); VoiceLogger.ExposeLogLevel(this.serializedObject, this.connection); EditorGUI.BeginChangeCheck(); this.connection.GlobalRecordersLogLevel = VoiceLogger.ExposeLogLevel(this.globalRecordersLogLevelSp); this.connection.GlobalSpeakersLogLevel = VoiceLogger.ExposeLogLevel(this.globalSpeakersLogLevelSp); EditorGUILayout.PropertyField(this.autoCreateSpeakerIfNotFoundSp, new GUIContent("Create Speaker If Not Found", "Auto instantiate a GameObject and attach a Speaker component to link to a remote audio stream if no candidate could be found")); EditorGUILayout.PropertyField(this.updateIntervalSp, new GUIContent("Update Interval (ms)", "time [ms] between consecutive SendOutgoingCommands calls")); if (PhotonVoiceEditorUtils.IsInTheSceneInPlayMode(this.connection.gameObject)) { this.connection.PrimaryRecorder = EditorGUILayout.ObjectField( new GUIContent("Primary Recorder", "Main Recorder to be used for transmission by default"), this.connection.PrimaryRecorder, typeof(Recorder), true) as Recorder; if (this.connection.SpeakerPrefab == null) { EditorGUILayout.HelpBox("Speaker prefab needs to have a Speaker component in the hierarchy.", MessageType.Info); } this.connection.SpeakerPrefab = EditorGUILayout.ObjectField(new GUIContent("Speaker Prefab", "Prefab that contains Speaker component to be instantiated when receiving a new remote audio source info"), this.connection.SpeakerPrefab, typeof(GameObject), false) as GameObject; EditorGUILayout.PropertyField(this.globalPlayDelaySettingsSp, new GUIContent("Global Playback Delay Configuration", "Remote audio stream playback delay to compensate packets latency variations."), true); this.connection.SetGlobalPlaybackDelaySettings( this.globalPlayDelaySettingsSp.FindPropertyRelative("MinDelaySoft").intValue, this.globalPlayDelaySettingsSp.FindPropertyRelative("MaxDelaySoft").intValue, this.globalPlayDelaySettingsSp.FindPropertyRelative("MaxDelayHard").intValue); } else { EditorGUILayout.PropertyField(this.enableSupportLoggerSp, new GUIContent("Support Logger", "Logs additional info for debugging.\nUse this when you submit bugs to the Photon Team.")); #if !UNITY_ANDROID && !UNITY_IOS EditorGUILayout.PropertyField(this.runInBackground, new GUIContent("Run In Background", "Sets Unity's Application.runInBackground: Should the application keep running when the application is in the background?")); #endif #if !UNITY_IOS EditorGUILayout.PropertyField(this.keepAliveInBackgroundSp, new GUIContent("Background Timeout (ms)", "Defines for how long the Fallback Thread should keep the connection, before it may time out as usual.")); #endif EditorGUILayout.PropertyField(this.applyDontDestroyOnLoadSp, new GUIContent("Don't Destroy On Load", "Persists the GameObject across scenes using Unity's GameObject.DontDestroyOnLoad")); if (this.applyDontDestroyOnLoadSp.boolValue && !PhotonVoiceEditorUtils.IsPrefab(this.connection.gameObject)) { if (this.connection.transform.parent != null) { EditorGUILayout.HelpBox("DontDestroyOnLoad only works for root GameObjects or components on root GameObjects.", MessageType.Warning); if (GUILayout.Button("Detach")) { this.connection.transform.parent = null; } } } EditorGUILayout.PropertyField(this.primaryRecorderSp, new GUIContent("Primary Recorder", "Main Recorder to be used for transmission by default"));
GameObject prefab = this.speakerPrefabSp.objectReferenceValue as GameObject; if (prefab == null) { EditorGUILayout.HelpBox("Speaker prefab needs to have a Speaker component in the hierarchy.", MessageType.Info); } prefab = EditorGUILayout.ObjectField(new GUIContent("Speaker Prefab", "Prefab that contains Speaker component to be instantiated when receiving a new remote audio source info"), prefab, typeof(GameObject), false) as GameObject; if (prefab == null || prefab.GetComponentInChildren <Speaker>() != null) { this.speakerPrefabSp.objectReferenceValue = prefab; } else { Debug.LogError("SpeakerPrefab must have a component of type Speaker in its hierarchy.", this); } EditorGUILayout.PropertyField(this.globalPlayDelaySettingsSp, new GUIContent("Global Playback Delay Settings", "Remote audio stream playback delay to compensate packets latency variations."), true); } if (!this.connection.Client.IsConnected) { this.DisplayAppSettings(); } EditorGUILayout.PropertyField(this.statsResetInterval, new GUIContent("Stats Reset Interval (ms)", "time [ms] between statistics calculations")); if (EditorGUI.EndChangeCheck()) { this.serializedObject.ApplyModifiedProperties(); } if (PhotonVoiceEditorUtils.IsInTheSceneInPlayMode(this.connection.gameObject)) { this.DisplayVoiceStats(); this.DisplayDebugInfo(this.connection.Client); this.DisplayCachedVoiceInfo(); this.DisplayTrafficStats(this.connection.Client.LoadBalancingPeer); } }
public override void OnInspectorGUI() { serializedObject.UpdateIfRequiredOrScript(); //serializedObject.Update(); if (PhotonVoiceEditorUtils.IsInTheSceneInPlayMode(recorder.gameObject)) { if (recorder.RequiresRestart) { EditorGUILayout.HelpBox("Recorder requires restart. Call Recorder.RestartRecording().", MessageType.Warning); if (GUILayout.Button("RestartRecording")) { recorder.RestartRecording(); } } else if (!recorder.IsInitialized) { EditorGUILayout.HelpBox("Recorder requires initialization. Call Recorder.Init or VoiceConnection.InitRecorder.", MessageType.Warning); } } VoiceLogger.ExposeLogLevel(serializedObject, recorder); EditorGUI.BeginChangeCheck(); if (Application.isPlaying) { recorder.ReactOnSystemChanges = EditorGUILayout.Toggle(new GUIContent("React On System Changes", "If true, recording is restarted when Unity detects Audio Config. changes."), recorder.ReactOnSystemChanges); recorder.TransmitEnabled = EditorGUILayout.Toggle(new GUIContent("Transmit Enabled", "If true, audio transmission is enabled."), recorder.TransmitEnabled); if (recorder.IsInitialized) { recorder.IsRecording = EditorGUILayout.Toggle(new GUIContent("IsRecording", "If true, audio recording is on."), recorder.IsRecording); } else { EditorGUILayout.PropertyField(this.autoStartSp, new GUIContent("Auto Start", "If true, recording is started when Recorder is initialized.")); } if (recorder.IsRecording && recorder.TransmitEnabled) { float amplitude = 0f; if (recorder.IsCurrentlyTransmitting) { amplitude = recorder.LevelMeter.CurrentPeakAmp; } EditorGUILayout.Slider("Level", amplitude, 0, 1); } recorder.Encrypt = EditorGUILayout.Toggle(new GUIContent("Encrypt", "If true, voice stream is sent encrypted."), recorder.Encrypt); recorder.InterestGroup = (byte)EditorGUILayout.IntField(new GUIContent("Interest Group", "Target interest group that will receive transmitted audio."), recorder.InterestGroup); if (recorder.InterestGroup == 0) { recorder.DebugEchoMode = EditorGUILayout.Toggle(new GUIContent("Debug Echo", "If true, outgoing stream routed back to client via server same way as for remote client's streams."), recorder.DebugEchoMode); } recorder.ReliableMode = EditorGUILayout.Toggle(new GUIContent("Reliable Mode", "If true, stream data sent in reliable mode."), recorder.ReliableMode); EditorGUILayout.LabelField("Codec Parameters", EditorStyles.boldLabel); recorder.FrameDuration = (OpusCodec.FrameDuration)EditorGUILayout.EnumPopup(new GUIContent("Frame Duration", "Outgoing audio stream encoder delay."), recorder.FrameDuration); recorder.SamplingRate = (POpusCodec.Enums.SamplingRate)EditorGUILayout.EnumPopup( new GUIContent("Sampling Rate", "Outgoing audio stream sampling rate."), recorder.SamplingRate); recorder.Bitrate = EditorGUILayout.IntField(new GUIContent("Bitrate", "Outgoing audio stream bitrate."), recorder.Bitrate); EditorGUILayout.LabelField("Audio Source Settings", EditorStyles.boldLabel); recorder.SourceType = (Recorder.InputSourceType)EditorGUILayout.EnumPopup(new GUIContent("Input Source Type", "Input audio data source type"), recorder.SourceType); switch (recorder.SourceType) { case Recorder.InputSourceType.Microphone: recorder.MicrophoneType = (Recorder.MicType)EditorGUILayout.EnumPopup( new GUIContent("Microphone Type", "Which microphone API to use when the Source is set to Microphone."), recorder.MicrophoneType); EditorGUILayout.HelpBox("Devices list and current selection is valid in Unity Editor only. 
In build, you need to set it via code preferably at runtime.", MessageType.Info); switch (recorder.MicrophoneType) { case Recorder.MicType.Unity: if (Microphone.devices.Length == 0) { EditorGUILayout.HelpBox("No microphone device found", MessageType.Error); } else { unityMicrophoneDeviceIndex = EditorGUILayout.Popup("Microphone Device", unityMicrophoneDeviceIndex, Microphone.devices); recorder.UnityMicrophoneDevice = Microphone.devices[unityMicrophoneDeviceIndex]; int minFreq, maxFreq; Microphone.GetDeviceCaps(Microphone.devices[unityMicrophoneDeviceIndex], out minFreq, out maxFreq); EditorGUILayout.LabelField("Microphone Device Caps", string.Format("{0}..{1} Hz", minFreq, maxFreq)); } break; case Recorder.MicType.Photon: #if PHOTON_MICROPHONE_ENUMERATOR if (Recorder.PhotonMicrophoneEnumerator.IsSupported) { if (Recorder.PhotonMicrophoneEnumerator.Count == 0) { EditorGUILayout.HelpBox("No microphone device found", MessageType.Error); } else { EditorGUILayout.BeginHorizontal(); photonDeviceIndex = EditorGUILayout.Popup("Microphone Device", photonDeviceIndex, photonDeviceNames); recorder.PhotonMicrophoneDeviceId = photonDeviceIDs[photonDeviceIndex]; if (GUILayout.Button("Refresh", EditorStyles.miniButton, GUILayout.Width(70))) { this.RefreshPhotonMicrophoneDevices(); } EditorGUILayout.EndHorizontal(); } } else { recorder.PhotonMicrophoneDeviceId = -1; EditorGUILayout.HelpBox("PhotonMicrophoneEnumerator Not Supported", MessageType.Error); } #endif #if UNITY_IOS EditorGUILayout.LabelField("iOS Audio Session Parameters", EditorStyles.boldLabel); EditorGUI.indentLevel++; EditorGUILayout.PropertyField(useCustomAudioSessionParametersSp, new GUIContent("Use Custom")); if (useCustomAudioSessionParametersSp.boolValue) { EditorGUILayout.PropertyField(audioSessionParametersCategorySp); EditorGUILayout.PropertyField(audioSessionParametersModeSp); EditorGUILayout.PropertyField(audioSessionParametersCategoryOptionsSp, true); } else { int index = EditorGUILayout.Popup("Preset", audioSessionPresetIndexSp.intValue, iOSAudioSessionPresetsNames); if (index != audioSessionPresetIndexSp.intValue) { audioSessionPresetIndexSp.intValue = index; AudioSessionParameters parameters = iOSAudioSessionPresetsValues[index]; this.SetEnumIndex(audioSessionParametersCategorySp, typeof(AudioSessionCategory), parameters.Category); this.SetEnumIndex(audioSessionParametersModeSp, typeof(AudioSessionMode), parameters.Mode); if (parameters.CategoryOptions != null) { audioSessionParametersCategoryOptionsSp.ClearArray(); audioSessionParametersCategoryOptionsSp.arraySize = parameters.CategoryOptions.Length; if (index == 0) { this.SetEnumIndex(audioSessionParametersCategoryOptionsSp .GetArrayElementAtIndex(0), typeof(AudioSessionCategoryOption), AudioSessionCategoryOption.DefaultToSpeaker); this.SetEnumIndex(audioSessionParametersCategoryOptionsSp .GetArrayElementAtIndex(1), typeof(AudioSessionCategoryOption), AudioSessionCategoryOption.AllowBluetooth); } else if (index == 1) { this.SetEnumIndex(audioSessionParametersCategoryOptionsSp .GetArrayElementAtIndex(0), typeof(AudioSessionCategoryOption), AudioSessionCategoryOption.AllowBluetooth); } } } } EditorGUI.indentLevel--; #endif break; default: throw new ArgumentOutOfRangeException(); } break; case Recorder.InputSourceType.AudioClip: recorder.AudioClip = EditorGUILayout.ObjectField(new GUIContent("Audio Clip", "Source audio clip."), recorder.AudioClip, typeof(AudioClip), false) as AudioClip; recorder.LoopAudioClip = EditorGUILayout.Toggle(new GUIContent("Loop", "Loop playback for 
audio clip sources."), recorder.LoopAudioClip); break; case Recorder.InputSourceType.Factory: EditorGUILayout.HelpBox("Add a custom InputFactory method in code.", MessageType.Info); break; default: throw new ArgumentOutOfRangeException(); } EditorGUILayout.LabelField("Voice Activity Detection (VAD)", EditorStyles.boldLabel); recorder.VoiceDetection = EditorGUILayout.Toggle(new GUIContent("Detect", "If true, voice detection enabled."), recorder.VoiceDetection); if (recorder.VoiceDetection) { recorder.VoiceDetectionThreshold = EditorGUILayout.Slider( new GUIContent("Threshold", "Voice detection threshold (0..1, where 1 is full amplitude)."), recorder.VoiceDetectionThreshold, 0f, 1f); recorder.VoiceDetectionDelayMs = EditorGUILayout.IntField(new GUIContent("Delay (ms)", "Keep detected state during this time after signal level dropped below threshold. Default is 500ms"), recorder.VoiceDetectionDelayMs); EditorGUILayout.HelpBox("Do not speak and stay in a silent environment when calibrating.", MessageType.Info); if (recorder.VoiceDetectorCalibrating) { EditorGUILayout.LabelField(string.Format("Calibrating {0} ms", calibrationTime)); } else { calibrationTime = EditorGUILayout.IntField("Calibration Time (ms)", calibrationTime); if (recorder.IsRecording && recorder.TransmitEnabled) { if (GUILayout.Button("Calibrate")) { recorder.VoiceDetectorCalibrate(calibrationTime); } } } } } else { EditorGUILayout.PropertyField(this.reactOnSystemChangesSp, new GUIContent("React On System Changes", "If true, recording is restarted when Unity detects Audio Config. changes.")); EditorGUILayout.PropertyField(this.transmitEnabledSp, new GUIContent("Transmit Enabled", "If true, audio transmission is enabled.")); EditorGUILayout.PropertyField(this.autoStartSp, new GUIContent("Auto Start", "If true, recording is started when Recorder is initialized.")); EditorGUILayout.PropertyField(this.encryptSp, new GUIContent("Encrypt", "If true, voice stream is sent encrypted.")); EditorGUILayout.PropertyField(this.interestGroupSp, new GUIContent("Interest Group", "Target interest group that will receive transmitted audio.")); if (this.interestGroupSp.intValue == 0) { EditorGUILayout.PropertyField(this.debugEchoModeSp, new GUIContent("Debug Echo", "If true, outgoing stream routed back to client via server same way as for remote client's streams.")); } else if (this.debugEchoModeSp.boolValue) { Debug.LogWarningFormat("DebugEchoMode disabled because InterestGroup changed to {0}. 
DebugEchoMode works only with Interest Group 0.", this.interestGroupSp.intValue); this.debugEchoModeSp.boolValue = false; } EditorGUILayout.PropertyField(this.reliableModeSp, new GUIContent("Reliable Mode", "If true, stream data sent in reliable mode.")); EditorGUILayout.LabelField("Codec Parameters", EditorStyles.boldLabel); EditorGUILayout.PropertyField(this.frameDurationSp, new GUIContent("Frame Duration", "Outgoing audio stream encoder delay.")); EditorGUILayout.PropertyField(this.samplingRateSp, new GUIContent("Sampling Rate", "Outgoing audio stream sampling rate.")); EditorGUILayout.PropertyField(this.bitrateSp, new GUIContent("Bitrate", "Outgoing audio stream bitrate.")); EditorGUILayout.LabelField("Audio Source Settings", EditorStyles.boldLabel); EditorGUILayout.PropertyField(this.sourceTypeSp, new GUIContent("Input Source Type", "Input audio data source type")); switch ((Recorder.InputSourceType) this.sourceTypeSp.enumValueIndex) { case Recorder.InputSourceType.Microphone: EditorGUILayout.PropertyField(this.microphoneTypeSp, new GUIContent("Microphone Type", "Which microphone API to use when the Source is set to Microphone.")); EditorGUILayout.HelpBox("Devices list and current selection is valid in Unity Editor only. In build, you need to set it via code preferably at runtime.", MessageType.Info); switch (recorder.MicrophoneType) { case Recorder.MicType.Unity: if (Microphone.devices.Length == 0) { EditorGUILayout.HelpBox("No microphone device found", MessageType.Error); } else { unityMicrophoneDeviceIndex = EditorGUILayout.Popup("Microphone Device", unityMicrophoneDeviceIndex, Microphone.devices); this.unityMicrophoneDeviceSp.stringValue = Microphone.devices[unityMicrophoneDeviceIndex]; int minFreq, maxFreq; Microphone.GetDeviceCaps(Microphone.devices[unityMicrophoneDeviceIndex], out minFreq, out maxFreq); EditorGUILayout.LabelField("Microphone Device Caps", string.Format("{0}..{1} Hz", minFreq, maxFreq)); } break; case Recorder.MicType.Photon: #if PHOTON_MICROPHONE_ENUMERATOR if (Recorder.PhotonMicrophoneEnumerator.IsSupported) { if (Recorder.PhotonMicrophoneEnumerator.Count == 0) { EditorGUILayout.HelpBox("No microphone device found", MessageType.Error); } else { EditorGUILayout.BeginHorizontal(); photonDeviceIndex = EditorGUILayout.Popup("Microphone Device", photonDeviceIndex, photonDeviceNames); this.photonMicrophoneDeviceIdSp.intValue = photonDeviceIDs[photonDeviceIndex]; if (GUILayout.Button("Refresh", EditorStyles.miniButton, GUILayout.Width(70))) { this.RefreshPhotonMicrophoneDevices(); } EditorGUILayout.EndHorizontal(); } } else { recorder.PhotonMicrophoneDeviceId = -1; EditorGUILayout.HelpBox("PhotonMicrophoneEnumerator Not Supported", MessageType.Error); } #endif #if UNITY_IOS EditorGUILayout.LabelField("iOS Audio Session Parameters", EditorStyles.boldLabel); EditorGUI.indentLevel++; EditorGUILayout.PropertyField(useCustomAudioSessionParametersSp, new GUIContent("Use Custom")); if (useCustomAudioSessionParametersSp.boolValue) { EditorGUILayout.PropertyField(audioSessionParametersCategorySp); EditorGUILayout.PropertyField(audioSessionParametersModeSp); EditorGUILayout.PropertyField(audioSessionParametersCategoryOptionsSp, true); } else { int index = EditorGUILayout.Popup("Preset", audioSessionPresetIndexSp.intValue, iOSAudioSessionPresetsNames); if (index != audioSessionPresetIndexSp.intValue) { audioSessionPresetIndexSp.intValue = index; AudioSessionParameters parameters = iOSAudioSessionPresetsValues[index]; this.SetEnumIndex(audioSessionParametersCategorySp, 
typeof(AudioSessionCategory), parameters.Category); this.SetEnumIndex(audioSessionParametersModeSp, typeof(AudioSessionMode), parameters.Mode); if (parameters.CategoryOptions != null) { audioSessionParametersCategoryOptionsSp.ClearArray(); audioSessionParametersCategoryOptionsSp.arraySize = parameters.CategoryOptions.Length; if (index == 0) { this.SetEnumIndex(audioSessionParametersCategoryOptionsSp .GetArrayElementAtIndex(0), typeof(AudioSessionCategoryOption), AudioSessionCategoryOption.DefaultToSpeaker); this.SetEnumIndex(audioSessionParametersCategoryOptionsSp .GetArrayElementAtIndex(1), typeof(AudioSessionCategoryOption), AudioSessionCategoryOption.AllowBluetooth); } else if (index == 1) { this.SetEnumIndex(audioSessionParametersCategoryOptionsSp .GetArrayElementAtIndex(0), typeof(AudioSessionCategoryOption), AudioSessionCategoryOption.AllowBluetooth); } } } } EditorGUI.indentLevel--; #endif break; default: throw new ArgumentOutOfRangeException(); } break; case Recorder.InputSourceType.AudioClip: EditorGUILayout.PropertyField(this.audioClipSp, new GUIContent("Audio Clip", "Source audio clip.")); EditorGUILayout.PropertyField(this.loopAudioClipSp, new GUIContent("Loop", "Loop playback for audio clip sources.")); break; case Recorder.InputSourceType.Factory: EditorGUILayout.HelpBox("Add a custom InputFactory method in code.", MessageType.Info); break; default: throw new ArgumentOutOfRangeException(); } EditorGUILayout.LabelField("Voice Activity Detection (VAD)", EditorStyles.boldLabel); EditorGUILayout.PropertyField(this.voiceDetectionSp, new GUIContent("Detect", "If true, voice detection enabled.")); if (this.voiceDetectionSp.boolValue) { this.voiceDetectionThresholdSp.floatValue = EditorGUILayout.Slider( new GUIContent("Threshold", "Voice detection threshold (0..1, where 1 is full amplitude)."), this.voiceDetectionThresholdSp.floatValue, 0f, 1f); this.voiceDetectionDelayMsSp.intValue = EditorGUILayout.IntField(new GUIContent("Delay (ms)", "Keep detected state during this time after signal level dropped below threshold. Default is 500ms"), this.voiceDetectionDelayMsSp.intValue); } } if (EditorGUI.EndChangeCheck()) { serializedObject.ApplyModifiedProperties(); } }
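The Recorder inspectors note that the microphone device list is valid only in the Unity Editor and that builds should select a device from code. A minimal runtime sketch under that assumption; the Photon.Voice.Unity namespace is assumed, RecorderConfigurator is an illustrative name, and the threshold is only a starting value.

using Photon.Voice.Unity;
using UnityEngine;

public class RecorderConfigurator : MonoBehaviour
{
    [SerializeField] private Recorder recorder; // assumed to be assigned in the Inspector

    private void Start()
    {
        this.recorder.SourceType = Recorder.InputSourceType.Microphone;
        this.recorder.MicrophoneType = Recorder.MicType.Unity;

        // Pick the first Unity microphone device, mirroring the editor's device popup.
        if (Microphone.devices.Length > 0)
        {
            this.recorder.UnityMicrophoneDevice = Microphone.devices[0];
        }

        this.recorder.TransmitEnabled = true;
        this.recorder.VoiceDetection = true;
        this.recorder.VoiceDetectionThreshold = 0.01f; // assumed starting value; calibrate at runtime where possible
    }
}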
public override void OnInspectorGUI() { // Custom inspector; does not draw the default property editor VoiceLogger.ExposeLogLevel(serializedObject, connection); EditorGUI.BeginChangeCheck(); EditorGUILayout.PropertyField(updateIntervalSp, new GUIContent("Update Interval (ms)", "time [ms] between consecutive SendOutgoingCommands calls")); EditorGUILayout.PropertyField(enableSupportLoggerSp); #if !UNITY_ANDROID && !UNITY_IOS EditorGUILayout.PropertyField(runInBackground, new GUIContent("Run In Background", "Sets Unity's Application.runInBackground: Should the application keep running when the application is in the background?")); #endif #if !UNITY_IOS EditorGUILayout.PropertyField(keepAliveInBackgroundSp, new GUIContent("Background Timeout (ms)", "Defines for how long the Fallback Thread should keep the connection, before it may time out as usual.")); #endif EditorGUILayout.PropertyField(applyDontDestroyOnLoadSp, new GUIContent("Don't Destroy On Load", "Persists the GameObject across scenes using Unity's GameObject.DontDestroyOnLoad")); EditorGUILayout.PropertyField(primaryRecorderSp, new GUIContent("Primary Recorder", "Main Recorder to be used for transmission by default")); connection.SpeakerPrefab = (GameObject)EditorGUILayout.ObjectField(new GUIContent("Speaker Prefab", "Prefab that contains Speaker component to be instantiated when receiving a new remote audio source info"), connection.SpeakerPrefab, typeof(GameObject), false); #if PHOTON_UNITY_NETWORKING EditorGUILayout.PropertyField(usePunSettingsSp); connection.ShowSettings = !usePunSettingsSp.boolValue && EditorGUILayout.Foldout(connection.ShowSettings, new GUIContent("Settings", "Settings to be used by this voice connection")); #else connection.ShowSettings = EditorGUILayout.Foldout(connection.ShowSettings, new GUIContent("Settings", "Settings to be used by this voice connection")); #endif if (connection.ShowSettings) { EditorGUI.indentLevel++; EditorGUILayout.BeginHorizontal(); SerializedProperty sP = settingsSp.FindPropertyRelative("AppIdVoice"); EditorGUILayout.PropertyField(sP); string appId = sP.stringValue; string url = "https://dashboard.photonengine.com/en-US/PublicCloud"; if (!string.IsNullOrEmpty(appId)) { url = string.Format("https://dashboard.photonengine.com/en-US/App/Manage/{0}", appId); } if (GUILayout.Button("Dashboard", EditorStyles.miniButton, GUILayout.Width(70))) { Application.OpenURL(url); } EditorGUILayout.EndHorizontal(); EditorGUILayout.PropertyField(settingsSp.FindPropertyRelative("AppVersion")); EditorGUILayout.PropertyField(settingsSp.FindPropertyRelative("UseNameServer")); EditorGUILayout.PropertyField(settingsSp.FindPropertyRelative("FixedRegion")); EditorGUILayout.PropertyField(settingsSp.FindPropertyRelative("Server")); EditorGUILayout.PropertyField(settingsSp.FindPropertyRelative("Port")); EditorGUILayout.PropertyField(settingsSp.FindPropertyRelative("Protocol")); EditorGUILayout.PropertyField(settingsSp.FindPropertyRelative("EnableLobbyStatistics")); EditorGUILayout.PropertyField(settingsSp.FindPropertyRelative("NetworkLogging")); EditorGUI.indentLevel--; } EditorGUILayout.PropertyField(statsResetInterval, new GUIContent("Stats Reset Interval (ms)", "time [ms] between statistics calculations")); if (EditorGUI.EndChangeCheck()) { serializedObject.ApplyModifiedProperties(); } if (Application.isPlaying) { this.DisplayVoiceStats(); this.DisplayDebugInfo(this.connection.Client); this.DisplayCachedVoiceInfo(); } }
public override void OnInspectorGUI() { this.serializedObject.UpdateIfRequiredOrScript(); //serializedObject.Update(); WebRtcAudioDsp webRtcAudioDsp = this.recorder.GetComponent <WebRtcAudioDsp>(); bool webRtcAudioDspAttached = webRtcAudioDsp && webRtcAudioDsp != null && webRtcAudioDsp.enabled; if (PhotonVoiceEditorUtils.IsInTheSceneInPlayMode(this.recorder.gameObject)) { if (this.recorder.RequiresRestart) { EditorGUILayout.HelpBox("Recorder requires restart. Call Recorder.RestartRecording().", MessageType.Warning); if (GUILayout.Button("RestartRecording")) { this.recorder.RestartRecording(); } } else if (!this.recorder.IsInitialized) { EditorGUILayout.HelpBox("Recorder requires initialization. Call Recorder.Init or VoiceConnection.InitRecorder.", MessageType.Warning); } } VoiceLogger.ExposeLogLevel(this.serializedObject, this.recorder); EditorGUI.BeginChangeCheck(); if (PhotonVoiceEditorUtils.IsInTheSceneInPlayMode(this.recorder.gameObject)) { #if !UNITY_ANDROID && !UNITY_IOS this.recorder.ReactOnSystemChanges = EditorGUILayout.Toggle(new GUIContent("React On System Changes", "If true, recording is restarted when Unity detects Audio Config. changes."), this.recorder.ReactOnSystemChanges); if (this.recorder.ReactOnSystemChanges) { EditorGUI.indentLevel++; EditorGUILayout.PropertyField(this.skipDeviceChecksSp, new GUIContent("Skip Device Checks", "If true, restarts recording without checking if audio config/device changes affected recording.")); EditorGUI.indentLevel--; } #endif this.recorder.RecordOnlyWhenEnabled = EditorGUILayout.Toggle(new GUIContent("Record Only When Enabled", "If true, component will work only when enabled and active in hierarchy."), this.recorder.RecordOnlyWhenEnabled); EditorGUILayout.PropertyField(this.stopRecordingWhenPausedSp, new GUIContent("Stop Recording When Paused", "If true, stop recording when paused resume/restart when un-paused.")); this.recorder.TransmitEnabled = EditorGUILayout.Toggle(new GUIContent("Transmit Enabled", "If true, audio transmission is enabled."), this.recorder.TransmitEnabled); if (this.recorder.IsInitialized) { this.recorder.IsRecording = EditorGUILayout.Toggle(new GUIContent("IsRecording", "If true, audio recording is on."), this.recorder.IsRecording); } else { EditorGUILayout.PropertyField(this.autoStartSp, new GUIContent("Auto Start", "If true, recording is started when Recorder is initialized.")); } if (this.recorder.IsRecording && this.recorder.TransmitEnabled) { float amplitude = 0f; if (this.recorder.IsCurrentlyTransmitting) { amplitude = this.recorder.LevelMeter.CurrentPeakAmp; } EditorGUILayout.Slider("Level", amplitude, 0, 1); } this.recorder.Encrypt = EditorGUILayout.Toggle(new GUIContent("Encrypt", "If true, voice stream is sent encrypted."), this.recorder.Encrypt); this.recorder.InterestGroup = (byte)EditorGUILayout.IntField(new GUIContent("Interest Group", "Target interest group that will receive transmitted audio."), this.recorder.InterestGroup); if (this.recorder.InterestGroup == 0) { this.recorder.DebugEchoMode = EditorGUILayout.Toggle(new GUIContent("Debug Echo", "If true, outgoing stream routed back to client via server same way as for remote client's streams."), this.recorder.DebugEchoMode); } this.recorder.ReliableMode = EditorGUILayout.Toggle(new GUIContent("Reliable Mode", "If true, stream data sent in reliable mode."), this.recorder.ReliableMode); EditorGUILayout.LabelField("Codec Parameters", EditorStyles.boldLabel); this.recorder.FrameDuration = (OpusCodec.FrameDuration)EditorGUILayout.EnumPopup(new 
GUIContent("Frame Duration", "Outgoing audio stream encoder delay."), this.recorder.FrameDuration); this.recorder.SamplingRate = (POpusCodec.Enums.SamplingRate)EditorGUILayout.EnumPopup( new GUIContent("Sampling Rate", "Outgoing audio stream sampling rate."), this.recorder.SamplingRate); this.recorder.Bitrate = EditorGUILayout.IntField(new GUIContent("Bitrate", "Outgoing audio stream bitrate."), this.recorder.Bitrate); EditorGUILayout.LabelField("Audio Source Settings", EditorStyles.boldLabel); this.recorder.SourceType = (Recorder.InputSourceType)EditorGUILayout.EnumPopup(new GUIContent("Input Source Type", "Input audio data source type"), this.recorder.SourceType); switch (this.recorder.SourceType) { case Recorder.InputSourceType.Microphone: this.recorder.MicrophoneType = (Recorder.MicType)EditorGUILayout.EnumPopup( new GUIContent("Microphone Type", "Which microphone API to use when the Source is set to UnityMicrophone."), this.recorder.MicrophoneType); this.recorder.UseMicrophoneTypeFallback = EditorGUILayout.Toggle(new GUIContent("Use Fallback", "If true, if recording fails to start with Unity microphone type, Photon microphone type is used -if available- as a fallback and vice versa."), this.recorder.UseMicrophoneTypeFallback); EditorGUILayout.HelpBox("Devices list and current selection is valid in Unity Editor only. In build, you need to set it via code preferably at runtime.", MessageType.Info); switch (this.recorder.MicrophoneType) { case Recorder.MicType.Unity: if (UnityMicrophone.devices.Length == 0) { EditorGUILayout.HelpBox("No microphone device found", MessageType.Error); } else { this.unityMicrophoneDeviceIndex = EditorGUILayout.Popup("Microphone Device", this.GetUnityMicrophoneDeviceIndex(), UnityMicrophone.devices); this.recorder.UnityMicrophoneDevice = UnityMicrophone.devices[this.unityMicrophoneDeviceIndex]; int minFreq, maxFreq; UnityMicrophone.GetDeviceCaps(UnityMicrophone.devices[this.unityMicrophoneDeviceIndex], out minFreq, out maxFreq); EditorGUILayout.LabelField("Microphone Device Caps", string.Format("{0}..{1} Hz", minFreq, maxFreq)); } break; case Recorder.MicType.Photon: #if PHOTON_MICROPHONE_ENUMERATOR if (Recorder.PhotonMicrophoneEnumerator.IsSupported) { if (Recorder.PhotonMicrophoneEnumerator.Count == 0) { EditorGUILayout.HelpBox("No microphone device found", MessageType.Error); } else { EditorGUILayout.BeginHorizontal(); this.photonDeviceIndex = EditorGUILayout.Popup("Microphone Device", this.photonDeviceIndex, this.photonDeviceNames); this.recorder.PhotonMicrophoneDeviceId = this.photonDeviceIDs[this.photonDeviceIndex]; if (GUILayout.Button("Refresh", EditorStyles.miniButton, GUILayout.Width(70))) { this.RefreshPhotonMicrophoneDevices(); } EditorGUILayout.EndHorizontal(); } } else { this.recorder.PhotonMicrophoneDeviceId = -1; EditorGUILayout.HelpBox("PhotonMicrophoneEnumerator Not Supported", MessageType.Error); } #endif #if UNITY_IOS EditorGUILayout.LabelField("iOS Audio Session Parameters", EditorStyles.boldLabel); EditorGUI.indentLevel++; EditorGUILayout.PropertyField(this.useCustomAudioSessionParametersSp, new GUIContent("Use Custom")); if (this.useCustomAudioSessionParametersSp.boolValue) { EditorGUILayout.PropertyField(this.audioSessionParametersCategorySp); EditorGUILayout.PropertyField(this.audioSessionParametersModeSp); EditorGUILayout.PropertyField(this.audioSessionParametersCategoryOptionsSp, true); } else { int index = EditorGUILayout.Popup("Preset", this.audioSessionPresetIndexSp.intValue, this.iOSAudioSessionPresetsNames); if (index != 
this.audioSessionPresetIndexSp.intValue) { this.audioSessionPresetIndexSp.intValue = index; AudioSessionParameters parameters = this.iOSAudioSessionPresetsValues[index]; this.SetEnumIndex(this.audioSessionParametersCategorySp, typeof(AudioSessionCategory), parameters.Category); this.SetEnumIndex(this.audioSessionParametersModeSp, typeof(AudioSessionMode), parameters.Mode); if (parameters.CategoryOptions != null) { this.audioSessionParametersCategoryOptionsSp.ClearArray(); this.audioSessionParametersCategoryOptionsSp.arraySize = parameters.CategoryOptions.Length; if (index == 0) { this.SetEnumIndex(this.audioSessionParametersCategoryOptionsSp .GetArrayElementAtIndex(0), typeof(AudioSessionCategoryOption), AudioSessionCategoryOption.DefaultToSpeaker); this.SetEnumIndex(this.audioSessionParametersCategoryOptionsSp .GetArrayElementAtIndex(1), typeof(AudioSessionCategoryOption), AudioSessionCategoryOption.AllowBluetooth); } else if (index == 1) { this.SetEnumIndex(this.audioSessionParametersCategoryOptionsSp .GetArrayElementAtIndex(0), typeof(AudioSessionCategoryOption), AudioSessionCategoryOption.AllowBluetooth); } } } } EditorGUI.indentLevel--; #elif UNITY_ANDROID EditorGUILayout.LabelField("Android Native Microphone Settings", EditorStyles.boldLabel); EditorGUI.indentLevel++; EditorGUILayout.PropertyField(this.nativeAndroidMicrophoneSettingsSp.FindPropertyRelative("AcousticEchoCancellation")); EditorGUILayout.PropertyField(this.nativeAndroidMicrophoneSettingsSp.FindPropertyRelative("AutomaticGainControl")); EditorGUILayout.PropertyField(this.nativeAndroidMicrophoneSettingsSp.FindPropertyRelative("NoiseSuppression")); EditorGUI.indentLevel--; #endif break; default: throw new ArgumentOutOfRangeException(); } break; case Recorder.InputSourceType.AudioClip: this.recorder.AudioClip = EditorGUILayout.ObjectField(new GUIContent("Audio Clip", "Source audio clip."), this.recorder.AudioClip, typeof(AudioClip), false) as AudioClip; this.recorder.LoopAudioClip = EditorGUILayout.Toggle(new GUIContent("Loop", "Loop playback for audio clip sources."), this.recorder.LoopAudioClip); break; case Recorder.InputSourceType.Factory: EditorGUILayout.HelpBox("Add a custom InputFactory method in code.", MessageType.Info); break; default: throw new ArgumentOutOfRangeException(); } EditorGUILayout.LabelField("Voice Activity Detection (VAD)", EditorStyles.boldLabel); if (webRtcAudioDspAttached) { if (webRtcAudioDsp.VAD) { EditorGUILayout.HelpBox("WebRtcAudioDsp.VAD is already enabled no need to use the built-in Recorder VAD", MessageType.Info); } else { EditorGUILayout.HelpBox("It's recommended to use VAD from WebRtcAudioDsp instead of built-in Recorder VAD", MessageType.Info); } } this.recorder.VoiceDetection = EditorGUILayout.Toggle(new GUIContent("Detect", "If true, voice detection enabled."), this.recorder.VoiceDetection); if (this.recorder.VoiceDetection) { if (webRtcAudioDspAttached && !webRtcAudioDsp.VAD && GUILayout.Button("Use WebRtcAudioDsp.VAD instead")) { this.recorder.VoiceDetection = false; webRtcAudioDsp.VAD = true; } this.recorder.VoiceDetectionThreshold = EditorGUILayout.Slider( new GUIContent("Threshold", "Voice detection threshold (0..1, where 1 is full amplitude)."), this.recorder.VoiceDetectionThreshold, 0f, 1f); this.recorder.VoiceDetectionDelayMs = EditorGUILayout.IntField(new GUIContent("Delay (ms)", "Keep detected state during this time after signal level dropped below threshold. 
Default is 500ms"), this.recorder.VoiceDetectionDelayMs); EditorGUILayout.HelpBox("Do not speak and stay in a silent environment when calibrating.", MessageType.Info); if (this.recorder.VoiceDetectorCalibrating) { EditorGUILayout.LabelField(string.Format("Calibrating {0} ms", this.calibrationTime)); } else { this.calibrationTime = EditorGUILayout.IntField("Calibration Time (ms)", this.calibrationTime); if (this.recorder.IsRecording && this.recorder.TransmitEnabled) { if (GUILayout.Button("Calibrate")) { this.recorder.VoiceDetectorCalibrate(this.calibrationTime); } } } } } else { #if !UNITY_ANDROID && !UNITY_IOS EditorGUILayout.PropertyField(this.reactOnSystemChangesSp, new GUIContent("React On System Changes", "If true, recording is restarted when Unity detects Audio Config. changes.")); if (this.reactOnSystemChangesSp.boolValue) { EditorGUI.indentLevel++; EditorGUILayout.PropertyField(this.skipDeviceChecksSp, new GUIContent("Skip Device Checks", "If true, restarts recording without checking if audio config/device changes affected recording.")); EditorGUI.indentLevel--; } #endif EditorGUILayout.PropertyField(this.recordOnlyWhenEnabledSp, new GUIContent("Record Only When Enabled", "If true, component will work only when enabled and active in hierarchy.")); EditorGUILayout.PropertyField(this.stopRecordingWhenPausedSp, new GUIContent("Stop Recording When Paused", "If true, stop recording when paused resume/restart when un-paused.")); EditorGUILayout.PropertyField(this.transmitEnabledSp, new GUIContent("Transmit Enabled", "If true, audio transmission is enabled.")); EditorGUILayout.PropertyField(this.autoStartSp, new GUIContent("Auto Start", "If true, recording is started when Recorder is initialized.")); EditorGUILayout.PropertyField(this.encryptSp, new GUIContent("Encrypt", "If true, voice stream is sent encrypted.")); EditorGUILayout.PropertyField(this.interestGroupSp, new GUIContent("Interest Group", "Target interest group that will receive transmitted audio.")); if (this.interestGroupSp.intValue == 0) { EditorGUILayout.PropertyField(this.debugEchoModeSp, new GUIContent("Debug Echo", "If true, outgoing stream routed back to client via server same way as for remote client's streams.")); } else if (this.debugEchoModeSp.boolValue) { Debug.LogWarningFormat("DebugEchoMode disabled because InterestGroup changed to {0}. 
DebugEchoMode works only with Interest Group 0.", this.interestGroupSp.intValue); this.debugEchoModeSp.boolValue = false; } EditorGUILayout.PropertyField(this.reliableModeSp, new GUIContent("Reliable Mode", "If true, stream data sent in reliable mode.")); EditorGUILayout.LabelField("Codec Parameters", EditorStyles.boldLabel); EditorGUILayout.PropertyField(this.frameDurationSp, new GUIContent("Frame Duration", "Outgoing audio stream encoder delay.")); EditorGUILayout.PropertyField(this.samplingRateSp, new GUIContent("Sampling Rate", "Outgoing audio stream sampling rate.")); EditorGUILayout.PropertyField(this.bitrateSp, new GUIContent("Bitrate", "Outgoing audio stream bitrate.")); EditorGUILayout.LabelField("Audio Source Settings", EditorStyles.boldLabel); EditorGUILayout.PropertyField(this.sourceTypeSp, new GUIContent("Input Source Type", "Input audio data source type")); switch ((Recorder.InputSourceType) this.sourceTypeSp.enumValueIndex) { case Recorder.InputSourceType.Microphone: EditorGUILayout.PropertyField(this.microphoneTypeSp, new GUIContent("Microphone Type", "Which microphone API to use when the Source is set to UnityMicrophone.")); EditorGUILayout.PropertyField(this.useMicrophoneTypeFallbackSp, new GUIContent("Use Fallback", "If true, if recording fails to start with Unity microphone type, Photon microphone type is used -if available- as a fallback and vice versa.")); EditorGUILayout.HelpBox("Devices list and current selection is valid in Unity Editor only. In build, you need to set it via code preferably at runtime.", MessageType.Info); switch (this.recorder.MicrophoneType) { case Recorder.MicType.Unity: if (UnityMicrophone.devices.Length == 0) { EditorGUILayout.HelpBox("No microphone device found", MessageType.Error); } else { this.unityMicrophoneDeviceIndex = EditorGUILayout.Popup("Microphone Device", this.GetUnityMicrophoneDeviceIndex(), UnityMicrophone.devices); this.unityMicrophoneDeviceSp.stringValue = UnityMicrophone.devices[this.unityMicrophoneDeviceIndex]; int minFreq, maxFreq; UnityMicrophone.GetDeviceCaps(UnityMicrophone.devices[this.unityMicrophoneDeviceIndex], out minFreq, out maxFreq); EditorGUILayout.LabelField("Microphone Device Caps", string.Format("{0}..{1} Hz", minFreq, maxFreq)); } break; case Recorder.MicType.Photon: #if PHOTON_MICROPHONE_ENUMERATOR if (Recorder.PhotonMicrophoneEnumerator.IsSupported) { if (Recorder.PhotonMicrophoneEnumerator.Count == 0) { EditorGUILayout.HelpBox("No microphone device found", MessageType.Error); } else { EditorGUILayout.BeginHorizontal(); this.photonDeviceIndex = EditorGUILayout.Popup("Microphone Device", this.photonDeviceIndex, this.photonDeviceNames); this.photonMicrophoneDeviceIdSp.intValue = this.photonDeviceIDs[this.photonDeviceIndex]; if (GUILayout.Button("Refresh", EditorStyles.miniButton, GUILayout.Width(70))) { this.RefreshPhotonMicrophoneDevices(); } EditorGUILayout.EndHorizontal(); } } else { this.recorder.PhotonMicrophoneDeviceId = -1; EditorGUILayout.HelpBox("PhotonMicrophoneEnumerator Not Supported", MessageType.Error); } #endif #if UNITY_IOS EditorGUILayout.LabelField("iOS Audio Session Parameters", EditorStyles.boldLabel); EditorGUI.indentLevel++; EditorGUILayout.PropertyField(this.useCustomAudioSessionParametersSp, new GUIContent("Use Custom")); if (this.useCustomAudioSessionParametersSp.boolValue) { EditorGUILayout.PropertyField(this.audioSessionParametersCategorySp); EditorGUILayout.PropertyField(this.audioSessionParametersModeSp); EditorGUILayout.PropertyField(this.audioSessionParametersCategoryOptionsSp, 
true); } else { int index = EditorGUILayout.Popup("Preset", this.audioSessionPresetIndexSp.intValue, this.iOSAudioSessionPresetsNames); if (index != this.audioSessionPresetIndexSp.intValue) { this.audioSessionPresetIndexSp.intValue = index; AudioSessionParameters parameters = this.iOSAudioSessionPresetsValues[index]; this.SetEnumIndex(this.audioSessionParametersCategorySp, typeof(AudioSessionCategory), parameters.Category); this.SetEnumIndex(this.audioSessionParametersModeSp, typeof(AudioSessionMode), parameters.Mode); if (parameters.CategoryOptions != null) { this.audioSessionParametersCategoryOptionsSp.ClearArray(); this.audioSessionParametersCategoryOptionsSp.arraySize = parameters.CategoryOptions.Length; if (index == 0) { this.SetEnumIndex(this.audioSessionParametersCategoryOptionsSp .GetArrayElementAtIndex(0), typeof(AudioSessionCategoryOption), AudioSessionCategoryOption.DefaultToSpeaker); this.SetEnumIndex(this.audioSessionParametersCategoryOptionsSp .GetArrayElementAtIndex(1), typeof(AudioSessionCategoryOption), AudioSessionCategoryOption.AllowBluetooth); } else if (index == 1) { this.SetEnumIndex(this.audioSessionParametersCategoryOptionsSp .GetArrayElementAtIndex(0), typeof(AudioSessionCategoryOption), AudioSessionCategoryOption.AllowBluetooth); } } } } EditorGUI.indentLevel--; #elif UNITY_ANDROID EditorGUILayout.LabelField("Android Native Microphone Settings", EditorStyles.boldLabel); EditorGUI.indentLevel++; EditorGUILayout.PropertyField(this.nativeAndroidMicrophoneSettingsSp.FindPropertyRelative("AcousticEchoCancellation")); EditorGUILayout.PropertyField(this.nativeAndroidMicrophoneSettingsSp.FindPropertyRelative("AutomaticGainControl")); EditorGUILayout.PropertyField(this.nativeAndroidMicrophoneSettingsSp.FindPropertyRelative("NoiseSuppression")); EditorGUI.indentLevel--; #endif break; default: throw new ArgumentOutOfRangeException(); } break; case Recorder.InputSourceType.AudioClip: EditorGUILayout.PropertyField(this.audioClipSp, new GUIContent("Audio Clip", "Source audio clip.")); EditorGUILayout.PropertyField(this.loopAudioClipSp, new GUIContent("Loop", "Loop playback for audio clip sources.")); break; case Recorder.InputSourceType.Factory: EditorGUILayout.HelpBox("Add a custom InputFactory method in code.", MessageType.Info); break; default: throw new ArgumentOutOfRangeException(); } EditorGUILayout.LabelField("Voice Activity Detection (VAD)", EditorStyles.boldLabel); if (webRtcAudioDspAttached) { if (webRtcAudioDsp.VAD) { EditorGUILayout.HelpBox("WebRtcAudioDsp.VAD is already enabled no need to use the built-in Recorder VAD", MessageType.Info); } else { EditorGUILayout.HelpBox("It's recommended to use VAD from WebRtcAudioDsp instead of built-in Recorder VAD", MessageType.Info); } } EditorGUILayout.PropertyField(this.voiceDetectionSp, new GUIContent("Detect", "If true, voice detection enabled.")); if (this.voiceDetectionSp.boolValue) { if (webRtcAudioDspAttached && !webRtcAudioDsp.VAD && GUILayout.Button("Use WebRtcAudioDsp.VAD instead")) { this.recorder.VoiceDetection = false; webRtcAudioDsp.VAD = true; } this.voiceDetectionThresholdSp.floatValue = EditorGUILayout.Slider( new GUIContent("Threshold", "Voice detection threshold (0..1, where 1 is full amplitude)."), this.voiceDetectionThresholdSp.floatValue, 0f, 1f); this.voiceDetectionDelayMsSp.intValue = EditorGUILayout.IntField(new GUIContent("Delay (ms)", "Keep detected state during this time after signal level dropped below threshold. 
Default is 500ms"), this.voiceDetectionDelayMsSp.intValue); } } if (EditorGUI.EndChangeCheck()) { this.serializedObject.ApplyModifiedProperties(); } }
public override void OnInspectorGUI() { serializedObject.UpdateIfRequiredOrScript(); VoiceLogger.ExposeLogLevel(this.serializedObject, processor); bool bypassed = false; EditorGUI.BeginChangeCheck(); if (Application.isPlaying) { processor.Bypass = EditorGUILayout.Toggle(new GUIContent("Bypass", "Bypass WebRTC Audio DSP"), processor.Bypass); bypassed = processor.Bypass; } else { EditorGUILayout.PropertyField(this.bypassSp, new GUIContent("Bypass", "Bypass WebRTC Audio DSP")); bypassed = this.bypassSp.boolValue; } if (!bypassed) { if (Application.isPlaying) { processor.AEC = EditorGUILayout.Toggle(new GUIContent("AEC", "Acoustic Echo Cancellation"), processor.AEC); processor.AECMobile = EditorGUILayout.Toggle(new GUIContent("AEC Mobile", "Acoustic Echo Cancellation Mobile"), processor.AECMobile); if (processor.AEC || processor.AECMobile) { if (recorder.MicrophoneType == Recorder.MicType.Photon) { EditorGUILayout.HelpBox("You have enabled AEC here and are using a Photon Mic as input on the Recorder, which might add its own echo cancellation. Please use only one AEC algorithm.", MessageType.Warning); } processor.ReverseStreamDelayMs = EditorGUILayout.IntField(new GUIContent("ReverseStreamDelayMs", "Reverse stream delay (hint for AEC) in Milliseconds"), processor.ReverseStreamDelayMs); } if (processor.AECMobile) { processor.AECMobileComfortNoise = EditorGUILayout.Toggle(new GUIContent("AEC Mobile Comfort Noise", "Acoustic Echo Cancellation Mobile Comfort Noise"), processor.AECMobileComfortNoise); } processor.AGC = EditorGUILayout.Toggle(new GUIContent("AGC", "Automatic Gain Control"), processor.AGC); if (processor.VAD && recorder.VoiceDetection) { EditorGUILayout.HelpBox("You have enabled VAD here and in the associated Recorder. Please use only one Voice Detection algorithm.", MessageType.Warning); } processor.VAD = EditorGUILayout.Toggle(new GUIContent("VAD", "Voice Activity Detection"), processor.VAD); processor.HighPass = EditorGUILayout.Toggle(new GUIContent("HighPass", "High Pass Filter"), processor.HighPass); processor.NoiseSuppression = EditorGUILayout.Toggle(new GUIContent("NoiseSuppression", "Noise Suppression"), processor.NoiseSuppression); } else { bool aec = this.aecSp.boolValue; bool aecMobile = this.aecMobileSp.boolValue; aec = EditorGUILayout.Toggle(new GUIContent("AEC", "Acoustic Echo Cancellation"), aec); if (aec && aecMobile) { aecMobile = false; } aecMobile = EditorGUILayout.Toggle(new GUIContent("AEC Mobile", "Acoustic Echo Cancellation Mobile"), aecMobile); if (aec && aecMobile) { aec = false; } this.aecSp.boolValue = aec; this.aecMobileSp.boolValue = aecMobile; if (this.aecSp.boolValue || this.aecMobileSp.boolValue) { if (recorder.MicrophoneType == Recorder.MicType.Photon) { EditorGUILayout.HelpBox("You have enabled AEC here and are using a Photon Mic as input on the Recorder, which might add its own echo cancellation. Please use only one AEC algorithm.", MessageType.Warning); } EditorGUILayout.PropertyField(this.reverseStreamDelayMsSp, new GUIContent("ReverseStreamDelayMs", "Reverse stream delay (hint for AEC) in Milliseconds")); } if (this.aecMobileSp.boolValue) { EditorGUILayout.PropertyField(this.aecMobileComfortNoiseSp, new GUIContent("AEC Mobile Comfort Noise", "Acoustic Echo Cancellation Mobile Comfort Noise")); } EditorGUILayout.PropertyField(this.agcSp, new GUIContent("AGC", "Automatic Gain Control")); if (this.vadSp.boolValue && recorder.VoiceDetection) { EditorGUILayout.HelpBox("You have enabled VAD here and in the associated Recorder. Please use only one Voice Detection algorithm.", MessageType.Warning); } EditorGUILayout.PropertyField(this.vadSp, new GUIContent("VAD", "Voice Activity Detection")); EditorGUILayout.PropertyField(this.highPassSp, new GUIContent("HighPass", "High Pass Filter")); EditorGUILayout.PropertyField(this.noiseSuppressionSp, new GUIContent("NoiseSuppression", "Noise Suppression")); } } if (EditorGUI.EndChangeCheck()) { serializedObject.ApplyModifiedProperties(); } }
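The WebRtcAudioDsp inspector above reads several SerializedProperty handles (bypassSp, aecSp, aecMobileSp, aecMobileComfortNoiseSp, reverseStreamDelayMsSp, agcSp, vadSp, highPassSp, noiseSuppressionSp) plus the processor and recorder references, none of which appear in the listing. The sketch below shows one plausible OnEnable() that caches them; the class skeleton, the Photon.Voice.Unity import, and the field-name strings passed to FindProperty are assumptions for illustration, not the actual editor source.

// Illustrative sketch only: caches the state used by the WebRtcAudioDsp inspector above.
// The serialized field names passed to FindProperty are assumed; the shipped component
// may serialize them under different names.
using UnityEditor;
using UnityEngine;
using Photon.Voice.Unity;

[CustomEditor(typeof(WebRtcAudioDsp))]
public class WebRtcAudioDspEditorSketch : Editor
{
    private WebRtcAudioDsp processor;
    private Recorder recorder;

    private SerializedProperty bypassSp;
    private SerializedProperty aecSp;
    private SerializedProperty aecMobileSp;
    private SerializedProperty aecMobileComfortNoiseSp;
    private SerializedProperty reverseStreamDelayMsSp;
    private SerializedProperty agcSp;
    private SerializedProperty vadSp;
    private SerializedProperty highPassSp;
    private SerializedProperty noiseSuppressionSp;

    private void OnEnable()
    {
        this.processor = (WebRtcAudioDsp)this.target;
        // The DSP post-processes a Recorder expected on the same GameObject.
        this.recorder = this.processor.GetComponent<Recorder>();

        this.bypassSp = this.serializedObject.FindProperty("Bypass");
        this.aecSp = this.serializedObject.FindProperty("AEC");
        this.aecMobileSp = this.serializedObject.FindProperty("AECMobile");
        this.aecMobileComfortNoiseSp = this.serializedObject.FindProperty("AECMobileComfortNoise");
        this.reverseStreamDelayMsSp = this.serializedObject.FindProperty("ReverseStreamDelayMs");
        this.agcSp = this.serializedObject.FindProperty("AGC");
        this.vadSp = this.serializedObject.FindProperty("VAD");
        this.highPassSp = this.serializedObject.FindProperty("HighPass");
        this.noiseSuppressionSp = this.serializedObject.FindProperty("NoiseSuppression");
    }
}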
public override void OnInspectorGUI() { // serializedObject.UpdateIfRequiredOrScript(); if (Application.isPlaying && recorder.RequiresInit) { if (recorder.IsInitialized) { EditorGUILayout.HelpBox("Recorder requires re-initialization. Call Recorder.ReInit().", MessageType.Warning); if (GUILayout.Button("ReInit")) { recorder.ReInit(); } } else { EditorGUILayout.HelpBox("Recorder requires initialization. Call Recorder.Init(VoiceClient, Object).", MessageType.Warning); } } VoiceLogger.ExposeLogLevel(serializedObject, recorder); EditorGUI.BeginChangeCheck(); recorder.ReactOnSystemChanges = EditorGUILayout.Toggle(new GUIContent("React On System Changes", "If true, ReInit when Unity detects Audio Config. changes."), recorder.ReactOnSystemChanges); recorder.TransmitEnabled = EditorGUILayout.Toggle(new GUIContent("Transmit Enabled", "If true, audio transmission is enabled."), recorder.TransmitEnabled); if (recorder.IsInitialized || !recorder.RequiresInit) { recorder.IsRecording = EditorGUILayout.Toggle(new GUIContent("IsRecording", "If true, audio recording is on."), recorder.IsRecording); } if (recorder.IsRecording) { float amplitude = 0f; if (recorder.IsCurrentlyTransmitting) { amplitude = recorder.LevelMeter.CurrentPeakAmp; if (amplitude > 1f) { amplitude /= 32768; } } EditorGUILayout.Slider("Level", amplitude, 0, 1); } recorder.Encrypt = EditorGUILayout.Toggle(new GUIContent("Encrypt", "If true, voice stream is sent encrypted."), recorder.Encrypt); recorder.InterestGroup = (byte)EditorGUILayout.IntField(new GUIContent("Interest Group", "Target interest group that will receive transmitted audio."), recorder.InterestGroup); if (recorder.InterestGroup == 0) { recorder.DebugEchoMode = EditorGUILayout.Toggle(new GUIContent("Debug Echo", "If true, outgoing stream routed back to client via server same way as for remote client's streams."), recorder.DebugEchoMode); } recorder.ReliableMode = EditorGUILayout.Toggle(new GUIContent("Reliable Mode", "If true, stream data sent in reliable mode."), recorder.ReliableMode); EditorGUILayout.LabelField("Codec Parameters", EditorStyles.boldLabel); recorder.FrameDuration = (OpusCodec.FrameDuration)EditorGUILayout.EnumPopup(new GUIContent("Frame Duration", "Outgoing audio stream encoder delay."), recorder.FrameDuration); recorder.SamplingRate = (POpusCodec.Enums.SamplingRate)EditorGUILayout.EnumPopup( new GUIContent("Sampling Rate", "Outgoing audio stream sampling rate."), recorder.SamplingRate); recorder.Bitrate = EditorGUILayout.IntField(new GUIContent("Bitrate", "Outgoing audio stream bitrate."), recorder.Bitrate); EditorGUILayout.LabelField("Audio Source Settings", EditorStyles.boldLabel); recorder.SourceType = (Recorder.InputSourceType)EditorGUILayout.EnumPopup(new GUIContent("Input Source Type", "Input audio data source type"), recorder.SourceType); switch (recorder.SourceType) { case Recorder.InputSourceType.Microphone: recorder.MicrophoneType = (Recorder.MicType)EditorGUILayout.EnumPopup( new GUIContent("Microphone Type", "Which microphone API to use when the Source is set to Microphone."), recorder.MicrophoneType); switch (recorder.MicrophoneType) { case Recorder.MicType.Unity: unityMicrophoneDeviceIndex = EditorGUILayout.Popup("Microphone Device", unityMicrophoneDeviceIndex, Microphone.devices); recorder.UnityMicrophoneDevice = Microphone.devices[unityMicrophoneDeviceIndex]; int minFreq, maxFreq; Microphone.GetDeviceCaps(Microphone.devices[unityMicrophoneDeviceIndex], out minFreq, out maxFreq); EditorGUILayout.LabelField("Microphone Device Caps", 
string.Format("{0}..{1} Hz", minFreq, maxFreq)); break; case Recorder.MicType.Photon: #if UNITY_STANDALONE_WIN || UNITY_STANDALONE_OSX if (Recorder.PhotonMicrophoneEnumerator.IsSupported) { recorder.PhotonMicrophoneDeviceId = EditorGUILayout.Popup("Microphone Device", recorder.PhotonMicrophoneDeviceId, photonDeviceNames); } else { recorder.PhotonMicrophoneDeviceId = -1; EditorGUILayout.HelpBox("PhotonMicrophoneEnumerator Not Supported", MessageType.Error); } #endif break; default: throw new ArgumentOutOfRangeException(); } break; case Recorder.InputSourceType.AudioClip: recorder.AudioClip = EditorGUILayout.ObjectField(new GUIContent("Audio Clip", "Source audio clip."), recorder.AudioClip, typeof(AudioClip), false) as AudioClip; recorder.LoopAudioClip = EditorGUILayout.Toggle(new GUIContent("Loop", "Loop playback for audio clip sources."), recorder.LoopAudioClip); break; case Recorder.InputSourceType.Factory: EditorGUILayout.HelpBox("Add a custom InputFactory method in code.", MessageType.Info); break; default: throw new ArgumentOutOfRangeException(); } recorder.TypeConvert = (Recorder.SampleTypeConv)EditorGUILayout.EnumPopup( new GUIContent("Type Convert", "Force creation of 'short' pipeline and convert audio data to short for 'float' audio sources."), recorder.TypeConvert); EditorGUILayout.LabelField("Voice Activity Detection (VAD)", EditorStyles.boldLabel); recorder.VoiceDetection = EditorGUILayout.Toggle(new GUIContent("Detect", "If true, voice detection enabled."), recorder.VoiceDetection); if (recorder.VoiceDetection) { recorder.VoiceDetectionThreshold = EditorGUILayout.FloatField(new GUIContent("Threshold", "Voice detection threshold (0..1, where 1 is full amplitude)."), recorder.VoiceDetectionThreshold); recorder.VoiceDetectionDelayMs = EditorGUILayout.IntField(new GUIContent("Delay (ms)", "Keep detected state during this time after signal level dropped below threshold. Default is 500ms"), recorder.VoiceDetectionDelayMs); if (recorder.VoiceDetectorCalibrating) { EditorGUILayout.LabelField(string.Format("Calibrating {0} ms", calibrationTime)); } else { calibrationTime = EditorGUILayout.IntField("Calibration Time (ms)", calibrationTime); if (recorder.IsInitialized) { if (GUILayout.Button("Calibrate")) { recorder.VoiceDetectorCalibrate(calibrationTime); } } } } if (EditorGUI.EndChangeCheck()) { serializedObject.ApplyModifiedProperties(); } }
public override void OnInspectorGUI() { this.serializedObject.UpdateIfRequiredOrScript(); if (!this.processor.enabled) { EditorGUILayout.HelpBox("WebRtcAudioDsp is disabled and will not be used.", MessageType.Warning); } if (this.recorder != null && this.recorder.SourceType != Recorder.InputSourceType.Microphone) { EditorGUILayout.HelpBox("WebRtcAudioDsp is better suited to be used with Microphone as Recorder Input Source Type.", MessageType.Warning); } VoiceLogger.ExposeLogLevel(this.serializedObject, this.processor); bool bypassed; EditorGUI.BeginChangeCheck(); if (PhotonVoiceEditorUtils.IsInTheSceneInPlayMode(this.processor.gameObject)) { this.processor.Bypass = EditorGUILayout.Toggle(new GUIContent("Bypass", "Bypass WebRTC Audio DSP"), this.processor.Bypass); bypassed = this.processor.Bypass; } else { EditorGUILayout.PropertyField(this.bypassSp, new GUIContent("Bypass", "Bypass WebRTC Audio DSP")); bypassed = this.bypassSp.boolValue; } if (!bypassed) { if (PhotonVoiceEditorUtils.IsInTheSceneInPlayMode(this.processor.gameObject)) { this.processor.AEC = EditorGUILayout.Toggle(new GUIContent("AEC", "Acoustic Echo Cancellation"), this.processor.AEC); if (this.processor.AEC) { if (this.recorder.MicrophoneType == Recorder.MicType.Photon) { EditorGUILayout.HelpBox("You have enabled AEC here and are using a Photon Mic as input on the Recorder, which might add its own echo cancellation. Please use only one AEC algorithm.", MessageType.Warning); } this.processor.ReverseStreamDelayMs = EditorGUILayout.IntField(new GUIContent("ReverseStreamDelayMs", "Reverse stream delay (hint for AEC) in Milliseconds"), this.processor.ReverseStreamDelayMs); } this.processor.AGC = EditorGUILayout.Toggle(new GUIContent("AGC", "Automatic Gain Control"), this.processor.AGC); if (this.processor.VAD && this.recorder.VoiceDetection) { EditorGUILayout.HelpBox("You have enabled VAD here and in the associated Recorder. Please use only one Voice Detection algorithm.", MessageType.Warning); } this.processor.VAD = EditorGUILayout.Toggle(new GUIContent("VAD", "Voice Activity Detection"), this.processor.VAD); this.processor.HighPass = EditorGUILayout.Toggle(new GUIContent("HighPass", "High Pass Filter"), this.processor.HighPass); this.processor.NoiseSuppression = EditorGUILayout.Toggle(new GUIContent("NoiseSuppression", "Noise Suppression"), this.processor.NoiseSuppression); } else { EditorGUILayout.PropertyField(this.aecSp, new GUIContent("AEC", "Acoustic Echo Cancellation")); if (this.aecSp.boolValue) { if (this.recorder.MicrophoneType == Recorder.MicType.Photon) { EditorGUILayout.HelpBox("You have enabled AEC here and are using a Photon Mic as input on the Recorder, which might add its own echo cancellation. Please use only one AEC algorithm.", MessageType.Warning); } EditorGUILayout.PropertyField(this.reverseStreamDelayMsSp, new GUIContent("ReverseStreamDelayMs", "Reverse stream delay (hint for AEC) in Milliseconds")); } EditorGUILayout.PropertyField(this.agcSp, new GUIContent("AGC", "Automatic Gain Control")); if (this.vadSp.boolValue && this.recorder.VoiceDetection) { EditorGUILayout.HelpBox("You have enabled VAD here and in the associated Recorder. Please use only one Voice Detection algorithm.", MessageType.Warning); } EditorGUILayout.PropertyField(this.vadSp, new GUIContent("VAD", "Voice Activity Detection")); EditorGUILayout.PropertyField(this.highPassSp, new GUIContent("HighPass", "High Pass Filter")); EditorGUILayout.PropertyField(this.noiseSuppressionSp, new GUIContent("NoiseSuppression", "Noise Suppression")); } } if (EditorGUI.EndChangeCheck()) { this.serializedObject.ApplyModifiedProperties(); } }
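The last inspector decides between editing the live component and editing serialized properties with PhotonVoiceEditorUtils.IsInTheSceneInPlayMode, whose body is not part of this listing. Judging from how it is used, it answers whether the target belongs to a scene object while the editor is in play mode, as opposed to a prefab asset or an edit-mode object that should be modified through the SerializedObject API. The following is a hedged sketch of such a check under that assumption, not the actual Photon utility.

// Sketch of a scene-instance-in-play-mode check with the same intent as
// PhotonVoiceEditorUtils.IsInTheSceneInPlayMode; the shipped utility may differ.
using UnityEngine;

public static class EditorPlayModeUtilsSketch
{
    public static bool IsInTheSceneInPlayMode(GameObject go)
    {
        // Prefab assets and other persistent objects have no valid scene handle,
        // so require both play mode and membership in a loaded scene.
        return Application.isPlaying && go.scene.IsValid();
    }
}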