Example #1
        private void RefreshMicrophonesButtonOnClickHandler()
        {
            CustomMicrophone.RequestMicrophonePermission();

            microphonesDropdown.ClearOptions();
            microphonesDropdown.AddOptions(CustomMicrophone.devices.ToList());
        }
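A minimal wiring sketch for the handler above, assuming a refreshButton field of type UnityEngine.UI.Button (the field name is hypothetical; microphonesDropdown is taken from the snippet):

        // Hypothetical wiring; refreshButton is an assumed Button field on the same component.
        private void Start()
        {
            refreshButton.onClick.AddListener(RefreshMicrophonesButtonOnClickHandler);
        }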
Example #2
        private void StopRecord()
        {
            if (!CustomMicrophone.HasConnectedMicrophoneDevices())
            {
                return;
            }

            if (!CustomMicrophone.IsRecording(selectedDevice))
            {
                return;
            }

            CustomMicrophone.End(selectedDevice);

            if (makeCopy)
            {
                recordedClips.Add(MakeCopy($"copy{recordedClips.Count}", recordingTime, frequency, _workingClip));
                audioSource.clip = recordedClips.Last();
            }
            else
            {
                audioSource.clip = _workingClip;
            }

            audioSource.Play();
        }
        public void StopRecord()
        {
            if (!IsRecording || !ReadyToRecord())
            {
                return;
            }

            IsRecording = false;

            CustomMicrophone.End(MicrophoneDevice);

            if (!DetectVoice)
            {
                LastRecordedRaw  = _currentRecordingVoice.ToArray();
                LastRecordedClip = AudioConvert.Convert(LastRecordedRaw, _microphoneWorkingAudioClip.channels);
            }

            if (_currentRecordingVoice != null)
            {
                _currentRecordingVoice.Clear();
            }

            _currentAudioSamples   = null;
            _currentRecordingVoice = null;

#if NET_2_0 || NET_2_0_SUBSET
            if (RecordEndedEvent != null)
            {
                RecordEndedEvent(LastRecordedClip, LastRecordedRaw);
            }
#else
            RecordEndedEvent?.Invoke(LastRecordedClip, LastRecordedRaw);
#endif
        }
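RecordEndedEvent is raised at the end of the method above with the last recorded clip and raw samples; a minimal subscriber sketch, where recorder is an assumed reference to the recording component (names are illustrative, not part of the original example):

        // Hypothetical subscriber; the delegate signature matches the (AudioClip, float[]) invocation above.
        private void OnEnable()
        {
            recorder.RecordEndedEvent += OnRecordEnded;
        }

        private void OnDisable()
        {
            recorder.RecordEndedEvent -= OnRecordEnded;
        }

        private void OnRecordEnded(AudioClip clip, float[] rawSamples)
        {
            Debug.Log($"Recorded {rawSamples.Length} samples ({clip.length} seconds)");
        }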
Example #4
    void Start()
    {
        if (!photonView.IsMine)
        {
            return;
        }

        audioListener.enabled = true;

        if (!CustomMicrophone.HasConnectedMicrophoneDevices())
        {
            CustomMicrophone.RefreshMicrophoneDevices();
        }
        Debug.Log(CustomMicrophone.devices.Length + " microphone devices found");

        if (!CustomMicrophone.HasConnectedMicrophoneDevices())
        {
            Debug.Log("no microphone device connected");
        }

        _microphoneDevice = CustomMicrophone.devices[0];
        audioClip         = CustomMicrophone.Start(_microphoneDevice, true, lengthSec, frequency);

        //subscribe to events
        VoiceChat.Instance.OnStatusUpdate += OnStatusupdate;
        VoiceChat.Instance.OnIDUpdate     += OnIDUpdate;

        //initialize peer object for this client
        VoiceChat.Instance.InitializePeer();
    }
        /// <summary>
        /// Processes sample data from the microphone recording, fills the sample buffer and sends it over the network
        /// </summary>
        private void ProcessRecording()
        {
            int currentPosition = CustomMicrophone.GetPosition(_microphoneDevice);

            // fix for incorrect position at the end of recording
            if (_stopRecordPosition != -1)
            {
                currentPosition = _stopRecordPosition;
            }

            if (recording || currentPosition != _lastPosition)
            {
                float[] array = new float[Constants.RecordingTime * Constants.SampleRate];
                CustomMicrophone.GetRawData(ref array, _workingClip);

                if (_lastPosition != currentPosition && array.Length > 0)
                {
                    if (_lastPosition > currentPosition)
                    {
                        _buffer.AddRange(GetChunk(array, _lastPosition, array.Length - _lastPosition));
                        _buffer.AddRange(GetChunk(array, 0, currentPosition));
                    }
                    else
                    {
                        _buffer.AddRange(GetChunk(array, _lastPosition, currentPosition - _lastPosition));
                    }

                    // sends data in chunks
                    if (_buffer.Count >= Constants.ChunkSize)
                    {
                        SendDataToNetwork(_buffer.GetRange(0, Constants.ChunkSize));
                        _buffer.RemoveRange(0, Constants.ChunkSize);
                    }
                }

                _lastPosition = currentPosition;
            }
            else
            {
                _lastPosition = currentPosition;

                if (_buffer.Count > 0)
                {
                    // sends remaining data in chunks
                    if (_buffer.Count >= Constants.ChunkSize)
                    {
                        SendDataToNetwork(_buffer.GetRange(0, Constants.ChunkSize));
                        _buffer.RemoveRange(0, Constants.ChunkSize);
                    }
                    // sends all remaining data
                    else
                    {
                        SendDataToNetwork(_buffer);
                        _buffer.Clear();
                    }
                }
            }
        }
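GetChunk is not part of this snippet; a minimal sketch of a helper with the behaviour the calls above rely on (copying length samples starting at offset), offered as an assumption:

        // Assumed helper: returns 'length' samples from 'samples' starting at 'offset'.
        private float[] GetChunk(float[] samples, int offset, int length)
        {
            float[] chunk = new float[length];
            System.Array.Copy(samples, offset, chunk, 0, length);
            return chunk;
        }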
Example #6
        private void StartRecord()
        {
            if (!CustomMicrophone.HasConnectedMicrophoneDevices())
            {
                return;
            }

            _workingClip = CustomMicrophone.Start(selectedDevice, false, recordingTime, frequency);
        }
 public void StopRecording()
 {
     if (_recordingRoutine != 0)
     {
         CustomMicrophone.End(_microphoneID);
         Runnable.Stop(_recordingRoutine);
         _recordingRoutine = 0;
     }
 }
        /// <summary>
        /// Currently works as a synchronous function whose callback is invoked when the app unpauses;
        /// it may not work properly if the option that runs an additional frame while paused is enabled
        /// </summary>
        /// <param name="callback"></param>
        public void RequestMicrophonePermission(Action <bool> callback)
        {
#if UNITY_ANDROID
            Permission.RequestUserPermission(Permission.Microphone);
#elif UNITY_WEBGL
            CustomMicrophone.RequestMicrophonePermission();
#endif
            callback?.Invoke(HasMicrophonePermission());
        }
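A usage sketch for the callback overload above; the lambda only logs the result and is illustrative:

        // Illustrative caller: logs whether permission was granted once the request completes.
        private void CheckPermission()
        {
            RequestMicrophonePermission(granted =>
            {
                Debug.Log(granted ? "Microphone permission granted" : "Microphone permission denied");
            });
        }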
Example #9
        private void StartRecord()
        {
            if (!CustomMicrophone.HasConnectedMicrophoneDevices())
            {
                return;
            }

            _workingClip = CustomMicrophone.Start(CustomMicrophone.devices[0], true, 4, 44100);
        }
    private IEnumerator RecordingHandler()
    {
        //Log.Debug("STT.RecordingHandler()", "devices: {0}", Microphone.devices);
        _recording = CustomMicrophone.Start(_microphoneID, true, _recordingBufferSize, _recordingHZ);

        yield return(null);      // let _recordingRoutine get set..

        if (_recording == null)
        {
            StopRecording();
            yield break;
        }

        bool bFirstBlock = true;
        int  midPoint    = _recording.samples / 2;

        float[] samples = null;

        while (_recordingRoutine != 0 && _recording != null)
        {
            int writePos = CustomMicrophone.GetPosition(_microphoneID);
            if (writePos > _recording.samples || !CustomMicrophone.IsRecording(_microphoneID))
            {
                Log.Error("STT.RecordingHandler()", "Microphone disconnected.");
                StopRecording();
                yield break;
            }

            if ((bFirstBlock && writePos >= midPoint) ||
                (!bFirstBlock && writePos < midPoint))
            {
                // front block is recorded, make a RecordClip and pass it onto our callback.
                samples = new float[midPoint];
                _recording.GetData(samples, bFirstBlock ? 0 : midPoint);

                AudioData record = new AudioData();
                record.MaxLevel = Mathf.Max(Mathf.Abs(Mathf.Min(samples)), Mathf.Max(samples));
                record.Clip     = AudioClip.Create("Recording", midPoint, _recording.channels, _recordingHZ, false);
                record.Clip.SetData(samples, 0);

                _service.OnListen(record);

                bFirstBlock = !bFirstBlock;
            }
            else
            {
                // calculate the number of samples remaining until we're ready for a block of audio,
                // and wait the amount of time it will take to record them.
                int   remaining     = bFirstBlock ? (midPoint - writePos) : (_recording.samples - writePos);
                float timeRemaining = (float)remaining / (float)_recordingHZ;

                yield return(new WaitForSeconds(timeRemaining));
            }
        }

        yield break;
    }
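The earlier StopRecording snippet stops this coroutine via Runnable.Stop(_recordingRoutine); starting it presumably looks like the sketch below, where StartRecording is a hypothetical counterpart using the Watson SDK's Runnable helper:

    // Assumed counterpart to StopRecording(); Runnable.Run returns an ID consumed by Runnable.Stop.
    public void StartRecording()
    {
        if (_recordingRoutine == 0)
        {
            _recordingRoutine = Runnable.Run(RecordingHandler());
        }
    }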
        public bool HasMicrophonePermission()
        {
#if UNITY_ANDROID
            return(Permission.HasUserAuthorizedPermission(Permission.Microphone));
#elif UNITY_WEBGL
            return(CustomMicrophone.HasMicrophonePermission());
#else
            return(true);
#endif
        }
        /// <summary>
        /// Initializes the buffer, refreshes the microphone list and selects the first microphone device if one exists
        /// </summary>
        private void Start()
        {
            _buffer = new List <float>();

            RefreshMicrophones();

            if (CustomMicrophone.HasConnectedMicrophoneDevices())
            {
                _microphoneDevice = CustomMicrophone.devices[0];
            }
        }
Example #13
        private void Update()
        {
            permissionStatusText.text = string.Format("Microphone permission: {1} for '{0}'", selectedDevice,
                                                      CustomMicrophone.HasMicrophonePermission() ? "<color=green>granted</color>" : "<color=red>denied</color>");

            if (CustomMicrophone.devices.Length > 0)
            {
                recordingStatusText.text = string.Format("Microphone status: {0}",
                                                         CustomMicrophone.IsRecording(selectedDevice) ? "<color=green>recording</color>" : "<color=yellow>idle</color>");
            }
        }
Example #14
        private void Start()
        {
#if UNITY_WEBGL && !UNITY_EDITOR
            _buffer           = new Buffer();
            _audioSource.clip = AudioClip.Create("BufferedClip", _sampleRate * _recordingTime, 1, _sampleRate, false);
#endif
            CustomMicrophone.RequestMicrophonePermission();

            startRecord.onClick.AddListener(StartRecordHandler);
            stopRecord.onClick.AddListener(StopRecordHandler);
        }
        public void StartRecord(bool withVoiceDetection = false)
        {
            if (IsRecording)
            {
                return;
            }

            if (!ReadyToRecord())
            {
#if NET_2_0 || NET_2_0_SUBSET
                if (RecordFailedEvent != null)
                {
                    RecordFailedEvent();
                }
#else
                RecordFailedEvent?.Invoke();
#endif
                return;
            }

            DetectVoice = withVoiceDetection;

            _maxVoiceFrame = 0;

            _currentRecordingVoice = new List <float>();

            if (_microphoneWorkingAudioClip != null)
            {
                MonoBehaviour.Destroy(_microphoneWorkingAudioClip);
            }

            if (LastRecordedClip != null)
            {
                MonoBehaviour.Destroy(LastRecordedClip);
            }

            _microphoneWorkingAudioClip = CustomMicrophone.Start(MicrophoneDevice, true, 1, 16000);

            _currentAudioSamples = new float[_microphoneWorkingAudioClip.samples * _microphoneWorkingAudioClip.channels];

            IsRecording = true;

#if NET_2_0 || NET_2_0_SUBSET
            if (RecordStartedEvent != null)
            {
                RecordStartedEvent();
            }
#else
            RecordStartedEvent?.Invoke();
#endif
        }
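A usage sketch for StartRecord with voice detection, where recorder is an assumed reference to the component above (RecordFailedEvent is parameterless in this class, as shown by the invocation earlier in the method):

        // Hypothetical caller; subscribes to the failure event before starting a voice-detected recording.
        private void BeginVoiceRecording()
        {
            recorder.RecordFailedEvent += () => Debug.Log("Recording failed to start (no device or permission)");
            recorder.StartRecord(withVoiceDetection: true);
        }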
Example #16
        private AudioClip MakeCopy(string name, int recordingTime, int frequency, AudioClip sourceClip)
        {
            float[] array = new float[recordingTime * frequency];
            if (CustomMicrophone.GetRawData(ref array, sourceClip))
            {
                AudioClip audioClip = AudioClip.Create(name, recordingTime * frequency, 1, frequency, false);
                audioClip.SetData(array, 0);
                audioClip.LoadAudioData();

                return(audioClip);
            }

            return(null);
        }
Example #17
        /// <summary>
        /// Starts recording of microphone
        /// </summary>
        public void StartRecord()
        {
            if (CustomMicrophone.IsRecording(_microphoneDevice) || !CustomMicrophone.HasConnectedMicrophoneDevices())
            {
                RecordFailedEvent?.Invoke("record already started or no microphone device connected");
                return;
            }

            recording = true;

            _workingClip = CustomMicrophone.Start(_microphoneDevice, true, Constants.RecordingTime, Constants.SampleRate);

            RecordStartedEvent?.Invoke();
        }
Example #18
        private void StopRecord()
        {
            if (!CustomMicrophone.HasConnectedMicrophoneDevices())
            {
                return;
            }

            if (!CustomMicrophone.IsRecording(CustomMicrophone.devices[0]))
            {
                return;
            }

            CustomMicrophone.End(CustomMicrophone.devices[0]);
        }
Example #19
        private void RefreshMicrophoneDevicesButtonOnclickHandler()
        {
            RequestPermission();
            CustomMicrophone.RefreshMicrophoneDevices();

            if (!CustomMicrophone.HasConnectedMicrophoneDevices())
            {
                return;
            }

            devicesDropdown.ClearOptions();
            devicesDropdown.AddOptions(CustomMicrophone.devices.ToList());
            DevicesDropdownValueChangedHandler(0);
        }
    // Start is called before the first frame update
    void Start()
    {
        var voice = GameObject.Find("[PeerJS]VoiceChat");

        if (voice != null)
        {
            Destroy(voice);
        }

        var microphone = GameObject.Find("[FG]Microphone");

        if (microphone != null)
        {
            if (CustomMicrophone.IsRecording(CustomMicrophone.devices[0]))
            {
                CustomMicrophone.End(CustomMicrophone.devices[0]);
            }
            Destroy(microphone);
        }

        // request microphone permissions at the start of the menu
        if (!CustomMicrophone.HasMicrophonePermission())
        {
            CustomMicrophone.RequestMicrophonePermission();
        }
        if (!CustomMicrophone.HasConnectedMicrophoneDevices())
        {
            CustomMicrophone.RefreshMicrophoneDevices();
        }
        //Debug.Log(CustomMicrophone.devices.Length + " microphone devices found");

        // destroys game tracker from previous game
        if (GameObject.FindGameObjectWithTag("GameTracker") != null)
        {
            Destroy(GameObject.FindGameObjectWithTag("GameTracker"));
        }

        // disconnects the player if they were already connected
        if (PhotonNetwork.IsConnected)
        {
            PhotonNetwork.Disconnect();
            Cursor.lockState = CursorLockMode.None;
            Cursor.visible   = true;
        }
        PhotonNetwork.ConnectUsingSettings();
        //Debug.Log(PhotonNetwork.PhotonServerSettings);
    }
Example #21
        private void StopRecordHandler()
        {
            if (!CustomMicrophone.HasConnectedMicrophoneDevices())
            {
                return;
            }

            CustomMicrophone.End(CustomMicrophone.devices[0]);
            _audioSource.Stop();

#if UNITY_WEBGL && !UNITY_EDITOR
            _buffer.data.Clear();
            _buffer.position = 0;
#endif
        }
Example #22
        private void StartRecordHandler()
        {
            if (!CustomMicrophone.HasConnectedMicrophoneDevices())
            {
                return;
            }

#if UNITY_EDITOR
            _audioSource.clip =
#endif
            CustomMicrophone.Start(CustomMicrophone.devices[0], true, _recordingTime, _sampleRate);
#if UNITY_EDITOR
            _audioSource.loop = true;
            _audioSource.Play();
#endif
        }
Example #23
    private void UpdateRipples()
    {
        if (!photonView.IsMine)
        {
            return;
        }

        int currentPosition = CustomMicrophone.GetPosition(_microphoneDevice);

        //checks how many samples were recorded since the last time we calculated;
        //if it wrapped around the buffer (past the limit and back to 0) we just
        //process the rest of the array and next time start from 0
        if (currentPosition != _lastPosition)
        {
            int     length = Constants.RecordingTime * Constants.SampleRate;
            float[] data   = new float[length];

            CustomMicrophone.GetRawData(ref data, recorder.AudioClip);

            if (currentPosition > _lastPosition)
            {
                int len = currentPosition - _lastPosition;
                decibelsValue = ComputeDB(data, _lastPosition, ref len);
                _lastPosition = currentPosition;
            }
            else
            {
                int len = data.Length - _lastPosition;
                decibelsValue = ComputeDB(data, _lastPosition, ref len);
                _lastPosition = 0;
            }

            //update sound ripples animation on all clients
            photonView.RPC(nameof(UpdateSoundRiples), RpcTarget.All, decibelsValue);

            if (count < 1 / _updateFrequency)
            {
                count++;
                positiveInLastSecond |= decibelsValue > 0;
            }
            else
            {
                count = 1;
                positiveInLastSecond = decibelsValue > 0;
            }
        }
    }
        public IEnumerator OneTimeRecord(int durationSec, Action <AudioClip> callback, int sampleRate = 16000)
        {
            AudioClip clip = CustomMicrophone.Start(MicrophoneDevice, false, durationSec, sampleRate);

            yield return(new WaitForSeconds(durationSec));

            CustomMicrophone.End(MicrophoneDevice);

#if !NET_2_0 && !NET_2_0_SUBSET
            callback?.Invoke(clip);
#else
            if (callback != null)
            {
                callback(clip);
            }
#endif
        }
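A usage sketch for the coroutine above, assuming it lives on a component referenced as recorder and that an audioSource field is available for playback (both names are illustrative):

        // Illustrative caller: records for 3 seconds, then plays back the clip passed to the callback.
        private void RecordAndPlay()
        {
            StartCoroutine(recorder.OneTimeRecord(3, clip =>
            {
                audioSource.clip = clip;
                audioSource.Play();
            }));
        }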
Example #25
        /// <summary>
        /// Stops recording of microphone
        /// </summary>
        public void StopRecord()
        {
            if (!CustomMicrophone.IsRecording(_microphoneDevice))
            {
                return;
            }

            recording = false;

            if (CustomMicrophone.HasConnectedMicrophoneDevices())
            {
                CustomMicrophone.End(_microphoneDevice);
            }

            if (_workingClip != null)
            {
                Destroy(_workingClip);
            }

            RecordEndedEvent?.Invoke();
        }
        public IEnumerator OneTimeRecord(int durationSec, Action <float[]> callback, int sampleRate = 16000)
        {
            AudioClip clip = CustomMicrophone.Start(MicrophoneDevice, false, durationSec, sampleRate);

            yield return(new WaitForSeconds(durationSec));

            CustomMicrophone.End(MicrophoneDevice);

            float[] array = new float[clip.samples * clip.channels];

            CustomMicrophone.GetRawData(ref array, clip);

#if !NET_2_0 && !NET_2_0_SUBSET
            callback?.Invoke(array);
#else
            if (callback != null)
            {
                callback(array);
            }
#endif
        }
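The raw-samples overload can be driven the same way; this illustrative sketch computes a simple peak level from the returned buffer:

        // Illustrative caller: records for 2 seconds and logs the peak absolute sample value.
        private void RecordAndMeasure()
        {
            StartCoroutine(recorder.OneTimeRecord(2, samples =>
            {
                float peak = 0f;
                for (int i = 0; i < samples.Length; i++)
                {
                    peak = Mathf.Max(peak, Mathf.Abs(samples[i]));
                }

                Debug.Log("Peak level: " + peak);
            }));
        }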
Example #27
        /// <summary>
        /// Processes sample data from the microphone recording, fills the sample buffer and sends it over the network
        /// </summary>
        private void ProcessRecording()
        {
            int currentPosition = CustomMicrophone.GetPosition(_microphoneDevice);

            if (recording || currentPosition != _lastPosition)
            {
                float[] array = new float[Constants.RecordingTime * Constants.SampleRate];
                CustomMicrophone.GetRawData(ref array, _workingClip);

                if (_lastPosition != currentPosition && array.Length > 0)
                {
                    int lastPosition = _lastPosition;
                    _lastPosition = currentPosition;

                    if (lastPosition > _lastPosition)
                    {
                        _buffer.AddRange(array.ToList().GetRange(lastPosition, array.Length - lastPosition));
                        _buffer.AddRange(array.ToList().GetRange(0, _lastPosition));
                    }
                    else
                    {
                        _buffer.AddRange(array.ToList().GetRange(lastPosition, _lastPosition - lastPosition));
                    }
                }

                if (_buffer.Count >= Constants.ChunkSize)
                {
                    SendDataToNetwork(_buffer.GetRange(0, Constants.ChunkSize));
                    _buffer.RemoveRange(0, Constants.ChunkSize);
                }
            }
            else
            {
                if (_buffer.Count > 0)
                {
                    SendDataToNetwork(_buffer);
                    _buffer.Clear();
                }
            }
        }
 /// <summary>
 /// Requests microphone permission and refreshes the list of microphones on the WebGL platform
 /// </summary>
 public void RefreshMicrophones()
 {
     CustomMicrophone.RequestMicrophonePermission();
 }
        public void Update()
        {
            if (IsRecording)
            {
                _currentSamplePosition = CustomMicrophone.GetPosition(MicrophoneDevice);
                CustomMicrophone.GetRawData(ref _currentAudioSamples, _microphoneWorkingAudioClip);

                if (DetectVoice)
                {
                    bool isTalking = _voiceDetectionManager.HasDetectedVoice(AudioClip2ByteConverter.FloatToByte(_currentAudioSamples));

                    if (isTalking)
                    {
                        _endTalkingDelay = 0f;
                    }
                    else
                    {
                        _endTalkingDelay += Time.deltaTime;
                    }

                    if (!_isTalking && isTalking)
                    {
                        _isTalking = true;

#if NET_2_0 || NET_2_0_SUBSET
                        if (TalkBeganEvent != null)
                        {
                            TalkBeganEvent();
                        }
#else
                        TalkBeganEvent?.Invoke();
#endif
                    }
                    else if (_isTalking && !isTalking && _endTalkingDelay >= _speechRecognitionManager.CurrentConfig.voiceDetectionEndTalkingDelay)
                    {
                        _isTalking = false;

                        LastRecordedRaw  = _currentRecordingVoice.ToArray();
                        LastRecordedClip = AudioConvert.Convert(LastRecordedRaw, _microphoneWorkingAudioClip.channels);

                        _currentRecordingVoice.Clear();
#if NET_2_0 || NET_2_0_SUBSET
                        if (TalkEndedEvent != null)
                        {
                            TalkEndedEvent(LastRecordedClip, LastRecordedRaw);
                        }
#else
                        TalkEndedEvent?.Invoke(LastRecordedClip, LastRecordedRaw);
#endif
                    }
                    else if (_isTalking && isTalking)
                    {
                        AddAudioSamplesIntoBuffer();
                    }
                }
                else
                {
                    AddAudioSamplesIntoBuffer();
                }

                _previousSamplePosition = _currentSamplePosition;
            }
        }
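The voice-detection branch above raises TalkBeganEvent and TalkEndedEvent; a minimal subscriber sketch, with recorder again standing in for a reference to this component:

        // Hypothetical subscriber for the voice-detection events used in Update() above.
        private void SubscribeToVoiceEvents()
        {
            recorder.TalkBeganEvent += () => Debug.Log("Voice detected");
            recorder.TalkEndedEvent += (clip, rawSamples) => Debug.Log($"Voice segment captured: {rawSamples.Length} samples");
        }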
 public bool HasConnectedMicrophoneDevices()
 {
     return(CustomMicrophone.HasConnectedMicrophoneDevices());
 }