public void stopRecording()
    {
        // Stop capturing from the default microphone (null = default device).
        Microphone.End(null);
        // Informational trace, not an error condition: use Debug.Log, not Debug.LogError.
        Debug.Log("Audio Time!!!" + audioSource1.time);

        // Hand over to Sarah's closing clip and flag that her end-speech started.
        Sarah.clip = GameObject.Find("Sarah").GetComponent <SarahScript>().endClip;
        Sarah.Play();
        SarahEndSpeakStart = true;

        Debug.Log(audioSource1.clip.length);

        // Restore full, audible playback on the recording source.
        audioSource1.volume = 1;
        audioSource1.mute   = false;
    }
Esempio n. 2
0
 /// <summary>
 /// Stops the active recording session and tears down the capture routine.
 /// </summary>
 public void StopRecording()
 {
     // Forward the last transcription, if any (callback currently disabled).
     bool hasTranscription = lastTranscription != "";
     if (hasTranscription)
     {
         //m_callbackMethod(lastTranscription);
     }

     Active = false;

     // A non-zero handle means the microphone and polling coroutine are live.
     bool routineRunning = _recordingRoutine != 0;
     if (routineRunning)
     {
         Microphone.End(_microphoneID);
         Runnable.Stop(_recordingRoutine);
         _recordingRoutine = 0;
     }
 }
Esempio n. 3
0
    /// <summary>
    /// Stops any in-progress capture and releases the recorded clip.
    /// </summary>
    public void CleanUp()
    {
        // End the capture only when a named device is actually recording.
        bool capturing = !string.IsNullOrEmpty(DeviceName) && Microphone.IsRecording(DeviceName);
        if (capturing)
        {
            Microphone.End(DeviceName);
        }

        // Destroy the clip so the recorded audio is not kept alive by the source.
        var recordedClip = audio.clip;
        if (recordedClip != null)
        {
            UnityEngine.Object.DestroyImmediate(recordedClip, true);
            audio.clip = null;
        }
    }
Esempio n. 4
0
 /// <summary>
 /// Records the new state; deactivating stops the default-microphone capture.
 /// </summary>
 public void onSetState(bool state)
 {
     m_state = state;

     // Only the transition to the inactive state needs any work here.
     if (!state)
     {
         // Stop recording from the default microphone
         Microphone.End(null);
         // Send the StopSendingAudio event
         //AudioTransmissionWriter.Send(new AudioTransmission.Update().AddStopSendingAudio(new StopSendingAudio()));
     }
 }
Esempio n. 5
0
    /// <summary>
    /// Stops playback and starts a fresh looping recording from the default mic.
    /// </summary>
    public void Grabar()
    {
        // Discard the previous take before recording again.
        audioSource.Stop();
        tempRecording.Clear();

        // Restart the default microphone on a 1-second looping clip at 44.1 kHz.
        Microphone.End(null);
        audioSource.clip = Microphone.Start(null, true, 1, 44100);
        Invoke("ResizeRecording", 1);

        // Reflect the recording state in audio feedback and the UI.
        controlAudio.ReproducirSonido(NombresAudios.Grabando);
        PanelListoProcesar.SetActive(false);
        Grabando = true;
        txtEstado.SetText("[Grabando]");
    }
Esempio n. 6
0
 /// <summary>
 /// Starts a non-looping recording on the default microphone.
 /// </summary>
 /// <returns>True on success; false if the microphone could not be started.</returns>
 public static bool TryStartRecording()
 {
     try
     {
         // End any previous session before starting a fresh capture.
         Microphone.End(null);
         clip = Microphone.Start(null, false, maxRecordTime, samplingRate);
         Debuger.Log("开始录音...");
     }
     catch (System.Exception e)
     {
         // Surface the failure instead of silently swallowing the exception
         // (the original caught `e` and never used it).
         Debuger.Log("TryStartRecording failed: " + e.Message);
         return(false);
     }
     return(true);
 }
    /// <summary>
    /// Waits two seconds of capture, converts the clip to frequency bins for
    /// the command detector, then stops the default microphone.
    /// </summary>
    private IEnumerator DoTakeSample()
    {
        yield return new WaitForSeconds(2);

        // Pull the raw samples out of the current clip.
        _audioData = new float[SAMPLES];
        _currentAudioClip.GetData(_audioData, 0);

        var spectrum = GetFrecuencySampleFromAudioData(_audioData);

        // Normalise the first SAMPLES/DIVISOR bins into the detector buffer.
        int binCount = SAMPLES / DIVISOR;
        for (var i = 0; i < binCount; i++)
        {
            _commandDetectorData[i] = spectrum[i] / ALL_SAMPLES;
        }

        Microphone.End(null);
    }
Esempio n. 8
0
 /// <summary>
 /// Stops the microphone when the app pauses; restarts capture on resume.
 /// </summary>
 private void OnApplicationPause(bool pauseStatus)
 {
     if (!pauseStatus)
     {
         // Resuming: re-initialise capture on the device picked in the dropdown.
         iniMicDeviceCap(_dropdown.options[_dropdown.value].text);
         return;
     }

     // Pausing: end the capture if one is running.
     if (Microphone.IsRecording(deviceName))
     {
         Microphone.End(deviceName);
     }
 }
Esempio n. 9
0
    /// <summary>
    /// Ends the capture, copies the recorded samples out of the clip, and
    /// forwards them to the audio-buffer handler.
    /// </summary>
    public void StopRecording()
    {
        // Nothing to do unless a capture session is active.
        if (!isRecording)
        {
            return;
        }

        source.volume = 1;
        isRecording   = false;
        Microphone.End(selectedDevice);

        // Extract the raw PCM data for processing.
        int sampleCount = source.clip.samples * source.clip.channels;
        samples = new float[sampleCount];
        source.clip.GetData(samples, 0);

        HandleAudioBuffer();
    }
Esempio n. 10
0
    /// <summary>
    /// This event is fired when the recognizer stops, whether from Stop() being called, a timeout occurring, or some other error.
    /// Typically, this will simply return "Complete". In this case, we check to see if the recognizer timed out.
    /// </summary>
    /// <param name="cause">An enumerated reason for the session completing.</param>
    private void DictationRecognizer_DictationComplete(DictationCompletionCause cause)
    {
        // If Timeout occurs, the user has been silent for too long.
        // With dictation, the default timeout after a recognition is 20 seconds.
        // The default timeout with initial silence is 5 seconds.
        if (cause == DictationCompletionCause.TimeoutExceeded)
        {
            // Stop capturing and tell the user that speech input has ended.
            Microphone.End(deviceName);

            speechText.GetComponent <TextMesh>().text = "Speech has ended.";
            // Empties the accumulated dictation text (textSoFar is presumably a
            // StringBuilder — TODO confirm; Clear() would be equivalent if so).
            textSoFar.Remove(0, textSoFar.ToString().Length);
            SendMessage("ResetAfterTimeout");
        }
    }
 // Attempts to restart microphone recording. If the configured device can no
 // longer be found at all, recording simply stays stopped.
 private void RestartMicrophone()
 {
     Microphone.End(DeviceName);
     IsRecording = false;

     bool deviceAvailable = Microphone.devices.Contains(DeviceName);
     if (!deviceAvailable)
     {
         Debug.Log("Restart Microphone Failed: " + DeviceName);
         return;
     }

     Debug.Log("Restart Microphone Success: " + DeviceName);
     StartRecording(DeviceName);
 }
Esempio n. 12
0
 /// <summary>
 /// Stops the default microphone if recording; otherwise stops clip playback.
 /// </summary>
 void Stop()
 {
     // Use null consistently to address the default device — the original
     // mixed "" (in IsRecording) and null (in End) for the same microphone.
     // Unity treats null/empty as the default device, so behavior is unchanged.
     if (Microphone.IsRecording(null))
     {
         Microphone.End(null);
     }
     else
     {
         // Cache the AudioSource instead of resolving the component twice.
         var source = GetComponent <AudioSource>();
         if (source.clip != null)
         {
             source.Stop();
         }
     }
 }
Esempio n. 13
0
    /// <summary>
    /// Ends the recording on the first device, saves it as a timestamped WAV,
    /// and uploads it via HTTP.
    /// </summary>
    private void OnClickEndBtn()
    {
        // Ignore clicks when there is no microphone or nothing is recording.
        if ((isHaveMicrophone == false) || (!Microphone.IsRecording(devices[0])))
        {
            return;
        }

        // Stop the recording.
        Microphone.End(devices[0]);

        // Build the timestamped name once and reuse it for both save and upload
        // (the original rebuilt the identical string twice).
        string fileName = time.Hour + "-" + time.Minute + "-" + time.Second;

        bool saved = SaveWav(fileName, aud.clip);
        if (!saved)
        {
            // The original silently discarded the save result.
            Debug.LogError("Failed to save recorded WAV: " + fileName);
        }

        StartCoroutine(IEHttpGet(fileName));
    }
        /// <summary>
        /// This event is fired when the recognizer stops, whether from StopRecording() being called, a timeout occurring, or some other error.
        /// Typically, this will simply return "Complete". In this case, we check to see if the recognizer timed out.
        /// </summary>
        /// <param name="cause">An enumerated reason for the session completing.</param>
        private void DictationRecognizer_DictationComplete(DictationCompletionCause cause)
        {
            // If Timeout occurs, the user has been silent for too long.
            if (cause == DictationCompletionCause.TimeoutExceeded)
            {
                // Stop capturing and replace the result with a retry prompt.
                Microphone.End(deviceName);

                dictationResult = "Dictation has timed out. Please try again.";
            }

            // Notify listeners that dictation finished, then reset accumulated state.
            MixedRealityToolkit.InputSystem.RaiseDictationComplete(inputSource, dictationResult, dictationAudioClip);
            textSoFar       = null;
            dictationResult = string.Empty;
        }
    // Start is called before the first frame update.
    // Begins a looping 5-second capture from the first microphone and blocks
    // until the driver actually starts delivering samples.
    void Start()
    {
        //WebSocket = GetComponent<UnityWebSocketScript>();

        // Stop any prior recording first; null targets the default device.
        Microphone.End(null);

        _audio = GetComponent <AudioSource>();

        // Record a looping 5-second clip from the first available microphone.
        _device = Microphone.devices[0];
        _clip   = Microphone.Start(_device, true, 5, FREQUENCY);

        // Busy-wait until recording has actually begun.
        // Bug fix: query the same device we started recording on, not null —
        // the original polled the default device while recording on _device.
        while (!(Microphone.GetPosition(_device) > 0))
        {
        }
    }
Esempio n. 16
0
    /// <summary>
    /// Push-to-talk: key down starts a 4-second 16 kHz capture, key up stops
    /// it, saves the clip to disk, and triggers speech translation.
    /// </summary>
    void Update()
    {
        bool pressed  = Input.GetKeyDown(recordingKey);
        bool released = Input.GetKeyUp(recordingKey);

        if (pressed)
        {
            aud.clip = Microphone.Start(null, false, 4, 16000);
        }

        if (released)
        {
            Microphone.End(null);
            SavWav.Save("myfile", aud.clip);
            VogSpeechTranslate();
        }
    }
Esempio n. 17
0
    /// <summary>
    /// Stops any in-progress capture and destroys the recorded clip.
    /// </summary>
    public void CleanUp()
    {
        // End the capture only when a named device is actually recording.
        if (!string.IsNullOrEmpty(DeviceName) &&
            Microphone.IsRecording(DeviceName))
        {
            Microphone.End(DeviceName);
        }

        // Cache the AudioSource — the original resolved the component three times.
        var source = GetComponent <AudioSource>();
        if (source.clip != null)
        {
            // Destroy the clip asset and detach it from the source.
            UnityEngine.Object.DestroyImmediate(source.clip, true);
            source.clip = null;
        }
    }
Esempio n. 18
0
    /// <summary>
    /// Smoke test: records from the default microphone for five seconds, then
    /// plays the capture back. The "1"/"2"/"3" logs mark progress stages.
    /// </summary>
    IEnumerator Test()
    {
        AudioSource aud = GetComponent <AudioSource>();

        Debug.Log("1");
        // Allocate a 300-second buffer; only ~5 s of it will be filled.
        aud.clip = Microphone.Start(null, false, 300, 44100);
        Debug.Log("2");

        yield return new WaitForSeconds(5);

        Microphone.End(null);
        aud.Play();
        Debug.Log("3");
    }
Esempio n. 19
0
        #pragma warning disable 0618
    /// <summary>
    /// Toggles microphone recording: starts a looping capture when idle;
    /// otherwise stops, trims the clip to the samples actually recorded,
    /// and saves the result as a WAV file.
    /// </summary>
    public static void ToggleRecord()
    {
        if (!hasMicrophone())
        {
            AlertsService.makeAlert("Sem microfone", "Nenhum microfone foi encontrado em seu dispositivo.", "Entendi");
            return;
        }

        ConfigureMicrophone();

        if (isRecording())         //Recording is in progress, then stop it.
        {
            SavWav.instance.Init();
            var fileName = GetAudioFileName();
            // Capture the write position before End() invalidates it.
            var position = Microphone.GetPosition(micName);

            Microphone.End(micName);             //Stop the audio recording

            var soundData = new float[audioSource.clip.samples * audioSource.clip.channels];
            audioSource.clip.GetData(soundData, 0);

            //Create shortened array for the data that was used for recording
            var newData = new float[position * audioSource.clip.channels];

            // Bulk-copy the used samples (replaces the original element-by-element loop).
            System.Array.Copy(soundData, newData, newData.Length);

            //Creates a newClip with the correct length
            var newClip = AudioClip.Create(fileName,
                                           position,
                                           audioSource.clip.channels,
                                           audioSource.clip.frequency,
                                           true, false);

            newClip.SetData(newData, 0);              //Give it the data from the old clip

            //Replace the old clip
            AudioClip.Destroy(audioSource.clip);
            audioSource.clip = newClip;

            SavWav.instance.Save(fileName, audioSource.clip);
        }
        else         // Starts the recording
        {
            audioSource.clip = Microphone.Start(micName, true, 600, maxFreq);
        }
    }
Esempio n. 20
0
    // Immediate-mode GUI for microphone control: draws a Record / "Stop and
    // Play" button plus a selection grid for choosing among connected devices.
    void OnGUI()
    {
        //If there is at least a single microphone connected
        if (micConnected)
        {
            //For the current selected microphone, check if the audio is being captured
            recActive = Microphone.IsRecording(Microphone.devices[currentMic]);

            //If the audio from the current microphone isn't being recorded
            if (recActive == false)
            {
                //Case the 'Record' button gets pressed
                if (GUI.Button(new Rect(Screen.width / 2 - 100, Screen.height / 2 - 25, 200, 50), "Record"))
                {
                    //Start recording and store the audio captured from the selected microphone at the AudioClip in the AudioSource
                    goAudioSource.clip = Microphone.Start(Microphone.devices[currentMic], true, 20, maxFreqs[currentMic]);
                }
            }
            else             //Recording is in progress
            {
                //Case the 'Stop and Play' button gets pressed
                if (GUI.Button(new Rect(Screen.width / 2 - 100, Screen.height / 2 - 25, 200, 50), "Stop and Play!"))
                {
                    Microphone.End(Microphone.devices[currentMic]); //Stop the audio recording
                    goAudioSource.Play();                           //Playback the recorded audio
                }

                GUI.Label(new Rect(Screen.width / 2 - 100, Screen.height / 2 + 25, 200, 50), "Recording in progress...");
            }

            //Disable the mic SelectionGrid if a recording is in progress
            // NOTE(review): GUI.enabled is left at !recActive after this method
            // returns — confirm later GUI code expects that.
            GUI.enabled = !recActive;

            //Render the SelectionGrid listing all the microphones and save the selected one at 'selectedMic'
            selectedMic = GUI.SelectionGrid(new Rect(Screen.width / 2 - 210, Screen.height / 2 + 50, 420, 50), currentMic, Microphone.devices, 1);

            //If the selected microphone isn't the current microphone
            if (selectedMic != currentMic)
            {
                //Assign the value of currentMic to selectedMic
                currentMic = selectedMic;
            }
        }
        else         // No microphone
        {
            //Print a red "No microphone connected!" message at the center of the screen
            GUI.contentColor = Color.red;
            GUI.Label(new Rect(Screen.width / 2 - 100, Screen.height / 2 - 25, 200, 50), "No microphone connected!");
        }
    }
Esempio n. 21
0
        /// <summary>
        /// This event is fired when the recognizer stops, whether from StopRecording() being called, a timeout occurring, or some other error.
        /// Typically, this will simply return "Complete". In this case, we check to see if the recognizer timed out.
        /// </summary>
        /// <param name="cause">An enumerated reason for the session completing.</param>
        private void DictationRecognizer_DictationComplete(DictationCompletionCause cause)
        {
            // If Timeout occurs, the user has been silent for too long.
            if (cause == DictationCompletionCause.TimeoutExceeded)
            {
                // Stop capturing and replace the result with a retry prompt.
                Microphone.End(DeviceName);

                dictationResult = "Dictation has timed out. Please try again.";
            }


            // Reset accumulated dictation state for the next session.
            textSoFar       = null;
            dictationResult = string.Empty;
        }
Esempio n. 22
0
 /// <summary>
 /// Stops recording on the default microphone, if one exists and is active.
 /// </summary>
 public void StopRecord()
 {
     // No capture devices were detected at startup.
     if (micArray.Length == 0)
     {
         Debug.Log("No Record Device!");
         return;
     }

     // Nothing to stop when the default microphone is idle.
     if (!Microphone.IsRecording(null))
     {
         return;
     }

     Microphone.End(null);
     Debug.Log("StopRecord");
 }
Esempio n. 23
0
        /// <summary>
        /// This event is fired when the recognizer stops, whether from StopRecording() being called, a timeout occurring, or some other error.
        /// Typically, this will simply return "Complete". In this case, we check to see if the recognizer timed out.
        /// </summary>
        /// <param name="cause">An enumerated reason for the session completing.</param>
        private static void DictationRecognizer_DictationComplete(DictationCompletionCause cause)
        {
            // If Timeout occurs, the user has been silent for too long.
            if (cause == DictationCompletionCause.TimeoutExceeded)
            {
                // Stop capturing and replace the result with a retry prompt.
                Microphone.End(DeviceName);

                dictationResult = "Dictation has timed out. Please try again.";
            }

            // Notify listeners that dictation finished, then reset accumulated state.
            InputManager.Instance.RaiseDictationComplete(Instance, 0, dictationResult, dictationAudioClip);
            textSoFar       = null;
            dictationResult = string.Empty;
        }
Esempio n. 24
0
    // Stops the default-microphone capture, saves the clip as a WAV under
    // persistentDataPath, uploads it to the Google speech API, and stores the
    // first transcript in Log.
    public void stopListening()
    {
        //Case the 'Stop and Play' button gets pressed
        //if(Input.touchCount > 0 || Input.GetButtonDown("VuzixLeft"))
        //if(GUI.Button(new Rect(Screen.width/2-100, Screen.height/2-25, 200, 50), "Stop and Play!")
        // NOTE(review): filenameRand is a float, so the file name contains a
        // decimal point (e.g. "testing3.1415926.wav") — confirm intended.
        float  filenameRand = UnityEngine.Random.Range(0.0f, 10.0f);
        string filename     = "testing" + filenameRand;

        Microphone.End(null);                                                                         //Stop the audio recording
        if (!filename.ToLower().EndsWith(".wav"))
        {
            filename += ".wav";
        }

        //var filePath = Path.Combine("testing/", filename);
        var filePath = Application.persistentDataPath + "/" + filename;

        Debug.Log("Created filepath string: " + filePath);
        //Log.text = filePath;

        // Make sure directory exists if user is saving to sub dir.
        //Directory.CreateDirectory(Path.GetDirectoryName(filePath));
        SavWav.Save(filePath, goAudioSource.clip);                                                                          //Save a temporary Wav File
        Debug.Log("Saving @ " + filePath);
        //Log.text = "Saving @ " + filePath;
        string apiURL = "http://www.google.com/speech-api/v2/recognize?output=json&lang=en-us&key=" + apiKey;
        string Response;
        // NOTE(review): this WWW is constructed (which starts loading the file)
        // but its result is never read; WWW is deprecated — the actual upload
        // happens via HttpUploadFile below. Consider removing after verifying.
        WWW    androidfile = new WWW(filePath);

        Debug.Log("Uploading " + androidfile);
        Response = HttpUploadFile(apiURL, filePath, "file", "audio/wav; rate=44100");
        Debug.Log("Response String: " + Response);
        var jsonresponse = SimpleJSON.JSON.Parse(Response);

        if (jsonresponse != null)
        {
            // Extract the first alternative's transcript from the JSON response.
            string resultString = jsonresponse ["result"] [0].ToString();
            var    jsonResults  = SimpleJSON.JSON.Parse(resultString);

            string transcripts = jsonResults ["alternative"] [0] ["transcript"].ToString();

            Debug.Log("transcript string: " + transcripts);
            Log = transcripts;
        }
        //goAudioSource.Play();
        //Playback the recorded audio
        //Log.text = "Playback";
        //File.Delete(filePath); //Delete the Temporary Wav file
    }
Esempio n. 25
0
    // Stops the recording, runs speech recognition on it, then parses the
    // transcript for a ward ("病房") number and an operation keyword
    // (lights on/off, disinfect, ventilate) and shows "<ward>,<operation>".
    private void OnClickStopButton()
    {
        StartButton.gameObject.SetActive(false);
        StopButton.gameObject.SetActive(false);
        DescriptionText.text = "Recognizing...";
        Microphone.End(null);

        // Convert the recorded clip to 16-bit PCM for the ASR service.
        var data = Asr.ConvertAudioClipToPCM16(_clipRecord);

        StartCoroutine(_asr.Recognize(data, s =>
        {
            // Fall back to "未识别到声音" ("no speech recognized") on empty results.
            message            = s.result != null && s.result.Length > 0 ? s.result[0] : "未识别到声音";
            char[] messageChar = message.ToCharArray();
            int temporary      = 1;
            // Read the digit immediately before "病房" ("ward") as the ward number.
            int chamber        = ToDigit(messageChar[(message.IndexOf("病房") - temporary)]);
            for (temporary = 1; temporary < message.IndexOf("病房");)
            { // If other characters precede "病房" (e.g. "第六个病房"), keep scanning backwards.
                if (chamber == 0)
                {
                    chamber = ToDigit(message[(message.IndexOf("病房") - (++temporary))]);
                }
                else if (chamber != 0)
                {// Found a digit — stop scanning.
                    break;
                }
            }
            message2 = s.result != null && s.result.Length > 0 ? s.result[0] : "未识别到声音";
            // Operation codes: 0 = none, 1 = lights on, 2 = lights off, 3 = disinfect, 4 = ventilate.
            int operate       = 0;
            string[] operates = { "", "开灯", "关灯", "消毒", "通风" };
            for (int i = 1; i <= 4;)
            {
                int t = message2.IndexOf(operates[i]);
                if (t == (-1))
                {
                    i++;
                }
                else
                {
                    operate = i;
                    break;
                }
            }// IndexOf returns -1 when the keyword is absent; the first match sets `operate`.
            // chamber 1,2,3,4,5,6,7,8
            // operate 1,2,3,4
            DescriptionText.text = chamber.ToString() + "," + operate;
            StartButton.gameObject.SetActive(true);
        }));
    }
Esempio n. 26
0
    /// <summary>
    /// Stops recording and trims the clip to the samples actually captured.
    /// </summary>
    /// <param name="filePath">Receives the saved audio file path (null on failure).</param>
    /// <param name="dataReture">Receives the WAV byte data (null on failure).</param>
    /// <returns>TooShort when under 0.5 s was recorded, otherwise Success.</returns>
    public AudioRecordResultState StopRecord(out string filePath, out byte[] dataReture)
    {
        Debug.Log("stop record---------------");

        //Capture the current clip data
        isRecording = false;
        // Reject recordings shorter than half a second.
        // NOTE(review): the microphone is NOT stopped on this early-return path —
        // confirm callers handle that, or Microphone.End may need to run here too.
        if (recordTimer < 0.5f)
        {
            filePath   = null;
            dataReture = null;
            return(AudioRecordResultState.TooShort);
        }

        // Read the write position BEFORE ending the session invalidates it.
        int position  = Microphone.GetPosition(null);
        var soundData = new float[recordedClip.samples * recordedClip.channels];

        recordedClip.GetData(soundData, 0);

        //Create shortened array for the data that was used for recording
        var newData = new float[position * recordedClip.channels];


        //Copy the used samples to a new array
        for (int i = 0; i < newData.Length; i++)
        {
            newData[i] = soundData[i];
        }

        //One does not simply shorten an AudioClip,
        //    so we make a new one with the appropriate length
        recordedClip = AudioClip.Create(recordedClip.name,
                                        position,
                                        recordedClip.channels,
                                        recordedClip.frequency,
                                        false);

        recordedClip.SetData(newData, 0);        //Give it the data from the old clip

        //Replace the old clip
        Microphone.End(null);

        //save to disk
        string recordedAudioPath;

        dataReture = WavUtility.FromAudioClip(recordedClip, out recordedAudioPath, true);
        filePath   = recordedAudioPath;

        return(AudioRecordResultState.Success);
    }
        /// <summary>
        /// If the user is mid-speech, stops the capture, uploads the recording
        /// for speech recognition, and plays back a synthesised reply.
        /// </summary>
        private void StopRecording()
        {
            if (StartSpeech)
            {
                Log.Debug("麦克风停止录音");
                Microphone.End(null);
                // Convert the recorded clip to 16-bit PCM for recognition.
                var data = Asr.ConvertAudioClipToPCM16(clipRecord);
                asr.Recognize(data, s =>
                {
                    Log.Debug("进来了");
                    // Bug fix: the original condition was
                    //   s.result == null && s.result.Length < 0
                    // which dereferences a null array (NRE) and can never
                    // detect an empty result. Use || and == 0 instead.
                    if (s.result == null || s.result.Length == 0)
                    {
                        Log.Debug("结果为空,表示麦克风未识别到声音");
                        // Recognition failed: reset the button state.
                        entry2.callback.AddListener(N);
                        StartSpeech = false;
                    }
                    else
                    {
                        // Got a transcript: synthesise a spoken reply from it.
                        tts.Synthesis(s.result[0], r =>
                        {
                            if (r.Success)
                            {
                                // NOTE(review): this logs the length of the OLD clip,
                                // since audioSource.clip is assigned on the next line —
                                // confirm whether r.clip.length was intended.
                                Log.Debug("合成成功,正在播放,声音有几秒:" + audioSource.clip.length);
                                audioSource.clip = r.clip;
                                audioSource.Play();
                                // Reset the button state.
                                entry2.callback.AddListener(N);
                                StartSpeech = false;
                            }
                            else
                            {
                                // NOTE(review): this logs the ASR error message inside the
                                // TTS failure branch — confirm intended.
                                Debug.Log(s.err_msg);
                                entry2.callback.AddListener(N);
                                StartSpeech = false;
                            }
                        });
                    }

                });
            }
        }
Esempio n. 28
0
    /// <summary>
    /// Stops the microphone capture on the selected device.
    /// </summary>
    public void StopMicrophone()
    {
        if (micSelected == false)
        {
            return;
        }

        // If the source is currently playing the live "Microphone" clip,
        // halt playback before ending the capture; an overriding clip with a
        // different name is left playing.
        bool playingMicClip = (audioSource != null) &&
                              (audioSource.clip != null) &&
                              (audioSource.clip.name == "Microphone");
        if (playingMicClip)
        {
            audioSource.Stop();
        }

        Microphone.End(selectedDevice);
    }
Esempio n. 29
0
 // When the pointer is clicked, begin or stop recording on the first device.
 public void OnPonterClicked()
 {
     string device = Microphone.devices[0];

     if (Microphone.IsRecording(device))
     {
         // Stop: remember how far the mic had written before ending the session.
         wasRecordingStopped = true;
         lastSample          = Microphone.GetPosition(device);
         Microphone.End(device);
     }
     else
     {
         // Start: show the recording indicator and kick off a new capture.
         transform.GetChild(0).GetComponent <TextMesh>().text = "~";
         wasRecordingStopped = false;
         StartCoroutine(recordNewAudio());
     }
 }
    // Stop recording, trim the unrecorded tail from the clip, and play it back.
    public void StopRecording()
    {
        Microphone.End(micDevice);
        endRecordTime = Time.realtimeSinceStartup;
        Debug.Log("录音结束");
        GetComponentInChildren <Button>().GetComponentInChildren <Text>().text = "按住录音";

        // If the user released before the clip buffer filled, cut the clip down
        // to the seconds actually recorded.
        float recordedSeconds = endRecordTime - startRecordTime;
        if (recordedSeconds < audioSource.clip.length)
        {
            audioSource.clip = CutAudioClip(audioSource.clip, 0, recordedSeconds);
        }

        PlayRecord(audioSource.clip);
    }