public void StopListening()
    {
        if (recordingActive)
        {
            // Snapshot of the encoded audio produced by THIS call, taken under
            // the lock so the request below never reads a half-written field.
            byte[] capturedAudio = null;

            lock (thisLock)
            {
                // Re-check under the lock: another thread may have stopped the
                // recording between the unsynchronized check above and here.
                if (recordingActive)
                {
                    StopRecording();

                    // Encode the recorded clip to WAV bytes before dropping the source.
                    bytes = WavUtil.FromAudioClip(audioSource.clip);
                    capturedAudio = bytes;
                    audioSource = null;
                }
            }

            // BUG FIX: previously the coroutine started even when the inner
            // check failed, posting stale (or null) audio. Only fire the
            // Dialogflow detectIntent request when this call actually stopped
            // the recording and produced fresh bytes.
            if (capturedAudio != null)
            {
                StartCoroutine(StartVoiceRequest("https://dialogflow.googleapis.com/v2/projects/" + PROJECT_ID + "/agent/sessions/34563:detectIntent",
                                                 ACCESS_TOKEN,
                                                 capturedAudio));
            }
        }
    }
    IEnumerator StartVoiceRequest(String url, String AccessToken, object parameter)
    {
        // Coroutine: POSTs recorded audio to the Dialogflow v2 detectIntent
        // endpoint, then plays back the agent's synthesized response audio
        // and stores the returned output contexts.
        byte[] samples = (byte[])parameter;

        // BUG FIX: the null check must run BEFORE Convert.ToBase64String,
        // which throws ArgumentNullException on null — the original error
        // branch below was unreachable.
        if (samples == null)
        {
            Debug.LogError("The audio file is null");
            yield break;
        }

        string sampleString = System.Convert.ToBase64String(samples);

        UnityWebRequest postRequest  = new UnityWebRequest(url, "POST");
        RequestBody     requestBody  = new RequestBody();
        requestBody.queryInput             = new QueryInput();
        requestBody.queryInput.audioConfig = new InputAudioConfig();
        requestBody.queryInput.audioConfig.audioEncoding = AudioEncoding.AUDIO_ENCODING_UNSPECIFIED;
        // NOTE(review): 16 kHz is assumed here — confirm it matches the
        // Microphone capture rate used when recording.
        requestBody.queryInput.audioConfig.sampleRateHertz = 16000;
        requestBody.queryInput.audioConfig.languageCode    = "en";
        requestBody.inputAudio = sampleString;

        string jsonRequestBody = JsonUtility.ToJson(requestBody, true);
        Debug.Log(jsonRequestBody);

        byte[] bodyRaw = System.Text.Encoding.UTF8.GetBytes(jsonRequestBody);
        postRequest.SetRequestHeader("Authorization", "Bearer " + AccessToken);
        postRequest.uploadHandler   = new UploadHandlerRaw(bodyRaw);
        postRequest.downloadHandler = new DownloadHandlerBuffer();
        // The body is JSON; the Dialogflow REST API expects an explicit
        // content type on the request.
        postRequest.SetRequestHeader("Content-Type", "application/json");

        yield return postRequest.SendWebRequest();

        if (postRequest.isNetworkError || postRequest.isHttpError)
        {
            Debug.Log(postRequest.responseCode);
            Debug.Log(postRequest.error);
        }
        else
        {
            Debug.Log("Response: " + postRequest.downloadHandler.text);

            // downloadHandler.text is the UTF-8 decode of downloadHandler.data,
            // so parse it directly instead of re-decoding the raw bytes.
            ResponseBody content = JsonUtility.FromJson<ResponseBody>(postRequest.downloadHandler.text);

            // Decode and play the agent's synthesized speech.
            byte[]    wavBytes  = System.Convert.FromBase64String(content.outputAudio);
            AudioClip audioClip = WavUtil.ToAudioClip(wavBytes, 0);
            StartCoroutine(myScript.WaitForEnd(audioClip));

            this.contexts = content.queryResult.outputContexts;
            Debug.Log(content.queryResult.fulfillmentText);
        }
    }
// ---- Example #3 (aggregator artifact; vote count: 0) ----
    public void FromVegas(Vegas vegas)
    {
        // Vegas script entry point: wire up the helper objects, then run the
        // blip-detection pass followed by the MIDI-processing pass.
        this.vegas = vegas;

        util = new Util(vegas);
        wavUtil = new WavUtil(vegas, util);

        ProcessBlips();
        ProcessMidi();
    }
// ---- Example #4 (aggregator artifact; vote count: 0) ----
    public void FromVegas(Vegas vegas)
    {
        // Vegas script entry point: detect blips in the audio, then drop a
        // project marker at each blip, labeled with its pulse count and
        // offset by the selected video event's start time.
        this.vegas = vegas;

        util = new Util(vegas);
        wavUtil = new WavUtil(vegas, util);

        processBlips();

        foreach (WavUtil.Blip blip in blips)
        {
            // Blip locations are stored in microseconds; dividing by 1000
            // presumably yields the milliseconds Timecode expects — confirm
            // against the Vegas Timecode(double ms) constructor.
            Timecode blipTime = new Timecode(blip.locationInMicroseconds / 1000);
            string   label    = blip.pulseCount + "";

            vegas.Project.Markers.Add(new Marker(blipTime + selectedVideoEvent.Start, label));
        }
    }
// ---- Example #5 (aggregator artifact; vote count: 0) ----
    public void FromVegas(Vegas vegas)
    {
        // Vegas script entry point: export the selected video event's audio
        // span to a temporary WAV file, locate the sync blips in it, and
        // split the event at each blip.
        this.vegas   = vegas;
        this.util    = new Util(vegas);
        this.wavUtil = new WavUtil(vegas, util);

        VideoEvent videoEvent = util.GetFirstSelectedVideoEvent();

        if (videoEvent == null)
        {
            util.ShowError("No video event selected");
            return;
        }

        // Create a temporary WAV file, and export the audio span of the
        // selected video event.
        string wavePath = wavUtil.CreateVideoEventWAV(videoEvent);

        if (wavePath == null)
        {
            util.ShowError("Unable to export temporary WAV");
            return;
        }

        short[] leftChannel, rightChannel;
        bool    wavReadStatus = wavUtil.ReadWav(wavePath, out leftChannel, out rightChannel);

        // BUG FIX: delete the temp file before the failure return below —
        // previously a failed read leaked the exported WAV on disk.
        File.Delete(wavePath);

        if (!wavReadStatus)
        {
            util.ShowError("Unable to read WAV export file.");
            return;
        }

        // Find all blips and split tracks at them. (The original comment
        // said "left channel", but the code scans the RIGHT channel.)
        List<WavUtil.Blip> blips = wavUtil.FindBlips(rightChannel);

        wavUtil.SplitAtBlips(videoEvent, blips);
    }