예제 #1
0
    // Draws the immediate-mode debug UI: the dialogue text area plus the
    // Record / Abort / Play buttons. Runs once per GUI event.
    void OnGUI()
    {
        // NOTE(review): a new GUIStyle is allocated on every OnGUI call.
        // Caching it in a field would avoid the per-event allocation, but
        // GUI.skin is only valid inside OnGUI, so the cache would need a
        // lazy null check here — confirm before changing.
        GUIStyle style = new GUIStyle(GUI.skin.textField);

        // Stretch the text area across the full screen width.
        style.fixedWidth  = Screen.width;
        style.fixedHeight = 100;

        this.text = GUILayout.TextArea(this.text, style);

        // Record/Abort are hidden while calibration is running.
        if (!this.isCalibrating)
        {
            if (GUI.Button(new Rect(70, 100, 150, 100), "Record"))
            {
                this.isAborting = false;
                StartRecording();
            }

            if (GUI.Button(new Rect(70, 250, 150, 100), "Abort"))
            {
                this.isAborting = true;
            }
        }

        // Play is only offered once a recording has fully finished.
        if (this.currentRecordingState != null && this.currentRecordingState.StateName == RecordingState.EndRecording)
        {
            if (GUI.Button(new Rect(70, 350, 150, 100), "Play"))
            {
                string path = RecordingMachineBase.GetWavFilePath();
                StartCoroutine(PlayRecordedAudio(path, this.lipsyncChannels));
            }
        }
    }
예제 #2
0
    /// <summary>
    /// Transitions the recording state machine to <paramref name="newState"/>:
    /// exits the current state (if any), swaps the reference, then enters the
    /// new state (if any).
    /// </summary>
    /// <param name="newState">The state to enter, or null to leave the machine idle.</param>
    public void SetRecordingState(RecordingMachineBase newState)
    {
        if (this.currentRecordingState != null)
        {
            this.currentRecordingState.ExitState();
        }

        this.currentRecordingState = newState;

        // Qualified with `this.` for consistency with the exit branch above.
        if (this.currentRecordingState != null)
        {
            this.currentRecordingState.EnterState();
        }
    }
예제 #3
0
    /// <summary>
    /// Drives the active recording state for one update cycle and performs
    /// the transition when the state reports a successor. No-op when the
    /// machine is idle.
    /// </summary>
    public void HandleRecordingState()
    {
        RecordingMachineBase current = this.currentRecordingState;

        if (current == null)
        {
            return;
        }

        // A non-null result from HandleState() signals a state change.
        RecordingMachineBase successor = current.HandleState();

        if (successor != null)
        {
            SetRecordingState(successor);
        }
    }
예제 #4
0
    /// <summary>
    /// One-time scene setup: caps the frame rate, wires the streamed
    /// microphone into the recording machine, loads the platform-specific
    /// facial-animation asset bundle, binds the animation target to the
    /// actor's head mesh, and configures mono 44.1 kHz audio output.
    /// </summary>
    void Start()
    {
        Application.targetFrameRate = 30;

        this.device = new StreamedMicrophone();

        // Set microphone to native device implementation.
        RecordingMachineBase.SetRecordingDevice(this.device);

        // Preprocessor directives moved to column 0 per C# convention.
#if UNITY_IOS && !UNITY_EDITOR
        this.animationsDatabase = AssetBundle.LoadFromFile(Application.streamingAssetsPath + "/FacialAnimations/iOS/FacialAnimationsDatabase.facialanimation");
#else
        this.animationsDatabase = AssetBundle.LoadFromFile(Application.streamingAssetsPath + "/FacialAnimations/Standalone/FacialAnimationsDatabase.facialanimation");
#endif

        if (this.animationsDatabase != null)
        {
            GameObject mainGO = this.animationsDatabase.mainAsset as GameObject;
            GameObject.Instantiate(mainGO);
        }

        this.Actor = GameObject.Find("deadtrigger2.buck");

        // GameObject.Find returns null when the actor is missing from the
        // scene; guard so Start() does not throw a NullReferenceException.
        if (this.Actor != null)
        {
            this.animationTarget = this.Actor.GetComponent<FacialAnimation>();
        }

        if (this.animationTarget != null)
        {
            Transform headTransform = TransformHelp.FindChildRecursive(this.Actor.transform, "BaseHead");

            if (headTransform != null)
            {
                this.animationTarget.SetTargetMesh(headTransform.GetComponent<SkinnedMeshRenderer>());
            }
        }

#if UNITY_IOS && !UNITY_EDITOR
        // iOS requires an explicit runtime permission prompt for the mic.
        MicrophoneWrappers.RequestMicrophoneAccess((bool access) => {
            Debug.Log("Mic access: " + access);
        });
#endif

        // Force mono 44.1 kHz with a 512-sample DSP buffer for recording.
        AudioConfiguration config = AudioSettings.GetConfiguration();

        config.sampleRate    = 44100;
        config.dspBufferSize = 512;
        config.speakerMode   = AudioSpeakerMode.Mono;

        AudioSettings.Reset(config);
    }
예제 #5
0
    /// <summary>
    /// Begins a new voice recording session: verifies a microphone exists,
    /// estimates the required record time from the current text, resets the
    /// state machine, and wires the microphone buffer callbacks into the
    /// facial-animation recorder before starting the processing coroutine.
    /// </summary>
    void StartRecording()
    {
        // Local renamed from `availible` (typo); the external API name
        // IsMicrophoneAvailible is declared elsewhere and must stay as-is.
        bool available = this.device.IsMicrophoneAvailible();

        // Make sure we have a microphone.
        if (!available)
        {
            return;
        }

        // This will estimate for how long we are to record.
        float estimatedSpeakTime = TextAnalysisTools.EsimateTextLength(this.text, this.syllablesPerMinute) + RecordingMachineBase.RECORD_PADDING;

        UnityEngine.Debug.Log("Speak Time: " + estimatedSpeakTime);

        // Clear states between each run.
        ClearRecordingState();

        // NOTE(review): realtimeSinceStartup is stringified with the current
        // culture, so the file name may contain ',' as the decimal separator
        // on some locales — confirm the wav writer accepts that.
        RecordingMachineBase.SetRecordingFilePath(Application.persistentDataPath + "/" + Time.realtimeSinceStartup + ".wav");
        RecordingMachineBase.SetCalibrationFilePath(Application.persistentDataPath + "/microphonecalibration.json");

        // Setup initial state where our record state will begin.
        RecordingMachineBase begin = new BeginRecordingMachine(estimatedSpeakTime);

        // Starting here.
        SetRecordingState(begin);

        FacialAnimationClip clip = this.animationTarget.ConstructFacialAnimationClip("dialogue", "neutral", "StandingNormal");

        // Make sure we call this after record state is setup.
        this.voiceAnimation = new VoiceRecordingAnimation(clip, this.animationTarget, estimatedSpeakTime, null);

        this.voiceAnimation.SubscribeOnFinished((List<IAnimationChannel> lipsyncChannels, List<PhonemeContainer> phonemes, string phonemePath) => {
            this.lipsyncChannels = lipsyncChannels;
        });

        // Couple callbacks from microphone to the animation component.
        this.device.AddHandlerToBufferChanged(this.voiceAnimation.UpdateAnimation);

        // Let animation component know when we are done.
        this.device.AddHandlerToBufferFinished(this.voiceAnimation.FinalizeAnimation);

        StartCoroutine(ProcessRecordingState());
    }
예제 #6
0
 /// <summary>
 /// Drops the reference to the active recording state without exiting it;
 /// the machine is idle afterwards.
 /// </summary>
 public void ClearRecordingState() => this.currentRecordingState = null;