Example #1
0
    /// <summary>
    /// Prepares streamed lip-sync animation channels on the given clip and wires
    /// them into the lip syncer so recorded audio can drive the target's face.
    /// </summary>
    /// <param name="clip">Clip that receives the streamed lip-sync channels.</param>
    /// <param name="animationTarget">Facial animation component being driven.</param>
    /// <param name="estimatedSpeakTime">Expected speech length in seconds, used to pre-size keyframe storage.</param>
    /// <param name="phonemesPath">Path to the phoneme data backing the lip sync.</param>
    public VoiceRecordingAnimation(FacialAnimationClip clip, FacialAnimation animationTarget, float estimatedSpeakTime, string phonemesPath)
    {
        this.streamedAnimationClip = clip;
        this.animationTarget       = animationTarget;
        this.phonemesPath          = phonemesPath;
        this.lipSyncer             = new LipSyncData();

        //Used to size our animation channels, this way the internal array in the keyframes list should be big enough.
        //NOTE(review): the keyframe lists below are pre-sized to 50, not to this value — confirm estimatedSpeakFrames is consumed elsewhere.
        this.estimatedSpeakFrames = (int)(FacialAnimation.FRAMERATE * estimatedSpeakTime);

        //Create all our streamed lipsync channels: one jaw-rotation channel plus one morph channel per lip shape.
        this.streamedLipsyncChannels = new List <IAnimationChannel>(7);
        this.streamedLipsyncChannels.Add(new RotationChannel("Mid_Head_Jnt_03", new List <Vector3>(50), false, false, true, true, FacialAnimation.FRAMERATE));

        //All morph channels share identical construction parameters; only the blend-shape name differs.
        string[] morphNames = { "Corner_In", "I", "Lip_LowerUp", "Lip_LowerDown", "Lip_UpperUp", "Lip_UpperDown" };
        foreach (string morphName in morphNames)
        {
            this.streamedLipsyncChannels.Add(new MorphChannel(morphName, new List <float>(50), false, false, true, true, FacialAnimation.FRAMERATE));
        }

        //Add our streamed channels. This will also remove any previous channel with the same name.
        foreach (IAnimationChannel channel in this.streamedLipsyncChannels)
        {
            this.streamedAnimationClip.AddAnimationChannel(channel);
        }

        //Stream data into these animation channels.
        this.lipSyncer.SetLipSyncChannels(this.streamedLipsyncChannels);

        this.hasBegunAnimating   = false;
        this.hasStartedStreaming = false;

        #if UNITY_EDITOR && DEBUG_MODE
        //Close previous instance of window.
        if (visemeVisualizer != null)
        {
            visemeVisualizer.Close();
        }

        EditorApplication.ExecuteMenuItem("Plotagon/VisemeVisualizer");
        visemeVisualizer = EditorWindow.focusedWindow;

        //Allows us to see results in Unity's editor window.
        foreach (IAnimationChannel channel in this.streamedLipsyncChannels)
        {
            channel.AddDebugWindow(visemeVisualizer);
        }
        #endif
    }
Example #2
0
    /// <summary>
    /// Boots the demo scene: configures the recording device, loads the
    /// platform-specific facial animation bundle, locates the actor, and
    /// switches the audio engine to mono 44.1 kHz capture-friendly settings.
    /// </summary>
    void Start()
    {
        //Cap the frame rate so streaming keeps a predictable cadence.
        Application.targetFrameRate = 30;

        this.device = new StreamedMicrophone();

        //Set microphone to native device implementation.
        RecordingMachineBase.SetRecordingDevice(device);

        //Load the platform-specific facial animation asset bundle.
        #if UNITY_IOS && !UNITY_EDITOR
        this.animationsDatabase = AssetBundle.LoadFromFile(Application.streamingAssetsPath + "/FacialAnimations/iOS/FacialAnimationsDatabase.facialanimation");
        #else
        this.animationsDatabase = AssetBundle.LoadFromFile(Application.streamingAssetsPath + "/FacialAnimations/Standalone/FacialAnimationsDatabase.facialanimation");
        #endif

        if (this.animationsDatabase != null)
        {
            GameObject mainGO = this.animationsDatabase.mainAsset as GameObject;
            GameObject.Instantiate(mainGO);
        }
        else
        {
            //Previously failed silently; surface the problem so a missing bundle is diagnosable.
            Debug.LogError("Failed to load FacialAnimationsDatabase asset bundle.");
        }

        this.Actor = GameObject.Find("deadtrigger2.buck");

        //GameObject.Find returns null when the actor is absent; guard to avoid a NullReferenceException below.
        if (this.Actor != null)
        {
            this.animationTarget = this.Actor.GetComponent <FacialAnimation>();

            if (this.animationTarget != null)
            {
                Transform headTransform = TransformHelp.FindChildRecursive(this.Actor.transform, "BaseHead");

                if (headTransform != null)
                {
                    this.animationTarget.SetTargetMesh(headTransform.GetComponent <SkinnedMeshRenderer>());
                }
                else
                {
                    Debug.LogError("Child transform 'BaseHead' not found on actor.");
                }
            }
        }
        else
        {
            Debug.LogError("Actor 'deadtrigger2.buck' not found in scene.");
        }

        #if UNITY_IOS && !UNITY_EDITOR
        //iOS requires explicit microphone permission before recording can start.
        MicrophoneWrappers.RequestMicrophoneAccess((bool access) => {
            Debug.Log("Mic access: " + access);
        });
        #endif

        //Switch the audio engine to mono at 44.1 kHz with a small DSP buffer.
        AudioConfiguration config = AudioSettings.GetConfiguration();

        config.sampleRate    = 44100;
        config.dspBufferSize = 512;
        config.speakerMode   = AudioSpeakerMode.Mono;

        //NOTE(review): AudioSettings.Reset stops all playing audio — confirm nothing must keep playing at this point.
        AudioSettings.Reset(config);
    }
    /// <summary>
    /// Caches required components, hides the hammer, and binds this character
    /// to its Rewired input player based on the GameObject's tag.
    /// </summary>
    void Start()
    {
        rb              = GetComponent <Rigidbody>();
        playerPickUp    = GetComponent <PlayerPickUp>();
        facialAnimation = GetComponent <FacialAnimation>();

        //Remember the configured top speed so it can be restored after temporary modifiers.
        startMaxSpeed = maxSpeed;

        SetHammerVisibility(false);

        //CompareTag is the Unity-recommended tag check: it avoids the string
        //allocation of reading gameObject.tag and flags unknown tags in the editor.
        playerID = gameObject.CompareTag("Player2") ? 1 : 0;

        //Bind to the matching Rewired input player.
        player = ReInput.players.GetPlayer(playerID);
    }
Example #4
0
        /// <summary>
        /// Cross-fades the animator into the requested facial-animation state
        /// and records when the expression should end.
        /// </summary>
        /// <param name="face">Facial animation to show.</param>
        /// <param name="duration">Total display time in seconds, including the fade.</param>
        public void SetFace(FacialAnimation face, float duration)
        {
            string stateName = Names[(int)face];
            animator.CrossFade(stateName, FadeDuration, this.layerIndex);

            //Hold the face for the requested duration minus the fade, clamped to non-negative.
            float holdTime = Math.Max(duration - FadeDuration, 0.0f);
            this.endTime = Time.time + holdTime;
        }
Example #5
0
 /// <summary>Shows the given facial animation for the default duration.</summary>
 public void SetFace(FacialAnimation face) => SetFace(face, DefaultDuration);