Example #1
    /// <summary>
    /// Stops the microphone.
    /// </summary>
    public void StopMicrophone()
    {
#if PHOTON_SOLUTION
        //do not use this mic if using photon voice
        if (GameState.Instance.streamingSolution == EStreamingSolution.PhotonVoice)
        {
            return;
        }
#endif

        if (micSelected == false)
        {
            return;
        }

        // Overridden with a clip to play? Don't stop the audio source
        if ((audioSource != null) &&
            (audioSource.clip != null) &&
            (audioSource.clip.name == "Microphone"))
        {
            audioSource.Stop();
        }

        // Reset to stop mouth movement
        OVRLipSyncContext context = GetComponent <OVRLipSyncContext>();
        context.ResetContext();

        Microphone.End(selectedDevice);
    }
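
For context, the StartMicrophone counterpart is not shown in this example. The sketch below is a minimal, assumed implementation built only on Unity's Microphone API; it reuses the micSelected, selectedDevice and audioSource fields seen above, and micFrequency is a hypothetical sample-rate field.

    /// <summary>
    /// Starts the microphone and routes it into the audio source (hedged sketch, not part of the original example).
    /// </summary>
    public void StartMicrophone()
    {
        if (micSelected == false)
        {
            return;
        }

        // Record into a short looping clip; Microphone.Start returns the AudioClip being filled.
        audioSource.clip = Microphone.Start(selectedDevice, true, 1, micFrequency);
        audioSource.clip.name = "Microphone";
        audioSource.loop = true;

        // Wait until the device starts delivering samples before playing the clip back.
        while (Microphone.GetPosition(selectedDevice) <= 0) { }

        audioSource.Play();
    }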
Example #2
    /// <summary>
    /// Start this instance.
    /// </summary>
    void Start()
    {
        // morph target needs to be set manually; possibly other components will need the same
        if (skinnedMeshRenderer == null)
        {
            Debug.Log("LipSyncContextMorphTarget.Start WARNING: Please set required public components!");
            return;
        }

        // make sure there is a phoneme context assigned to this object
        lipsyncContext = GetComponent <OVRLipSyncContext>();
        if (lipsyncContext == null)
        {
            Debug.Log("LipSyncContextMorphTarget.Start WARNING: No phoneme context component set to object");
        }

        // Can we record and play sequences?
        sequencer = GetComponent <OVRLipSyncContextSequencer>();

        if (sequencer == null)
        {
            Debug.Log("LipSyncContextMorphTarget.Start: No sequencer set. Ability to record and playback keystrokes disabled.");
        }

        // Send smoothing amount to context (guarded, since the missing-context case above only logs a warning)
        if (lipsyncContext != null)
        {
            lipsyncContext.SendSignal(OVRLipSync.ovrLipSyncSignals.VisemeSmoothing, SmoothAmount, 0);
        }
    }
Example #3
 void Inject(OVRLipSyncContext context)
 {
     if (PhotonView.Get(this).isMine)
     {
         return;                             // Ignore objects we instantiated ourselves
     }
     context.audioSource = GetComponent <AudioSource>();
 }
Example #4
    public void OnEvent(ExitGames.Client.Photon.EventData photonEvent)
    {
        if (photonEvent.Code == (byte)VRMLiveMotionEventCode.SetHumanPoseTransferSource)
        {
            Debug.Log("OnEvent: EventCode is SetHumanPoseTransferSource");

            int receivedViewID = (int)photonEvent.Parameters[ParameterCode.Data];

            GameObject humanPoseSynchronizer = PhotonView.Find(receivedViewID).gameObject;
            m_source = humanPoseSynchronizer.GetComponent <UniHumanoid.HumanPoseTransfer>();
            humanPoseSynchronizer.GetComponent <Renderer>().enabled = false;

            SetupTarget();
        }

        if (photonEvent.Code == (byte)VRMLiveMotionEventCode.SetLipSync)
        {
            Debug.Log("OnEvent: EventCode is SetLipSync.");
            int receivedViewID = (int)photonEvent.Parameters[ParameterCode.Data];

            GameObject photonVoiceSpeaker = PhotonView.Find(receivedViewID).gameObject;
            lipSyncContext           = photonVoiceSpeaker.AddComponent <OVRLipSyncContext>();
            lipSyncContext.audioMute = false;

            var morphTarget = m_VRMObj.GetComponent <VRMLipSyncMorphTarget>();
            if (morphTarget != null)
            {
                morphTarget.lipsyncContext = lipSyncContext;
            }
        }

        if (photonEvent.Code == (byte)VRMLiveMotionEventCode.SetAvatarPositionSynchronizer)
        {
            Debug.Log("OnEvent: EventCode is SetAvatarPositionSynchronizer");
            int receivedViewID = (int)photonEvent.Parameters[ParameterCode.Data];
            var synchronizer   = PhotonView.Find(receivedViewID).gameObject.GetComponent <AvatarPositionSynchronizer>();

            if (synchronizer != null)
            {
                if (m_VRMObj != null)
                {
                    synchronizer.AvatarPositionTransform = m_VRMObj.transform;
                }
                else
                {
                    m_avatarPositionSynchronizer = synchronizer;
                }
            }
        }
    }
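
The handler above only receives events; the sending side is not shown. Below is a hedged sketch of how the matching events might be raised, assuming PUN2's RaiseEvent overload and callback registration (the VRMLiveMotionEventCode values and the PhotonView IDs come from the example's own project; everything else is an assumption, so adjust to the PUN version the project actually uses).

    // Register this component so OnEvent(EventData) is invoked (PUN2 IOnEventCallback pattern).
    void OnEnable()  { PhotonNetwork.AddCallbackTarget(this); }
    void OnDisable() { PhotonNetwork.RemoveCallbackTarget(this); }

    // Broadcast a PhotonView ID under one of the custom event codes handled above.
    void RaiseSetLipSync(int photonVoiceSpeakerViewID)
    {
        var options = new RaiseEventOptions { Receivers = ReceiverGroup.Others };
        PhotonNetwork.RaiseEvent(
            (byte)VRMLiveMotionEventCode.SetLipSync,   // event code matched in OnEvent
            photonVoiceSpeakerViewID,                  // arrives in the handler as the event's custom data
            options,
            ExitGames.Client.Photon.SendOptions.SendReliable);
    }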
Example #5
    private IEnumerator PhotonVoiceInstantiationForLocalAvatar()
    {
        Debug.Log("[PUN] OVRAvatar completed instantiation of LocalAvatar; now we set up voice by adding Speaker, Recorder and PhotonVoiceView");

        // Get the AudioSource from the local avatar (wait until it exists)
        while (localAvatar.GetComponentInChildren <AudioSource>() == null)
        {
            yield return(new WaitForSeconds(0.1f));
        }
        AudioSource audioSource = localAvatar.GetComponentInChildren <AudioSource>();

        // Wait for the OVRLipSyncContext on the audio source object, then grab it
        while (audioSource.gameObject.GetComponent <OVRLipSyncContext>() == null)
        {
            yield return(new WaitForSeconds(0.1f));
        }
        OVRLipSyncContext LipSyncContext = audioSource.gameObject.GetComponent <OVRLipSyncContext>();

        LipSyncContext.audioSource = audioSource;
        // Loop the captured audio back through the audio source only when voice debugging is enabled
        LipSyncContext.audioLoopback = voiceDebug;
        LipSyncContext.skipAudioSource = false;

        // Add a Speaker to the object that holds the audio source
        Speaker speaker = audioSource.gameObject.AddComponent <Speaker>();

        // Add a Recorder to the object that has the PhotonView
        Recorder recorder = photonView.gameObject.AddComponent <Recorder>();

        recorder.DebugEchoMode = true;

        // Add a PhotonVoiceView to the local avatar's PhotonView object
        PhotonVoiceView voiceView = photonView.gameObject.AddComponent <PhotonVoiceView>();

        voiceView.RecorderInUse     = recorder;
        voiceView.SpeakerInUse      = speaker;
        voiceView.SetupDebugSpeaker = true;

        // Start transmission, then wait one frame before starting to record
        voiceView.RecorderInUse.TransmitEnabled = true;
        yield return null;

        voiceView.RecorderInUse.StartRecording();
    }
Example #6
    /// <summary>
    /// Start this instance.
    /// </summary>
    void Start()
    {
        // morph target needs to be set manually; possibly other components will need the same
        if (material == null)
        {
            Debug.Log("LipSyncContextTextureFlip.Start WARNING: Please set required public components!");
            return;
        }

        // make sure there is a phoneme context assigned to this object
        phonemeContext = GetComponent <OVRLipSyncContext>();
        if (phonemeContext == null)
        {
            Debug.Log("LipSyncContextTextureFlip.Start WARNING: No phoneme context component set to object");
        }
    }
Example #7
    /// <summary>
    /// Stops the microphone.
    /// </summary>
    public void StopMicrophone()
    {
        if (micSelected == false)
        {
            return;
        }

        // Overridden with a clip to play? Don't stop the audio source
        if ((audioSource != null) &&
            (audioSource.clip != null) &&
            (audioSource.clip.name == "Microphone"))
        {
            audioSource.Stop();
        }

        // Reset to stop mouth movement
        OVRLipSyncContext context = GetComponent <OVRLipSyncContext>();

        context.ResetContext();

        Microphone.End(selectedDevice);
    }
Example #8
    private void SetupLipSync()
    {
        VRMBlendShapeProxy    vrmBlendShapeProxy = GetComponentInChildren <VRMBlendShapeProxy>();
        VRMLipSyncMorphTarget morph_target       = vrmBlendShapeProxy.gameObject.AddComponent <VRMLipSyncMorphTarget>();

        morph_target.blendShapeProxy = vrmBlendShapeProxy;

        OVRLipSyncContext lipsync_context = m_PhotonVoice.GetComponent <OVRLipSyncContext>();

        morph_target.lipsyncContext = lipsync_context;

        if (m_PhotonView == null)
        {
            Debug.LogWarning("LipSyncSetupper PhotonView is null");
            return;
        }

        if (m_PhotonView.IsMine == false)
        {
            lipsync_context.audioLoopback = true;
        }
    }
Example #9
    private void Awake()
    {
        m_VHPmanager = gameObject.GetComponent <VHPManager>();

        m_OVRLipSyncContext = transform.GetComponent <OVRLipSyncContext>();

        LoadBlendShapeValues();

        m_visemesBlendShapeValues.Add(m_viseme_sil_BlendShapeValues);
        m_visemesBlendShapeValues.Add(m_viseme_PP_BlendShapeValues);
        m_visemesBlendShapeValues.Add(m_viseme_FF_BlendShapeValues);
        m_visemesBlendShapeValues.Add(m_viseme_TH_BlendShapeValues);
        m_visemesBlendShapeValues.Add(m_viseme_DD_BlendShapeValues);
        m_visemesBlendShapeValues.Add(m_viseme_kk_BlendShapeValues);
        m_visemesBlendShapeValues.Add(m_viseme_CH_BlendShapeValues);
        m_visemesBlendShapeValues.Add(m_viseme_SS_BlendShapeValues);
        m_visemesBlendShapeValues.Add(m_viseme_nn_BlendShapeValues);
        m_visemesBlendShapeValues.Add(m_viseme_RR_BlendShapeValues);
        m_visemesBlendShapeValues.Add(m_viseme_aa_BlendShapeValues);
        m_visemesBlendShapeValues.Add(m_viseme_E_BlendShapeValues);
        m_visemesBlendShapeValues.Add(m_viseme_I_BlendShapeValues);
        m_visemesBlendShapeValues.Add(m_viseme_O_BlendShapeValues);
        m_visemesBlendShapeValues.Add(m_viseme_U_BlendShapeValues);
    }
Example #10
    private void ConfigureHelpers()
    {
        Transform head =
            transform.Find("body/body_renderPart_0/root_JNT/body_JNT/chest_JNT/neckBase_JNT/neck_JNT/head_JNT");

        if (head == null)
        {
            AvatarLogger.LogError("Avatar helper config failed. Cannot find head transform. All helpers spawning on root avatar transform");
            head = transform;
        }

        if (MouthAnchor == null)
        {
            MouthAnchor = CreateHelperObject(head, MOUTH_HEAD_OFFSET, MOUTH_HELPER_NAME);
        }

        if (GetComponent <OvrAvatarLocalDriver>() != null)
        {
            if (audioSource == null)
            {
                audioSource = MouthAnchor.gameObject.AddComponent <AudioSource>();
            }
            spatializedSource = MouthAnchor.GetComponent <ONSPAudioSource>();

            if (spatializedSource == null)
            {
                spatializedSource = MouthAnchor.gameObject.AddComponent <ONSPAudioSource>();
            }

            spatializedSource.UseInvSqr            = true;
            spatializedSource.EnableRfl            = false;
            spatializedSource.EnableSpatialization = true;
            spatializedSource.Far  = 100f;
            spatializedSource.Near = 0.1f;

            // Add phoneme context to the mouth anchor
            lipsyncContext = MouthAnchor.GetComponent <OVRLipSyncContext>();
            if (lipsyncContext == null)
            {
                lipsyncContext = MouthAnchor.gameObject.AddComponent <OVRLipSyncContext>();
            }

            lipsyncContext.provider = EnableLaughter
                ? OVRLipSync.ContextProviders.Enhanced_with_Laughter
                : OVRLipSync.ContextProviders.Enhanced;

            // Ignore audio callback if microphone is owned by VoIP
            lipsyncContext.skipAudioSource = !CanOwnMicrophone;

            StartCoroutine(WaitForMouthAudioSource());
        }

        if (GetComponent <OvrAvatarRemoteDriver>() != null)
        {
            GazeTarget headTarget = head.gameObject.AddComponent <GazeTarget>();
            headTarget.Type = ovrAvatarGazeTargetType.AvatarHead;
            AvatarLogger.Log("Added head as gaze target");

            Transform hand = transform.Find("hand_left");
            if (hand == null)
            {
                AvatarLogger.LogWarning("Gaze target helper config failed: Cannot find left hand transform");
            }
            else
            {
                GazeTarget handTarget = hand.gameObject.AddComponent <GazeTarget>();
                handTarget.Type = ovrAvatarGazeTargetType.AvatarHand;
                AvatarLogger.Log("Added left hand as gaze target");
            }

            hand = transform.Find("hand_right");
            if (hand == null)
            {
                AvatarLogger.Log("Gaze target helper config failed: Cannot find right hand transform");
            }
            else
            {
                GazeTarget handTarget = hand.gameObject.AddComponent <GazeTarget>();
                handTarget.Type = ovrAvatarGazeTargetType.AvatarHand;
                AvatarLogger.Log("Added right hand as gaze target");
            }
        }
    }
Example #11
    private void FinishAvatar()
    {
        GameObject          avatar = playerObj.transform.Find("Avatar").gameObject;
        GameObject          head   = avatar.transform.Find("HeadObject").gameObject;
        SkinnedMeshRenderer skinnedMeshRenderer = head.GetComponent <SkinnedMeshRenderer>();

        // Add audio source to player for speaking sound
        AudioSource audioSource = avatar.AddComponent <AudioSource>();

        OVRLipSyncContext ovrContext = avatar.AddComponent <OVRLipSyncContext>();

        ovrContext.audioSource = audioSource;
        ovrContext.audioMute   = false;

        OVRLipSyncContextMorphTarget ovrMorphTarget = avatar.AddComponent <OVRLipSyncContextMorphTarget>();

        ovrMorphTarget.skinnedMeshRenderer = skinnedMeshRenderer;

        ovrMorphTarget.VisemeToBlendTargets[0]  = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex("sil");
        ovrMorphTarget.VisemeToBlendTargets[1]  = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex("pp");
        ovrMorphTarget.VisemeToBlendTargets[2]  = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex("ff");
        ovrMorphTarget.VisemeToBlendTargets[3]  = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex("TH");
        ovrMorphTarget.VisemeToBlendTargets[4]  = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex("dd");
        ovrMorphTarget.VisemeToBlendTargets[5]  = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex("kk");
        ovrMorphTarget.VisemeToBlendTargets[6]  = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex("CH");
        ovrMorphTarget.VisemeToBlendTargets[7]  = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex("ss");
        ovrMorphTarget.VisemeToBlendTargets[8]  = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex("nn");
        ovrMorphTarget.VisemeToBlendTargets[9]  = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex("rr");
        ovrMorphTarget.VisemeToBlendTargets[10] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex("AA");
        ovrMorphTarget.VisemeToBlendTargets[11] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex("EE");
        ovrMorphTarget.VisemeToBlendTargets[12] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex("IH");
        ovrMorphTarget.VisemeToBlendTargets[13] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex("OH");
        ovrMorphTarget.VisemeToBlendTargets[14] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex("OU");

        ovrMorphTarget.KeySendVisemeSignal     = new int[15];
        ovrMorphTarget.KeySendVisemeSignal[0]  = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex("sil");
        ovrMorphTarget.KeySendVisemeSignal[1]  = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex("pp");
        ovrMorphTarget.KeySendVisemeSignal[2]  = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex("ff");
        ovrMorphTarget.KeySendVisemeSignal[3]  = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex("TH");
        ovrMorphTarget.KeySendVisemeSignal[4]  = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex("dd");
        ovrMorphTarget.KeySendVisemeSignal[5]  = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex("kk");
        ovrMorphTarget.KeySendVisemeSignal[6]  = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex("CH");
        ovrMorphTarget.KeySendVisemeSignal[7]  = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex("ss");
        ovrMorphTarget.KeySendVisemeSignal[8]  = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex("nn");
        ovrMorphTarget.KeySendVisemeSignal[9]  = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex("rr");
        ovrMorphTarget.KeySendVisemeSignal[10] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex("AA");
        ovrMorphTarget.KeySendVisemeSignal[11] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex("EE");
        ovrMorphTarget.KeySendVisemeSignal[12] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex("IH");
        ovrMorphTarget.KeySendVisemeSignal[13] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex("OH");
        ovrMorphTarget.KeySendVisemeSignal[14] = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex("OU");

        ovrMorphTarget.SmoothAmount = 10;

        ovrContext.enabled = false;
        ovrContext.enabled = true;

        EyeController eyeController = avatar.AddComponent <EyeController>();

        eyeController.head          = avatar.transform.Find("HeadObject").gameObject;
        eyeController.positiveXAxis = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex("EyesRight");
        eyeController.positiveYAxis = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex("EyesUp");
        eyeController.negativeXAxis = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex("EyesLeft");
        eyeController.negativeYAxis = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex("EyesDown");

        eyeController.blinkL = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex("EyeBlink_L");
        eyeController.blinkR = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex("EyeBlink_R");

        // Set camera
        if (cameraObj != null)
        {
            eyeController.target = cameraObj.transform;
            cameraObj.GetComponent <CameraScript>().target = playerObj.transform;
        }

        // send finish command
        AppSocket.SendCommand(AppSocket.SendType.AVATAR_FINISH);
    }
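
The repetitive viseme mapping above can also be written as a loop over the fifteen viseme slots. A hedged sketch, assuming the same blend-shape names and the same viseme order used in the example:

    // Blend-shape names for each viseme slot, in the order used by the example above.
    string[] visemeBlendShapeNames =
    {
        "sil", "pp", "ff", "TH", "dd", "kk", "CH", "ss", "nn", "rr",
        "AA", "EE", "IH", "OH", "OU"
    };

    ovrMorphTarget.KeySendVisemeSignal = new int[visemeBlendShapeNames.Length];
    for (int i = 0; i < visemeBlendShapeNames.Length; i++)
    {
        int blendShapeIndex = skinnedMeshRenderer.sharedMesh.GetBlendShapeIndex(visemeBlendShapeNames[i]);
        ovrMorphTarget.VisemeToBlendTargets[i] = blendShapeIndex;
        ovrMorphTarget.KeySendVisemeSignal[i]  = blendShapeIndex;
    }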
Example #12
 void Awake()
 {
     context = GetComponent <OVRLipSyncContext>();
 }
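
Once the context is cached like this, its per-frame viseme weights are typically read elsewhere. A hedged sketch of such a consumer, assuming the GetCurrentPhonemeFrame() accessor and the OVRLipSync.Viseme enum from the Oculus LipSync plugin these examples build on:

 void Update()
 {
     // Latest analysis frame; Visemes holds one 0..1 weight per viseme (sil, PP, FF, ...).
     OVRLipSync.Frame frame = context.GetCurrentPhonemeFrame();
     if (frame != null)
     {
         float mouthOpen = frame.Visemes[(int)OVRLipSync.Viseme.aa];
         // ...drive a blend shape, material, or custom mouth rig with mouthOpen here.
     }
 }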