Example #1
    // Per-physics-step update: fetch the current FAP (face) and BAP (body) frames plus the
    // audio buffer over Thrift when distantConnection is set (otherwise the connectors are
    // shut down for local-file playback), apply the frames, and feed the audio to the AudioSource.
    public void FixedUpdate()
    {
        if (animateAgent)
        {
            AnimationParametersFrame currentFAPFrame = null;
            AnimationParametersFrame currentBAPFrame = null;

            AudioElement currentAudio = null;

            // Update of frames
            if (distantConnection)
            {
                // uses Thrift to receive the animation frames and the audio

                if (!thriftConsumerOpened)
                {
                    // Open the Thrift connectors one at a time: FAP -> BAP -> audio -> command sender
                    if (!fapReceiver.isConnected() && !fapReceiver.isOnConnection())
                    {
                        fapReceiver.startConnection();
                    }
                    else if (!bapReceiver.isConnected() && !bapReceiver.isOnConnection() && fapReceiver.isConnected())
                    {
                        Debug.Log("FAP Receiver started");
                        bapReceiver.startConnection();
                    }
                    else if (!audioReceiver.isConnected() && !audioReceiver.isOnConnection() && bapReceiver.isConnected())
                    {
                        Debug.Log("BAP Receiver started");
                        audioReceiver.startConnection();
                    }
                    else if (!commandSender.isConnected() && !commandSender.isOnConnection() && audioReceiver.isConnected())
                    {
                        Debug.Log("Audio Receiver started");
                        commandSender.startConnection();
                    }
                    else if (commandSender.isConnected())
                    {
                        Debug.Log("Connection Sender started");
                        thriftConsumerOpened = true;
                    }
                }
                else
                {
                    // FAP animation
                    if (fapReceiver.timer.isSynchronized())
                    {
                        //if (SceneManager.gretaClock <= 0)
                        characterTimer.setTimeMillis(fapReceiver.timer.getTimeMillis() - ANIM_DELAY); // ANIM_DELAY compensates for network latency
                        SceneManager.gretaClock = (float)characterTimer.getTimeMillis();
                        // Debug.Log(fapReceiver.timer.getTimeMillis()/40 );
                        //currentFAPFrame = fapReceiver.getCurrentFrame (fapReceiver.timer.getTimeMillis () / 40);
                        currentFAPFrame = fapReceiver.getCurrentFrame(characterTimer.getTimeMillis() / 40); // one frame every 40 ms (25 fps)
                    }
                    // BAP Animation
                    if (bapReceiver.timer.isSynchronized())
                    {
                        if (SceneManager.gretaClock <= 0)
                        {
                            characterTimer.setTimeMillis(bapReceiver.timer.getTimeMillis() - ANIM_DELAY); // ANIM_DELAY compensates for network latency
                            SceneManager.gretaClock = (float)(characterTimer.getTimeMillis());
                        }
                        currentBAPFrame = bapReceiver.getCurrentFrame(characterTimer.getTimeMillis() / 40);
                    }
                    // AudioBuffer
                    if (fapReceiver.timer.isSynchronized())
                    { // consume the audio buffer (keyed to the FAP timer)
                        currentAudio = audioReceiver.getCurrentAudioElement(characterTimer.getTimeMillis() / 40);
                    }
                }
            }

            // Local-file animation: make sure the Thrift connectors are closed
            else
            {
                if (fapReceiver.isConnected())
                {
                    fapReceiver.stopConnector();
                    thriftConsumerOpened = false;
                }
                if (bapReceiver.isConnected())
                {
                    bapReceiver.stopConnector();
                    thriftConsumerOpened = false;
                }
                if (audioReceiver.isConnected())
                {
                    audioReceiver.stopConnector();
                    thriftConsumerOpened = false;
                }
            }

            // Update of the animation: apply the frames; if the same frame repeats more than
            // twice in a row, the agent is considered idle (agentPlaying = false)
            if (currentFAPFrame != null)
            {
                if (lastFAPFrame.isEqualTo(currentFAPFrame))
                {
                    cptFrames++;
                    if (cptFrames > 2)
                    {
                        agentPlaying = false;
                        cptFrames    = 0;
                    }
                }
                else
                {
                    agentPlaying = true;
                    cptFrames    = 0;
                    lastFAPFrame = new AnimationParametersFrame(currentFAPFrame);
                }

                applyFapFrame(currentFAPFrame);
            }
            if (currentBAPFrame != null)
            {
                if (lastBAPFrame.isEqualTo(currentBAPFrame))
                {
                    cptFrames++;
                    if (cptFrames > 2)
                    {
                        agentPlaying = false;
                        cptFrames    = 0;
                    }
                }
                else
                {
                    agentPlaying = true;
                    cptFrames    = 0;
                    lastBAPFrame = new AnimationParametersFrame(currentBAPFrame);
                }

                applyBapFrame(currentBAPFrame);
            }

            /*EB : START TEST FOR AUDIO BUFFER*/
            if (audioFilePlayer.isNewAudio() || audioReceiver.isNewAudio())
            {
                //EB : I reconstructed the short values computed by cereproc from the byte buffer sent by VIB
                // and used these short values to fill the float buffer needed by the audio clip
                if (currentAudio != null && currentAudio.getSampleRate() > 0 && currentAudio.rawData.Length > 0)
                {
                    int len = currentAudio.rawData.Length / 2;
                    //EB: I couldn't find in Unity how to clean an audio clip nor how to modify its buffer length,
                    // so I preferred to destroy the audio clip (to free the memory) and to create an audio clip
                    // which has the appropriate float buffer size.
                    // In theory the frequency should be provided by the currentAudio object (which should
                    // receive that information in the message from VIB), but since this is not the case
                    // I hard coded the frequency (47250). It works fine with cereproc, but not with MaryTTS.
                    // For Mary you need to set the frequency to 16000. This is ugly, really!
                    // It should be an input and not hard-coded. The problem is that the thrift message doesn't
                    // contain the information at all and I don't want to put my hands in that part of your code.
                    Destroy(_currentAudioSource.clip);

                    _currentAudioSource.clip = AudioClip.Create("text", len, 1, currentAudio.getSampleRate(), false);
                    float[] buffer = new float[len];
                    // Rebuild each little-endian 16-bit sample and scale it to the [-1, 1] range Unity expects
                    for (int iPCM = 44; iPCM < len; iPCM++)
                    {
                        short i = (short)((currentAudio.rawData[iPCM * 2 + 1] << 8) | currentAudio.rawData[iPCM * 2]);
                        float f = ((float)i) / (float)32768;
                        if (f > 1)
                        {
                            f = 1;
                        }
                        if (f < -1)
                        {
                            f = -1;
                        }
                        buffer[iPCM] = f;
                    }
                    _currentAudioSource.clip.SetData(buffer, 0);
                    _currentAudioSource.Play();

                    audioReceiver.setNewAudio(false);
                    audioFilePlayer.setNewAudio(false);
                }
                else
                {
                    if ((currentAudio != null) && (_currentAudioSource != null) && (_currentAudioSource.clip != null))
                    {
                        // Seek the clip to the position matching the current animation time (one frame = 40 ms)
                        float offSet        = ((float)characterTimer.getTimeMillis() - ((float)currentAudio.getFrameNumber() * 40)) / 1000;
                        int   samplesOffset = (int)(_currentAudioSource.clip.frequency * offSet * _currentAudioSource.clip.channels);
                        _currentAudioSource.timeSamples = samplesOffset;
                        _currentAudioSource.Play();
                    }
                    audioReceiver.setNewAudio(false);
                    audioFilePlayer.setNewAudio(false);
                }
            }
        }
        else
        {
            if (_currentAudioSource != null)
            {
                _currentAudioSource.Stop();
            }
        }
        // Play the requested agent animation whenever animationID changes
        if (animationIDold != animationID)
        {
            PlayAgentAnimation(animationID);
            animationIDold = animationID;
        }
    }
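
The audio branch above boils down to one conversion: take the little-endian 16-bit PCM byte buffer, rebuild each sample as a short, scale it to the [-1, 1] range Unity expects, and push the result into a freshly created AudioClip. The helper below is a minimal sketch of just that step, assuming mono, non-streaming audio as in the example; the name BytesToClip and the headerSamples parameter are illustrative assumptions, not part of any existing API.

using UnityEngine;

public static class PcmAudioSketch
{
    // Sketch: convert a little-endian 16-bit PCM byte buffer into a mono AudioClip.
    // "BytesToClip" and "headerSamples" are hypothetical names used only for illustration.
    public static AudioClip BytesToClip(byte[] rawData, int sampleRate, int headerSamples = 44)
    {
        int len = rawData.Length / 2;          // two bytes per 16-bit sample
        float[] buffer = new float[len];

        for (int iPCM = headerSamples; iPCM < len; iPCM++)
        {
            // Reassemble the little-endian sample, then normalise it to [-1, 1]
            short sample = (short)((rawData[iPCM * 2 + 1] << 8) | rawData[iPCM * 2]);
            buffer[iPCM] = Mathf.Clamp(sample / 32768f, -1f, 1f);
        }

        // One channel, no streaming: the same arguments the example passes to AudioClip.Create
        AudioClip clip = AudioClip.Create("pcm", len, 1, sampleRate, false);
        clip.SetData(buffer, 0);
        return clip;
    }
}

In the example the new clip replaces _currentAudioSource.clip and is played immediately; destroying the previous clip first, as the code does, frees the old sample buffer instead of leaving it to accumulate.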