// Activates speech audio, speech animation, and action triggers for the prompt
    public void RunActionsAndSpeechForLine(Prompt line)
    {
        if (line.hasAudio)
        {
            AnimationAudio voiceLine = line.animatedSpeechLine;
            speechController.PlayClipAndStartAnimatingFace(voiceLine);
        }

        if (line.lookAtFriend)
        {
            if (line.lookAtCurrentFriend && currentFriend != null)
            {
                LookAtPerson(line.lookDuration, currentFriend.face, line.matchLookDurationToSpeechTime);
            }
            else
            {
                bool foundFriendName = false;
                foreach (ConversationStarter speechLine in speechController.friendLines)
                {
                    if (speechLine.friendName == line.nameOfFriendToLookAt && !foundFriendName)
                    {
                        // Only look at the first friend line that matches the requested name
                        foundFriendName = true;
                        LookAtPerson(line.lookDuration, GameObject.Find(line.nameOfFriendToLookAt).transform, line.matchLookDurationToSpeechTime);
                    }
                }
            }
        }
    }
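
    // NOTE: LookAtPerson isn't included in this excerpt. Below is a minimal sketch of what such
    // a helper might look like, assuming a coroutine that eases a head transform toward the
    // target; headTransform, turnSpeed, and currentSpeechClip are illustrative names and are
    // not part of the original implementation.
    public Transform headTransform;
    public float     turnSpeed = 5f;
    public AudioClip currentSpeechClip;

    public void LookAtPerson(float lookDuration, Transform target, bool matchLookDurationToSpeechTime)
    {
        // Optionally hold the look for as long as the current voice line plays
        float duration = (matchLookDurationToSpeechTime && currentSpeechClip != null)
                             ? currentSpeechClip.length
                             : lookDuration;

        StartCoroutine(LookAtRoutine(target, duration));
    }

    private IEnumerator LookAtRoutine(Transform target, float duration)
    {
        float elapsed = 0f;
        while (elapsed < duration)
        {
            Vector3 toTarget = target.position - headTransform.position;
            if (toTarget != Vector3.zero)
            {
                // Ease the head rotation toward the target each frame
                headTransform.rotation = Quaternion.Slerp(headTransform.rotation,
                                                          Quaternion.LookRotation(toTarget),
                                                          Time.deltaTime * turnSpeed);
            }

            elapsed += Time.deltaTime;
            yield return null;
        }
    }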
Example #2
    ///
    /// ///////////////////////////////////////////////////////////////////////////////////////////
    /// METHODS


    public void AssembleAndDisplayCommsText()
    {
        // Wipe out the comms text
        ClearConversationLogs();
        // Clear out the list containing responses and their respective line counts
        lineCountsOfResponses.Clear();
        panels.NoSelection();

        // Load the current line text
        UnpackTransitionsForPrompt();

        // Update Target Info Box, run Actions attached to the current line
        // Also run an AnimatedVoiceLine/LookAtTed Action combo if there's an AnimationAudio in the currentLine and hasAudio is checked
        MakeIDChanges(currentLine);
        RunPromptActions(currentLine);

        if (currentLine == noTargetPrompt)
        {
            musicBox.PlayCommsNoSignal();
        }
        if (currentLine.hasAudio && currentLine.animatedSpeechLine != null && transmissionSpeech != null && transmissionAI != null)
        {
            // If this is a dialogue line through the Stardater chat, pass the flag that duplicates the speech through the chat (so the sound is heard both live and through the chat)
            if (isInDatingChat)
            {
                transmissionSpeech.PlayClipAndStartAnimatingFace(currentLine.animatedSpeechLine, true);
            }
            else
            {
                transmissionSpeech.PlayClipAndStartAnimatingFace(currentLine.animatedSpeechLine);
            }

            transmissionAI.LookAtPerson(currentLine.lookDuration, FindObjectOfType<TeddyHead>().transform, currentLine.matchLookDurationToSpeechTime);
        }

        // A lot of this probably isn't needed, since we aren't keeping a scrolling log history of Prompts and responses
        string combinedText = currentLine.line;

        AddToLog(combinedText);

        string runningLog = string.Join("\n", responseLog.ToArray());

        // Tack on an ellipsis if the next line is a continuation of the current Prompt
        if (currentLine.continues)
        {
            runningLog = runningLog + "  (...)";
        }
        currentText = runningLog;

        // Generate block of text for list of responses, and call methods to generate char arrays for both Prompt and response list, to unravel the Prompt string (and maybe the response string)
        // Also populate the list of responses and their line counts
        if (textIsUnravelled)
        {
            InitializePromptCharArrayToUnravel(runningLog);

            string listNumberColor   = ColorUtility.ToHtmlStringRGB(responseNumberColor);
            string exitResponseColor = ColorUtility.ToHtmlStringRGB(exitConvoColor);

            if (responses.Count > 0)
            {
                // This will look like shit if it's unravelled--the colors aren't applied until the starting tag is completed
                for (int i = 0; i < responses.Count; i++)
                {
                    lineCountsOfResponses.Add(new ResponseLineCount(responses[i].transitDescription, responses[i].numberOfLines));

                    if (i < responses.Count - 1)
                    {
                        if (responses[i].isExitLine)
                        {
                            listOfResponses += "<color=#" + listNumberColor + ">" + (i + 1).ToString() + ")</color>  <color=#" + exitResponseColor + ">" + responses[i].transitDescription + "</color>\n";
                        }
                        else
                        {
                            listOfResponses += "<color=#" + listNumberColor + ">" + (i + 1).ToString() + ")</color>  " + responses[i].transitDescription + "\n";
                        }
                    }
                    else
                    {
                        if (responses[i].isExitLine)
                        {
                            listOfResponses += "<color=#" + listNumberColor + ">" + (i + 1).ToString() + ")</color>  <color=#" + exitResponseColor + ">" + responses[i].transitDescription + "</color>";
                        }
                        else
                        {
                            listOfResponses += "<color=#" + listNumberColor + ">" + (i + 1).ToString() + ")</color>  " + responses[i].transitDescription;
                        }
                    }
                }
            }
            else
            {
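                // No authored responses on this Prompt: fall back to a single default "exit conversation" option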
                if (transmissionSpeech != null)
                {
                    Transition exitTransition = new Transition();
                    exitTransition.transitDescription = defaultExitConvoText;
                    exitTransition.numberOfLines      = 1;
                    exitTransition.isExitLine         = true;

                    responses.Add(exitTransition);
                    listOfResponses += "<color=#" + listNumberColor + ">1)</color>  <color=#" + exitResponseColor + ">" + exitTransition.transitDescription + "</color>";
                    lineCountsOfResponses.Add(new ResponseLineCount(exitTransition.transitDescription, exitTransition.numberOfLines));
                }
            }

            currentResponseText = listOfResponses;

            // This sets the relative Y position from the default bottom position for each response in the line count of responses list
            for (int i = lineCountsOfResponses.Count; i > 0; i--)
            {
                if (i == lineCountsOfResponses.Count)
                {
                    lineCountsOfResponses[i - 1].pixelCountDistanceFromDefaultBottomPosition = (-panels.GetPanelSpacerHeight() / 2) + panels.GetPanelSpacerHeight() * lineCountsOfResponses[i - 1].lineCount / 2;
                }
                else
                {
                    lineCountsOfResponses[i - 1].SetPixelYDistanceRelToBottomResponse(lineCountsOfResponses[i].pixelCountDistanceFromDefaultBottomPosition,
                                                                                      lineCountsOfResponses[i].lineCount, panels.GetPanelSpacerHeight());
                }
            }

            InitializeResponseCharArrayToUnravel(listOfResponses);
        }
        else
        {
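            // Unravelling is disabled: show the full prompt and response text immediately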
            unravellingPromptText             = runningLog;
            unravellingResponseTransitionText = currentResponseText;
        }
    }
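
    // NOTE: The "unravel" helpers called above (InitializePromptCharArrayToUnravel,
    // InitializeResponseCharArrayToUnravel) aren't part of this excerpt. Below is a minimal
    // sketch of how the prompt side of a typewriter-style reveal might work; charsPerSecond,
    // promptCharsToUnravel, promptUnravelIndex, and unravelTimer are illustrative names and
    // are not the original implementation.
    public  float  charsPerSecond = 40f;
    private char[] promptCharsToUnravel;
    private int    promptUnravelIndex;
    private float  unravelTimer;

    private void InitializePromptCharArrayToUnravel(string fullText)
    {
        // Store the finished prompt text and reset the reveal state
        promptCharsToUnravel  = fullText.ToCharArray();
        promptUnravelIndex    = 0;
        unravelTimer          = 0f;
        unravellingPromptText = string.Empty;
    }

    // Called once per frame (e.g. from Update) while textIsUnravelled is true
    private void UnravelPromptText()
    {
        if (promptCharsToUnravel == null || promptUnravelIndex >= promptCharsToUnravel.Length)
        {
            return;
        }

        // Accumulate fractional characters and reveal whole ones as they become due
        unravelTimer += Time.deltaTime * charsPerSecond;
        while (unravelTimer >= 1f && promptUnravelIndex < promptCharsToUnravel.Length)
        {
            unravellingPromptText += promptCharsToUnravel[promptUnravelIndex];
            promptUnravelIndex++;
            unravelTimer -= 1f;
        }
    }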