// Removes phoneme markers that sit too close together, according to the selected cleanup mode,
// then writes the thinned marker set back onto the clip and reports success via the callback.
// NOTE(review): 'output' is sorted by time below, but the Legacy scan walks inputClip.phonemeData
// in its original (possibly unsorted) order — confirm phonemeData is already time-sorted upstream.
public override void Process(LipSyncData inputClip, AutoSync.ASProcessDelegate callback)
        {
            // Working copy of the marker list; removals happen here, not on the source array.
            List <PhonemeMarker> output            = new List <PhonemeMarker>(inputClip.phonemeData);
            List <bool>          markedForDeletion = new List <bool>();

            output.Sort(LipSync.SortTime);

            switch (cleanupMode)
            {
            default:
            case CleanupMode.Legacy:
                // Flag any marker closer than 'cleanupAggression' to its predecessor — but only
                // when that predecessor was kept, so dense runs of markers are thinned alternately.
                for (int m = 0; m < inputClip.phonemeData.Length; m++)
                {
                    if (m > 0)
                    {
                        if (inputClip.phonemeData[m].time - inputClip.phonemeData[m - 1].time < cleanupAggression && !markedForDeletion[m - 1])
                        {
                            markedForDeletion.Add(true);
                        }
                        else
                        {
                            markedForDeletion.Add(false);
                        }
                    }
                    else
                    {
                        // The first marker is always kept.
                        markedForDeletion.Add(false);
                    }
                }
                break;

            case CleanupMode.Simple:
                // Not implemented: no markers are flagged, so the clip passes through unchanged.
                break;

            case CleanupMode.Advanced:
                // Not implemented: no markers are flagged, so the clip passes through unchanged.
                break;
            }

            // Remove flagged markers from the working copy (List.Remove matches the original
            // marker instance, so indices into phonemeData remain valid here).
            for (int m = 0; m < markedForDeletion.Count; m++)
            {
                if (markedForDeletion[m])
                {
                    output.Remove(inputClip.phonemeData[m]);
                }
            }

            inputClip.phonemeData = output.ToArray();
            // Report success; this cleanup module adds no new clip features.
            callback.Invoke(inputClip, new AutoSync.ASProcessDelegateData(true, "", ClipFeatures.None));
        }
Example #2
0
    // Sets up streamed lip-sync animation for a voice recording: creates the jaw/mouth channels,
    // registers them on the clip, and (in editor debug builds) wires them to the viseme visualizer.
    public VoiceRecordingAnimation(FacialAnimationClip clip, FacialAnimation animationTarget, float estimatedSpeakTime, string phonemesPath)
    {
        this.streamedAnimationClip = clip;
        this.animationTarget       = animationTarget;
        this.phonemesPath          = phonemesPath;
        this.lipSyncer             = new LipSyncData();

        // Sizes the animation channels so the internal keyframe lists start out large enough.
        this.estimatedSpeakFrames = (int)(FacialAnimation.FRAMERATE * estimatedSpeakTime);

        // Build the streamed lipsync channels: one jaw rotation plus six mouth morphs.
        this.streamedLipsyncChannels = new List <IAnimationChannel>(7);
        this.streamedLipsyncChannels.Add(new RotationChannel("Mid_Head_Jnt_03", new List <Vector3>(50), false, false, true, true, FacialAnimation.FRAMERATE));

        string[] morphNames = { "Corner_In", "I", "Lip_LowerUp", "Lip_LowerDown", "Lip_UpperUp", "Lip_UpperDown" };
        foreach (string morphName in morphNames)
        {
            this.streamedLipsyncChannels.Add(new MorphChannel(morphName, new List <float>(50), false, false, true, true, FacialAnimation.FRAMERATE));
        }

        // Register the streamed channels on the clip; this also replaces any previous channel with the same name.
        foreach (IAnimationChannel streamedChannel in this.streamedLipsyncChannels)
        {
            this.streamedAnimationClip.AddAnimationChannel(streamedChannel);
        }

        // Let the lip syncer stream data into these channels.
        this.lipSyncer.SetLipSyncChannels(this.streamedLipsyncChannels);

        this.hasBegunAnimating   = false;
        this.hasStartedStreaming = false;

        #if UNITY_EDITOR && DEBUG_MODE
        // Close any previous instance of the visualizer window.
        if (visemeVisualizer != null)
        {
            visemeVisualizer.Close();
        }

        EditorApplication.ExecuteMenuItem("Plotagon/VisemeVisualizer");
        visemeVisualizer = EditorWindow.focusedWindow;

        // Hook every channel up to the editor window so results are visible inside Unity.
        foreach (IAnimationChannel streamedChannel in this.streamedLipsyncChannels)
        {
            streamedChannel.AddDebugWindow(visemeVisualizer);
        }
        #endif
    }
    // Caches the editor textures (playhead artwork and transport icons) used by this inspector.
    void OnEnable()
    {
        lsdTarget = (LipSyncData)target;

        // Shorthand for pulling editor textures out of the Rogo Digital asset folders.
        Texture2D LoadTex(string assetPath)
        {
            return (Texture2D)EditorGUIUtility.Load(assetPath);
        }

        // Playhead and marker artwork for the timeline view.
        playhead_top    = LoadTex("Rogo Digital/LipSync/Playhead_top.png");
        playhead_line   = LoadTex("Rogo Digital/LipSync/Playhead_middle.png");
        playhead_bottom = LoadTex("Rogo Digital/LipSync/Playhead_bottom.png");
        marker_line     = LoadTex("Rogo Digital/Shared/white.png");

        // Transport button icons (dark skin).
        playIcon    = LoadTex("Rogo Digital/LipSync/Dark/play.png");
        stopIcon    = LoadTex("Rogo Digital/LipSync/Dark/stop.png");
        pauseIcon   = LoadTex("Rogo Digital/LipSync/Dark/pause.png");
        previewIcon = LoadTex("Rogo Digital/LipSync/Dark/eye.png");
    }
Example #4
0
        /// <summary>
        /// Check if the supplied LipSyncData clip is compatible with a particular module.
        /// </summary>
        /// <param name="data">Clip whose available features are tested.</param>
        /// <param name="module">Module whose compatibility requirements must be satisfied.</param>
        /// <returns>True when the clip provides every feature the module requires.</returns>
        public static bool CheckIsClipCompatible(LipSyncData data, AutoSyncModule module)
        {
            var req = module.GetCompatibilityRequirements();

            // Build a mask of the features actually present in the clip.
            ClipFeatures metCriteria = ClipFeatures.None;

            if (data.clip)
            {
                metCriteria |= ClipFeatures.AudioClip;
            }

            if (!string.IsNullOrEmpty(data.transcript))
            {
                metCriteria |= ClipFeatures.Transcript;
            }

            if (data.phonemeData != null && data.phonemeData.Length > 0)
            {
                metCriteria |= ClipFeatures.Phonemes;
            }

            if (data.emotionData != null && data.emotionData.Length > 0)
            {
                metCriteria |= ClipFeatures.Emotions;
            }

            if (data.gestureData != null && data.gestureData.Length > 0)
            {
                metCriteria |= ClipFeatures.Gestures;
            }

            // (req & metCriteria) keeps only the requirements the clip meets; XOR with req leaves
            // the unmet ones, so the result is None exactly when every requirement is satisfied.
            // The original trailing if/else returned this same value on every path (the AudioClip
            // sub-branch always returned true), so it has been collapsed into a single return.
            return ((req & metCriteria) ^ req) == ClipFeatures.None;
        }
Example #5
0
 // Advances to the next question's lipsync clip and plays it on the "lincoln" character;
 // once all questions are exhausted, switches to the result scene.
 public static void startSpeaking()
 {
     qs_no++;

     // All questions done: move on to the results scene.
     if (qs_no > 5)
     {
         SceneManager.LoadScene("result");
         return;
     }

     // Load and play the lipsync clip for the current question.
     string resourcePath = "Lipsync/Custom/" + qs[qs_no];
     clip      = Resources.Load <LipSyncData>(resourcePath);
     component = GameObject.Find("lincoln").GetComponent <LipSync>();
     component.Play(clip);
 }
Example #6
0
 // Callback for a single-clip AutoSync run: on success, pushes the processed clip back into the
 // editor setup and closes this window; on failure, surfaces the error to the user.
 private void FinishedProcessingSingle(LipSyncData outputData, AutoSync.ASProcessDelegateData data)
 {
     if (data.success)
     {
         // Hand the processed data back to the clip editor and flag it for re-preview.
         setup.data             = (TemporaryLipSyncData)outputData;
         setup.changed          = true;
         setup.previewOutOfDate = true;
         setup.disabled         = false;
         setup.ShowNotification(new GUIContent("AutoSync Completed Successfully"));
         Close();
     }
     else
     {
         // Log at error level (was Debug.LogFormat) so failures stand out in the console,
         // matching the error logging used by the batch-processing callback.
         Debug.LogErrorFormat("AutoSync Failed: {0}", data.message);
         ShowNotification(new GUIContent(data.message));
     }
 }
        // Callback invoked after each module in the sequence finishes. Advances to the next
        // module, or finalizes the run on completion or failure.
        private void ProcessNext(LipSyncData inputData, ASProcessDelegateData data)
        {
            index++;

            if (finalData == null)
            {
                finalData = new ASProcessDelegateData(true, "", ClipFeatures.None);
            }

            // Accumulate the clip features contributed by every module so far.
            finalData.addedFeatures |= data.addedFeatures;

            if (data.success == false)
            {
                // A module failed: propagate its result, abort the sequence, and clean up editor state.
                finalData.success = data.success;
                finalData.message = data.message;

                if (onFinishedCallback != null)
                {
                    // A null clip signals failure to the caller.
                    onFinishedCallback.Invoke(null, finalData);
                }

                moduleSequence = null;
                finalData      = null;
                EditorApplication.Beep();
                EditorUtility.ClearProgressBar();
                return;
            }

            if (index >= moduleSequence.Length)
            {
                // All modules ran successfully: hand the processed clip back to the caller.
                if (onFinishedCallback != null)
                {
                    onFinishedCallback.Invoke(inputData, finalData);
                }

                moduleSequence = null;
                finalData      = null;
                EditorUtility.ClearProgressBar();
                return;
            }
            else
            {
                // More modules remain: run the next one; it re-enters this method when it finishes.
                RunModuleSafely(moduleSequence[index], inputData, ProcessNext, silentMode);
            }
        }
Example #8
0
    // Caches editor textures and builds the human-readable summary string for this LipSyncData asset.
    void OnEnable()
    {
        lsdTarget = (LipSyncData)target;

        // Timeline playhead artwork.
        playhead_top    = (Texture2D)EditorGUIUtility.Load("Rogo Digital/LipSync/Playhead_top.png");
        playhead_line   = (Texture2D)EditorGUIUtility.Load("Rogo Digital/LipSync/Playhead_middle.png");
        playhead_bottom = (Texture2D)EditorGUIUtility.Load("Rogo Digital/LipSync/Playhead_bottom.png");

        // Transport button icons (dark skin).
        playIcon    = (Texture2D)EditorGUIUtility.Load("Rogo Digital/LipSync/Dark/play.png");
        stopIcon    = (Texture2D)EditorGUIUtility.Load("Rogo Digital/LipSync/Dark/stop.png");
        pauseIcon   = (Texture2D)EditorGUIUtility.Load("Rogo Digital/LipSync/Dark/pause.png");
        previewIcon = (Texture2D)EditorGUIUtility.Load("Rogo Digital/LipSync/Dark/eye.png");

        // Build the summary shown in the inspector.
        infoString = lsdTarget.name + " LipSyncData.\nCreated with LipSync Pro " + lsdTarget.version + "\n\n";
        if (lsdTarget.clip)
        {
            infoString += "AudioClip: " + lsdTarget.clip.name + ".\n";
        }
        infoString += "Length: " + lsdTarget.length + ".\nData: ";

        // Collect the non-empty data counts, then join them so separators are always correct.
        // (The previous version omitted the comma between phonemes and gestures whenever the
        // clip had no emotion data.)
        List<string> dataParts = new List<string>();
        if (lsdTarget.phonemeData.Length > 0)
        {
            dataParts.Add(lsdTarget.phonemeData.Length + " phonemes");
        }
        if (lsdTarget.emotionData.Length > 0)
        {
            dataParts.Add(lsdTarget.emotionData.Length + " emotions");
        }
        if (lsdTarget.gestureData.Length > 0)
        {
            dataParts.Add(lsdTarget.gestureData.Length + " gestures");
        }
        infoString += string.Join(", ", dataParts.ToArray());

        infoString += ".";
    }
Example #9
0
    // Switches the application mode, shows the matching screen, and has the avatar speak:
    // a random general clip for returning users, or the introduction clip on first launch.
    public void SetMode(AppMode appMode)
    {
        currentAppMode = appMode;
        OnModeChange?.Invoke(currentAppMode);

        // Map the mode to its screen index.
        if (currentAppMode == AppMode.SAA)
        {
            ScreenManager.Instance.Set(2);
        }
        else if (currentAppMode == AppMode.CSU)
        {
            ScreenManager.Instance.Set(3);
        }

        if (PlayerPrefs.HasKey("first"))
        {
            // Returning user: play a random general clip (int Random.Range max is exclusive, so 1..27).
            PlayAvatarClip("General_datafiles/" + UnityEngine.Random.Range(1, 28).ToString());
        }
        else
        {
            // First launch: remember it, then play the introduction clip.
            PlayerPrefs.SetString("first", "first");
            PlayAvatarClip("General_datafiles/" + "xen");
        }
        firstTime = true;
    }

    // Loads a LipSyncData clip from Resources, plays it on the avatar, and — only before the
    // first SetMode call completes — kicks off the delayed mood-change coroutine.
    // (Extracted from SetMode, which previously duplicated this logic in both branches.)
    private void PlayAvatarClip(string resourcePath)
    {
        LipSyncData currentdat = Resources.Load <LipSyncData>(resourcePath);
        catdat = currentdat;
        avatar.GetComponent <LipSync>().Play(currentdat);
        if (!firstTime)
        {
            delay = new WaitForSeconds(0.5f);
            StartCoroutine(setMood());
        }
    }
Example #10
0
        // Converts a LipSyncData asset's normalized (0-1) marker times into absolute-seconds
        // phoneme, mood, and mode positions on the clip.
        protected PhonemePositions GetPhonemesFromLipSyncData(LipSyncData data, float audioClipLength)
        {
            PhonemePositions positions = new PhonemePositions();

            foreach (var marker in data.phonemeData)
            {
                positions.Add(new PhonemePosition(marker.time * audioClipLength, phonemes[marker.phonemeNumber]));
            }

            // Each emotion contributes two mood keys: its start, and a return to Neutral at its end.
            foreach (var marker in data.emotionData)
            {
                positions.AddMood(new MoodPosition(marker.startTime * audioClipLength, GetMoodFromString(marker.emotion)));
                positions.AddMood(new MoodPosition(marker.endTime * audioClipLength, CharacterHead.Moods.Neutral));
            }

            foreach (var marker in data.gestureData)
            {
                positions.AddMode(new ModePosition(marker.time * audioClipLength, marker.gesture));
            }

            return positions;
        }
    // Parses the XML lipsync definition against the clip and hands it to the playback coroutine.
    public Coroutine PlaySpeech(AudioClip clip, string xmlStr)
    {
        return StartCoroutine(PlaySpeechCoroutine(LipSyncData.CreateFromFile(xmlStr, clip)));
    }
 /// <summary>
 /// Begins processing the supplied inputClip asynchronously using this module's settings, and will call the supplied callback when finished.
 /// </summary>
 /// <param name="inputClip">Clip data for the module to process.</param>
 /// <param name="callback">Invoked with the resulting clip and a result payload when processing completes.</param>
 public abstract void Process(LipSyncData inputClip, AutoSync.ASProcessDelegate callback);
        // Starts running a chain of AutoSync modules over the given clip data. Each module's
        // completion is routed through ProcessNext, which advances the sequence.
        public void RunSequence(AutoSyncModule[] moduleSequence, ASProcessDelegate onFinishedCallback, LipSyncData inputData, bool silent = false)
        {
            // Reset sequence state for a fresh run.
            index      = 0;
            silentMode = silent;
            this.moduleSequence     = moduleSequence;
            this.onFinishedCallback = onFinishedCallback;
            finalData = new ASProcessDelegateData(true, "", ClipFeatures.None);

            // An empty sequence is a no-op (the callback is never invoked).
            if (moduleSequence.Length == 0)
            {
                return;
            }

            // Kick off the first module; ProcessNext handles the rest.
            RunModuleSafely(moduleSequence[0], inputData, ProcessNext, silent);
        }
Example #14
0
        // Plays back a line of dialogue with lip sync. When either the audio clip or its lipsync
        // data is missing, a generic talk cycle is simulated from the message length instead.
        protected virtual IEnumerator RunLipSyncProc(string charName, AudioSource src, AudioClip audioClip, int lineID, string message)
        {
            this.interruptFlag = false;
            IsTalking          = true;

            string      currentMode = "idle";
            // Lipsync data is expected at Resources/Lipsync/<charName>/<charName><lineID>.
            LipSyncData lipSyncData = Resources.Load <LipSyncData>("Lipsync/" + charName + "/" + charName + lineID.ToString());

            if (audioClip == null || lipSyncData == null)
            {
                // Fallback: fake mouth movement for roughly 0.1s per character of text.
                float fakeClipLength = 3f;
                float startTime      = Time.time;

                fakeClipLength = message.Length * 0.1f;

                // Simulate a talk cycle
                string[] talkCycle = { "AI", "AI", "O", "AI", "E", "AI", "O", "U" };
                int      i         = 0;
                while (Time.time - startTime < fakeClipLength && !interruptFlag)
                {
                    //if(character != null && !character.isTalking) interruptFlag = true;
                    string ph = talkCycle[i % talkCycle.Length];
                    i++;
                    ShowPhoneme(ph);
                    yield return(new WaitForSeconds(.09f));
                }

                // Close the mouth when done.
                ShowPhoneme("MBP");
            }
            else
            {
                VoiceClip voiceClip = new VoiceClip();
                voiceClip.name = audioClip.name;
                voiceClip.text = "text not needed here";
                CharacterHead.Moods mood = CharacterHead.Moods.Neutral;

                SetArmAnimationEnabled(true);

                if (lipSyncData != null)
                {
                    // Convert the lipsync markers into absolute-time phoneme/mood/mode positions.
                    voiceClip.phonemes  = GetPhonemesFromLipSyncData(lipSyncData, audioClip.length);
                    voiceClip.audioClip = audioClip;
                    voiceClip.mood      = mood;

                    CharacterHead.Moods currentMood = CharacterHead.Moods.Neutral;

                    // Drive the face at ~60Hz while the audio source is playing, following the
                    // audio source's own playback position.
                    while (src.isPlaying && !interruptFlag)
                    {
                        //if(character != null && !character.isTalking) interruptFlag = true;
                        float time = src.time;

                        bool change = false;

                        voiceClip.phonemes.GetMode(time /*+ pause*/, ref currentMode, ref change);

                        if (change)
                        {
                            ChangeTalkMode(currentMode);
                        }

                        change = false;

                        voiceClip.phonemes.GetMood(time /*+ pause*/, ref currentMood, ref change);

                        if (change)
                        {
                            ChangeMood(currentMood);
                        }

                        string ph = voiceClip.phonemes.GetPhoneme(time /*+ pause*/).Trim();

                        ShowPhoneme(ph);

                        yield return(new WaitForSeconds(1 / 60f));
                    }
                }
            }

            // Reset talking state: idle mode, closed mouth, neutral mood.
            ChangeTalkMode("idle");

            ShowPhoneme("MBP");

            ChangeMood(CharacterHead.Moods.Neutral);

            IsTalking = false;

            animator.SetBool("talking", false);

            ResetArmAnimationStatus();

            yield return(null);
        }
Example #15
0
    // Draws the AutoSync window: tab 0 configures presets and the module sequence for a single
    // clip; tab 1 selects AudioClips for batch processing using the same module settings.
    void OnGUI()
    {
        EditorStyles.label.wordWrap = true;

        GUILayout.Space(10);
        tab = GUILayout.Toolbar(tab, new string[] { "AutoSync Settings", "Batch Process" });
        GUILayout.Space(10);

        bool ready = true;

        if (tab == 0)
        {
            // The single-process button is enabled only when at least one module is configured.
            ready = currentModules.Count > 0;

            // --- Preset list ---
            GUILayout.Space(5);
            GUILayout.Box("Presets", EditorStyles.boldLabel);
            GUILayout.Space(5);
            presetScroll = GUILayout.BeginScrollView(presetScroll, GUILayout.MaxHeight(80));
            if (presets.Length == 0)
            {
                GUILayout.Space(10);
                GUILayout.Box("No Presets Found", EditorStyles.centeredGreyMiniLabel);
            }
            else
            {
                EditorGUILayout.BeginVertical();
                // i == -1 renders the "None" row; 0..Length-1 are the actual presets.
                for (int i = -1; i < presets.Length; i++)
                {
                    var lineRect = EditorGUILayout.BeginHorizontal();
                    if (i == presetHighlight)
                    {
                        GUI.Box(lineRect, "", (GUIStyle)"SelectionRect");
                    }

                    if (i >= 0)
                    {
                        if (GUILayout.Button(presets[i].displayName, EditorStyles.label))
                        {
                            LoadPreset(i);
                        }
                    }
                    else
                    {
                        if (GUILayout.Button("None", EditorStyles.label))
                        {
                            LoadPreset(-1);
                        }
                    }

                    EditorGUILayout.EndHorizontal();
                }
                GUILayout.EndVertical();
            }
            GUILayout.EndScrollView();
            GUILayout.Space(5);
            // --- Info panel for the highlighted preset ---
            var infoRect = EditorGUILayout.BeginVertical(GUILayout.ExpandHeight(true));
            GUI.Box(infoRect, "", EditorStyles.helpBox);
            GUILayout.Space(5);
            if (presetHighlight == -1)
            {
                GUILayout.Box(new GUIContent("Select a preset for more information.", infoIcon), EditorStyles.label);
            }
            else
            {
                GUILayout.Box(presets[presetHighlight].displayName, EditorStyles.boldLabel);
                EditorGUILayout.LabelField(presets[presetHighlight].description, EditorStyles.label);
                GUILayout.Space(5);
                GUILayout.BeginHorizontal();
                GUILayout.FlexibleSpace();
                GUILayout.FlexibleSpace();
                GUILayout.EndHorizontal();
            }
            GUILayout.Space(5);
            EditorGUILayout.EndVertical();
            GUILayout.Space(5);
            // --- Module toolbar: Add dropdown and New Preset button ---
            Rect toolbarRect = EditorGUILayout.BeginHorizontal();
            toolbarRect.x = 0;
            GUI.Box(toolbarRect, "", EditorStyles.toolbar);
            GUILayout.Box("Current Modules", EditorStyles.miniLabel);
            GUILayout.FlexibleSpace();
            Rect dropDownRect = EditorGUILayout.BeginHorizontal();
            if (GUILayout.Button(new GUIContent("Add", plusIcon, "Add a new module to the list"), EditorStyles.toolbarDropDown, GUILayout.Width(70)))
            {
                // Build the Add menu; module types already in the sequence are shown disabled.
                GenericMenu addMenu = new GenericMenu();
                for (int i = 0; i < autoSyncModuleTypes.Count; i++)
                {
                    bool isAdded = false;

                    for (int m = 0; m < currentModules.Count; m++)
                    {
                        if (currentModules[m].GetType() == autoSyncModuleTypes[i])
                        {
                            isAdded = true;
                            break;
                        }
                    }

                    if (isAdded)
                    {
                        addMenu.AddDisabledItem(new GUIContent(moduleInfos[autoSyncModuleTypes[i]].displayName));
                    }
                    else
                    {
                        // Copy the loop variable so each menu closure captures its own index.
                        int e = i;
                        addMenu.AddItem(new GUIContent(moduleInfos[autoSyncModuleTypes[i]].displayName), false, () => { AddModule(e); });
                    }
                }
                addMenu.AddSeparator("");
                addMenu.AddItem(new GUIContent("Get More Modules"), false, () => { RDExtensionWindow.ShowWindow("LipSync_Pro"); });
                addMenu.DropDown(dropDownRect);
            }
            if (GUILayout.Button(new GUIContent("New Preset", saveIcon, "Save the current setup as a new preset"), EditorStyles.toolbarButton, GUILayout.Width(90)))
            {
                var savePath = EditorUtility.SaveFilePanelInProject("Save AutoSync Preset", "New AutoSync Preset", "asset", "");
                if (!string.IsNullOrEmpty(savePath))
                {
                    AutoSyncPreset preset = null;

                    // NOTE(review): when the file already exists, the loaded preset is passed to
                    // CreateAsset without being updated from currentModules — confirm overwriting
                    // an existing preset path behaves as intended.
                    if (File.Exists(savePath))
                    {
                        preset = AssetDatabase.LoadAssetAtPath <AutoSyncPreset>(savePath);
                    }
                    else
                    {
                        preset = CreateInstance <AutoSyncPreset>();
                        preset.CreateFromModules(currentModules.ToArray());

                        preset.displayName = Path.GetFileNameWithoutExtension(savePath);
                        preset.description = "Using: ";
                        for (int i = 0; i < currentModules.Count; i++)
                        {
                            preset.description += currentModules[i].GetType().Name;
                            if (i < currentModules.Count - 1)
                            {
                                preset.description += ", ";
                            }
                        }
                    }

                    AssetDatabase.CreateAsset(preset, savePath);
                    AssetDatabase.Refresh();

                    presets         = AutoSyncUtility.GetPresets();
                    presetHighlight = -1;
                }
            }
            EditorGUILayout.EndHorizontal();
            EditorGUILayout.EndHorizontal();
            // --- Module list: per-module remove / reorder controls and inspector ---
            EditorGUILayout.BeginVertical();
            settingsScroll = GUILayout.BeginScrollView(settingsScroll, false, false);
            if (currentModules.Count == 0)
            {
                GUILayout.Space(10);
                GUILayout.Box("No Modules Added", EditorStyles.centeredGreyMiniLabel);
            }
            else
            {
                for (int i = 0; i < currentModules.Count; i++)
                {
                    var type = currentModules[i].GetType();
                    var info = moduleInfos[type];
                    GUILayout.BeginHorizontal();
                    GUILayout.Space(10);
                    GUILayout.Box(new GUIContent(info.displayName, infoIcon, info.description), EditorStyles.label);
                    GUILayout.Space(10);
                    GUILayout.EndHorizontal();
                    GUILayout.BeginHorizontal();
                    GUILayout.BeginVertical(GUILayout.Width(15));
                    GUILayout.Space(10);
                    // Each mutation breaks out of the loop: the list changed, so redraw next frame.
                    if (GUILayout.Button(new GUIContent(minusIcon, "Remove Module")))
                    {
                        DestroyImmediate(serializedModules[i]);
                        DestroyImmediate(currentModules[i]);
                        serializedModules.RemoveAt(i);
                        currentModules.RemoveAt(i);
                        break;
                    }
                    GUILayout.Space(5);
                    EditorGUI.BeginDisabledGroup(i == 0);
                    if (GUILayout.Button(new GUIContent(upIcon, "Move Up")))
                    {
                        // Insert a copy above, then remove the (shifted) original below.
                        currentModules.Insert(i - 1, currentModules[i]);
                        currentModules.RemoveAt(i + 1);
                        serializedModules.Insert(i - 1, serializedModules[i]);
                        serializedModules.RemoveAt(i + 1);
                        break;
                    }
                    EditorGUI.EndDisabledGroup();
                    EditorGUI.BeginDisabledGroup(i + 2 > currentModules.Count);
                    if (GUILayout.Button(new GUIContent(downIcon, "Move Down")))
                    {
                        // Insert a copy two slots down, then remove the original at i.
                        currentModules.Insert(i + 2, currentModules[i]);
                        currentModules.RemoveAt(i);
                        serializedModules.Insert(i + 2, serializedModules[i]);
                        serializedModules.RemoveAt(i);
                        break;
                    }
                    EditorGUI.EndDisabledGroup();
                    GUILayout.FlexibleSpace();
                    GUILayout.EndVertical();
                    GUILayout.BeginVertical();
                    // Warn when a module's required clip features aren't supplied by the clip
                    // or by an earlier module in the sequence.
                    var missing = GetMissingClipFeaturesInClipEditor(currentModules, i);
                    if (missing != ClipFeatures.None)
                    {
                        EditorGUILayout.HelpBox(string.Format("This module requires: {0}.\n These features must either be present in the clip already, or be provided by a module above this one.", missing), MessageType.Error);
                    }
                    serializedModules[i].OnInspectorGUI();
                    GUILayout.EndVertical();
                    GUILayout.EndHorizontal();
                    GUILayout.Space(15);
                }
            }
            GUILayout.FlexibleSpace();
            GUILayout.Space(5);
            GUILayout.EndScrollView();
            EditorGUILayout.EndVertical();
            GUILayout.Space(5);
            // --- Start button (single clip) ---
            GUILayout.BeginHorizontal();
            GUILayout.FlexibleSpace();
            EditorGUI.BeginDisabledGroup(!ready);
            if (GUILayout.Button("Start Single Process", GUILayout.Height(25)))
            {
                if (autoSyncInstance == null)
                {
                    autoSyncInstance = new AutoSync();
                }

                autoSyncInstance.RunSequence(currentModules.ToArray(), FinishedProcessingSingle, (LipSyncData)setup.data);
            }
            EditorGUI.EndDisabledGroup();
            GUILayout.FlexibleSpace();
            GUILayout.EndHorizontal();
            GUILayout.Space(20);
        }
        else
        {
            // --- Batch tab: needs clips AND modules before processing can start ---
            ready = clips.Count > 0 && currentModules.Count > 0;

            GUILayout.Space(5);
            GUILayout.Box("Select AudioClips", EditorStyles.boldLabel);
            GUILayout.Space(5);
            batchScroll = GUILayout.BeginScrollView(batchScroll);
            for (int a = 0; a < clips.Count; a++)
            {
                GUILayout.BeginHorizontal();
                GUILayout.Space(5);
                clips[a] = (AudioClip)EditorGUILayout.ObjectField(clips[a], typeof(AudioClip), false);
                GUILayout.FlexibleSpace();
                if (GUILayout.Button("Remove", GUILayout.MaxWidth(200)))
                {
                    // List changed mid-draw; break and redraw next frame.
                    clips.RemoveAt(a);
                    break;
                }
                GUILayout.Space(5);
                GUILayout.EndHorizontal();
            }
            GUILayout.Space(5);
            GUILayout.EndScrollView();
            GUILayout.Space(5);
            GUILayout.BeginHorizontal();
            if (GUILayout.Button("Add AudioClip"))
            {
                clips.Add(null);
            }
            if (GUILayout.Button("Add Selected"))
            {
                // Pull every AudioClip asset from the current project selection, skipping duplicates.
                foreach (AudioClip c in Selection.GetFiltered(typeof(AudioClip), SelectionMode.Assets))
                {
                    if (!clips.Contains(c))
                    {
                        clips.Add(c);
                    }
                }
            }
            GUILayout.EndHorizontal();
            GUILayout.Space(10);
            EditorGUILayout.HelpBox("Settings from the AutoSync Settings tab will be used. Make sure they are correct.", MessageType.Info);
            xmlMode         = EditorGUILayout.Toggle("Export as XML", xmlMode);
            loadTranscripts = EditorGUILayout.Toggle("Load Transcripts .txt", loadTranscripts);
            GUILayout.BeginHorizontal();
            GUILayout.FlexibleSpace();
            EditorGUI.BeginDisabledGroup(!ready);
            if (GUILayout.Button("Start Batch Process", GUILayout.Height(25)))
            {
                currentClip = 0;

                if (clips.Count > 0)
                {
                    if (autoSyncInstance == null)
                    {
                        autoSyncInstance = new AutoSync();
                    }

                    // Wrap the first clip in a temporary LipSyncData; FinishedProcessingMulti
                    // chains through the remaining clips.
                    LipSyncData tempData = CreateInstance <LipSyncData>();
                    tempData.clip   = clips[currentClip];
                    tempData.length = tempData.clip.length;

                    if (loadTranscripts)
                    {
                        tempData.transcript = AutoSyncUtility.TryGetTranscript(tempData.clip);
                    }

                    autoSyncInstance.RunSequence(currentModules.ToArray(), FinishedProcessingMulti, tempData);
                }
                else
                {
                    ShowNotification(new GUIContent("No clips added for batch processing!"));
                }
            }
            EditorGUI.EndDisabledGroup();
            GUILayout.FlexibleSpace();
            GUILayout.EndHorizontal();
            GUILayout.Space(20);
        }
    }
Example #16
0
    // Callback for each clip in a batch AutoSync run: saves (or logs) the result, then either
    // starts the next clip or finishes the batch and reports the overall outcome.
    private void FinishedProcessingMulti(LipSyncData outputData, AutoSync.ASProcessDelegateData data)
    {
        if (data.success)
        {
            var settings = LipSyncEditorExtensions.GetProjectFile();

            // Save the processed data next to its AudioClip, as .xml or .asset depending on mode.
            string outputPath = AssetDatabase.GetAssetPath(outputData.clip);
            outputPath = Path.ChangeExtension(outputPath, xmlMode ? "xml" : "asset");

            try
            {
                LipSyncClipSetup.SaveFile(settings, outputPath, xmlMode, outputData.transcript, outputData.length, outputData.phonemeData, outputData.emotionData,
                                          outputData.gestureData, outputData.clip);
            }
            catch (Exception e)
            {
                Debug.LogError(e.StackTrace);
            }
        }
        else
        {
            // Record the failure but keep going with the rest of the batch.
            batchIncomplete = true;
            string clipName = "Undefined";
            if (outputData.clip)
            {
                clipName = outputData.clip.name;
            }

            Debug.LogErrorFormat("AutoSync: Processing failed on clip '{0}'. Continuing with batch.", clipName);
        }

        // Advance to the next clip, if any remain.
        // (Fixed off-by-one: the old check 'currentClip < clips.Count' still passed after the
        // final clip, so the increment pushed the index to clips.Count and clips[currentClip]
        // threw ArgumentOutOfRangeException instead of reaching the completion branch.)
        if (currentClip + 1 < clips.Count)
        {
            currentClip++;

            if (autoSyncInstance == null)
            {
                autoSyncInstance = new AutoSync();
            }

            LipSyncData tempData = CreateInstance <LipSyncData>();
            tempData.clip   = clips[currentClip];
            tempData.length = tempData.clip.length;

            if (loadTranscripts)
            {
                tempData.transcript = AutoSyncUtility.TryGetTranscript(tempData.clip);
            }

            autoSyncInstance.RunSequence(currentModules.ToArray(), FinishedProcessingMulti, tempData);
        }
        else
        {
            // Batch finished: refresh assets and report the overall outcome.
            AssetDatabase.Refresh();
            EditorUtility.ClearProgressBar();

            if (!batchIncomplete)
            {
                setup.ShowNotification(new GUIContent("Batch AutoSync Completed Successfully"));
            }
            else
            {
                setup.ShowNotification(new GUIContent("Batch AutoSync Completed With Errors"));
            }

            Close();
        }
    }
 /// <summary>
 /// Determines whether the supplied LipSyncData clip already provides every
 /// feature the given module requires as input.
 /// </summary>
 /// <param name="data">Clip to inspect.</param>
 /// <param name="module">Module whose input requirements are checked.</param>
 /// <returns>True when the clip is missing none of the module's required features.</returns>
 public static bool CheckIsClipCompatible(LipSyncData data, AutoSyncModule module)
 {
     ClipFeatures missing = GetMissingClipFeatures(data, module);
     return missing == ClipFeatures.None;
 }
 /// <summary>
 /// Starts playback of the given LipSyncData on the speech controller and
 /// keeps the coroutine alive until the controller reports playback finished.
 /// </summary>
 /// <param name="data">Clip data to play.</param>
 private IEnumerator PlaySpeechCoroutine(LipSyncData data)
 {
     _speechController.Play(data);

     // Poll once per frame until the controller stops playing (equivalent to WaitWhile).
     while (_speechController.IsPlaying)
     {
         yield return null;
     }
 }
Example #19
0
        /// <summary>
        /// Runs the Montreal Forced Aligner (MFA) over the clip's audio and transcript,
        /// parses the resulting TextGrid, and converts its phone intervals into
        /// PhonemeMarkers on <paramref name="inputClip"/>. The callback is invoked
        /// exactly once with either success or a descriptive failure message.
        /// </summary>
        /// <param name="inputClip">Clip supplying the AudioClip and transcript; phonemeData is overwritten on success.</param>
        /// <param name="callback">Completion delegate receiving the clip and a result descriptor.</param>
        public override void Process(LipSyncData inputClip, AutoSync.ASProcessDelegate callback)
        {
            string mfaPath   = EditorPrefs.GetString("as_montrealfa_application_path");
            string assetPath = AssetDatabase.GetAssetPath(inputClip.clip);

            // Fix: AssetDatabase.GetAssetPath returns an empty string (never null) when
            // the clip isn't an asset; the original null check ran AFTER Substring and
            // could never fire. Validate before slicing the path.
            if (string.IsNullOrEmpty(assetPath))
            {
                callback.Invoke(inputClip, new AutoSync.ASProcessDelegateData(false, "Audio path could not be found.", ClipFeatures.None));
                return;
            }

            // Strip the leading "Assets/" segment ("/Assets".Length == 7 characters).
            string audioPath = assetPath.Substring("/Assets".Length);

            if (!AutoSyncUtility.VerifyProgramAtPath(mfaPath, "align"))
            {
                callback.Invoke(inputClip, new AutoSync.ASProcessDelegateData(false, "Montreal Forced Aligner application path is not verified.", ClipFeatures.None));
                return;
            }

            // Get absolute path
            audioPath = Application.dataPath + "/" + audioPath;

            // Reject paths or filenames the OS / MFA could not handle.
            if (audioPath.IndexOfAny(Path.GetInvalidPathChars()) >= 0 || Path.GetFileNameWithoutExtension(audioPath).IndexOfAny(Path.GetInvalidFileNameChars()) >= 0)
            {
                callback.Invoke(inputClip, new AutoSync.ASProcessDelegateData(false, "Audio path contains invalid characters.", ClipFeatures.None));
                return;
            }

            // Load Language Model
            ASMontrealLanguageModel model = ASMontrealLanguageModel.Load(languageModel);

            if (model == null)
            {
                callback.Invoke(inputClip, new AutoSync.ASProcessDelegateData(false, "Language Model failed to load.", ClipFeatures.None));
                return;
            }

            string basePath    = model.GetBasePath();
            string lexiconPath = "";

            if (model.usePredefinedLexicon)
            {
                lexiconPath = basePath + model.lexiconPath;
            }
            else
            {
                callback.Invoke(inputClip, new AutoSync.ASProcessDelegateData(false, "Support for generated lexicons using a G2P model is coming soon.", ClipFeatures.None));
                return;
            }

            // MFA derives corpus entry names from the file name; replace characters
            // that would break paths or MFA's naming.
            string adjustedName = Path.GetFileNameWithoutExtension(audioPath).Replace('.', '_').Replace(' ', '_').Replace('\\', '_').Replace('/', '_');

            string corpusPath = Application.temporaryCachePath + "/" + adjustedName + "_MFA_Corpus";
            string outputPath = Application.temporaryCachePath + "/" + adjustedName + "_MFA_Output";

            // Delete folders if they already exist
            try
            {
                if (Directory.Exists(corpusPath))
                {
                    Directory.Delete(corpusPath, true);
                }
                if (Directory.Exists(outputPath))
                {
                    Directory.Delete(outputPath, true);
                }
            }
            catch
            {
                callback.Invoke(inputClip, new AutoSync.ASProcessDelegateData(false, "Attempt to clear temporary MFA folders failed. Are they open in another application?", ClipFeatures.None));
                return;
            }

            // Create temporary folders
            Directory.CreateDirectory(corpusPath);
            Directory.CreateDirectory(outputPath);

            // Copy or convert audio clip to corpus folder (conversion targets 16 kHz 16-bit mono WAV).
            if (AutoSyncConversionUtility.IsConversionAvailable && useAudioConversion)
            {
                AutoSyncConversionUtility.StartConversion(audioPath, corpusPath + "/" + adjustedName + ".wav", AutoSyncConversionUtility.AudioFormat.WavPCM, 16000, 16, 1);
            }
            else
            {
                File.Copy(audioPath, corpusPath + "/" + adjustedName + Path.GetExtension(audioPath));
            }

            // Create transcript (.lab) file in corpus folder. Fix: wrap the writer in
            // `using` so the handle is released even if Write throws.
            using (StreamWriter transcriptWriter = File.CreateText(corpusPath + "/" + adjustedName + ".lab"))
            {
                transcriptWriter.Write(inputClip.transcript.Replace('-', ' '));
            }

            // Run aligner from the project root (dataPath minus the trailing "Assets").
            Directory.SetCurrentDirectory(Application.dataPath.Remove(Application.dataPath.Length - 6));
            mfaPath = Path.GetFullPath(mfaPath);

            // Fix: dispose the Process handle on every path (the original only
            // called Close() when the aligner had to be killed).
            using (System.Diagnostics.Process process = new System.Diagnostics.Process())
            {
                process.StartInfo.FileName = mfaPath;

#if UNITY_EDITOR_WIN
                process.StartInfo.Arguments = "\"" + corpusPath + "\" \"" + lexiconPath + "\" \"" + basePath + model.acousticModelPath + "\" \"" + outputPath + "\" --quiet";
#elif UNITY_EDITOR_OSX
                process.StartInfo.Arguments = "\"" + corpusPath + "\" \"" + lexiconPath + "\" \"" + basePath + model.acousticModelPath + "\" \"" + outputPath + "\"";
#endif
                // NOTE(review): CreateNoWindow only takes effect when UseShellExecute is
                // false; kept as-is to avoid changing how the aligner is launched.
                process.StartInfo.UseShellExecute = true;
                process.StartInfo.CreateNoWindow  = true;

                process.Start();
                // Give the aligner up to 15 seconds, then kill it rather than hang the editor.
                process.WaitForExit(15000);

                if (!process.HasExited)
                {
                    process.Kill();
                }
            }

            var           outputFiles  = Directory.GetFiles(outputPath, "*", SearchOption.AllDirectories);
            string        textGridPath = "";
            List <string> oovs         = new List <string>();
            bool          nothingFound = true;

            // Locate the TextGrid and collect any out-of-vocabulary words MFA reported.
            for (int i = 0; i < outputFiles.Length; i++)
            {
                string extension = Path.GetExtension(outputFiles[i]).ToLowerInvariant();

                if (extension == ".textgrid")
                {
                    textGridPath = outputFiles[i];
                    nothingFound = false;
                }
                else if (extension == ".txt")
                {
                    if (Path.GetFileNameWithoutExtension(outputFiles[i]) == "oovs_found")
                    {
                        using (var reader = new StreamReader(outputFiles[i]))
                        {
                            while (!reader.EndOfStream)
                            {
                                oovs.Add(reader.ReadLine());
                            }
                        }
                    }
                }
            }

            // Surface out-of-vocab words so the user can fix the transcript or lexicon.
            if (oovs.Count > 0)
            {
                Debug.Log("Found out-of-vocabulary words:");
                for (int i = 0; i < oovs.Count; i++)
                {
                    Debug.Log(oovs[i]);
                }
            }

            if (nothingFound)
            {
                // Optionally retry; `attempts` is a field so recursion terminates at maxAttempts.
                if (autoRetry && attempts < maxAttempts - 1)
                {
                    attempts++;
                    Process(inputClip, callback);
                    return;
                }

                callback.Invoke(inputClip, new AutoSync.ASProcessDelegateData(false, "MFA Application Failed. Check your audio encoding or enable conversion.", ClipFeatures.None));
                return;
            }

            // Load in TextGrid
            TextGridUtility.TextGridItem[] items = TextGridUtility.ParseTextGridFile(textGridPath);

            // Guard clause: bail early on a malformed TextGrid instead of nesting the
            // whole conversion loop inside an if/else.
            if (items == null || items.Length != 2)
            {
                callback.Invoke(inputClip, new AutoSync.ASProcessDelegateData(false, "Data loaded from MFA TextGrid file was invalid or incomplete.", ClipFeatures.None));
                return;
            }

            var  settings     = LipSyncEditorExtensions.GetProjectFile();
            bool needsMapping = model.sourcePhoneticAlphabetName != settings.phonemeSet.scriptingName;
            Dictionary <string, string> phonemeMapper = null;

            if (needsMapping)
            {
                switch (model.mappingMode)
                {
                case AutoSyncPhonemeMap.MappingMode.InternalMap:
                    phonemeMapper = model.phonemeMap.GenerateAtoBDictionary();
                    break;

                case AutoSyncPhonemeMap.MappingMode.ExternalMap:
                    if (model.externalMap)
                    {
                        phonemeMapper = model.externalMap.phonemeMap.GenerateAtoBDictionary();
                    }
                    else
                    {
                        callback.Invoke(inputClip, new AutoSync.ASProcessDelegateData(false, "Language Model specifies an external phoneme map, but no phoneme map was provided.", ClipFeatures.None));
                        return;
                    }
                    break;

                default:
                case AutoSyncPhonemeMap.MappingMode.AutoDetect:
                    phonemeMapper = AutoSyncUtility.FindBestFitPhonemeMap(model.sourcePhoneticAlphabetName, settings.phonemeSet.scriptingName);
                    if (phonemeMapper == null)
                    {
                        callback.Invoke(inputClip, new AutoSync.ASProcessDelegateData(false, string.Format("No PhonemeMap could be found to map from '{0}' to the current PhonemeSet '{1}'.", model.sourcePhoneticAlphabetName, settings.phonemeSet.scriptingName), ClipFeatures.None));
                        return;
                    }
                    break;
                }

                if (phonemeMapper.Count == 0)
                {
                    Debug.LogWarning("PhonemeMap is empty - this may be due to the language model's mapping mode being set to 'InternalMap' but with no entries being added to the map. Phonemes may not be generated.");
                }
            }

            // Get Phones: convert each TextGrid phone interval into a PhonemeMarker.
            List <PhonemeMarker> data = new List <PhonemeMarker>();

            for (int i = 0; i < items[1].intervals.Length; i++)
            {
                if (items[1].intervals[i] == null)
                {
                    Debug.LogFormat("Interval {0} is null :o", i);
                    continue;
                }

                // Interval text is quoted in the TextGrid; take the content between the
                // quotes and strip stress digits (e.g. "AH0" -> "AH").
                string label = items[1].intervals[i].text.Split('"')[1];
                label = System.Text.RegularExpressions.Regex.Replace(label, "[0-9]", "");

                // Skip silence intervals.
                if (label == "sil")
                {
                    continue;
                }

                // Fix: when no mapping is needed the mapper is null; the original
                // unconditionally called phonemeMapper.ContainsKey and threw a
                // NullReferenceException for models matching the current PhonemeSet.
                if (needsMapping && !phonemeMapper.ContainsKey(label))
                {
                    Debug.LogWarning("Phoneme mapper does not contain '" + label + "' Skipping this entry.");
                    continue;
                }

                string phonemeName = needsMapping ? phonemeMapper[label] : label;

                // Resolve the phoneme name to its index in the current PhonemeSet.
                bool found = false;
                int  phoneme;
                for (phoneme = 0; phoneme < settings.phonemeSet.phonemes.Length; phoneme++)
                {
                    if (settings.phonemeSet.phonemes[phoneme].name == phonemeName)
                    {
                        found = true;
                        break;
                    }
                }

                if (!found)
                {
                    Debug.LogWarning("Phoneme mapper returned '" + phonemeName + "' but this phoneme does not exist in the current set. Skipping this entry.");
                    continue;
                }

                // Marker times are normalised to [0,1] over the clip length.
                double start = items[1].intervals[i].xmin / inputClip.length;
                double end   = items[1].intervals[i].xmax / inputClip.length;

                double length = end - start;
                if ((length * inputClip.length) < minLengthForSustain)
                {
                    // Short phone: single marker at its midpoint.
                    data.Add(new PhonemeMarker(phoneme, (float)(start + (length / 2))));
                }
                else
                {
                    // Long phone: sustained marker at the start plus a closing marker at the end.
                    data.Add(new PhonemeMarker(phoneme, (float)start, 1, true));
                    data.Add(new PhonemeMarker(phoneme, (float)end));
                }
            }

            inputClip.phonemeData = data.ToArray();

            if (oovs.Count > 0)
            {
                callback.Invoke(inputClip, new AutoSync.ASProcessDelegateData(true, "Completed, but some words were not found. Check the console.", GetOutputCompatibility()));
            }
            else
            {
                callback.Invoke(inputClip, new AutoSync.ASProcessDelegateData(true, "", GetOutputCompatibility()));
            }
        }
        /// <summary>
        /// Runs PocketSphinx phoneme recognition on the clip's audio (optionally
        /// converting it to 16 kHz 16-bit mono WAV first) and writes the recognised
        /// phonemes into <paramref name="inputClip"/>, reporting via the callback.
        /// </summary>
        /// <param name="inputClip">Clip supplying the AudioClip; phonemeData is overwritten on success.</param>
        /// <param name="callback">Completion delegate receiving the clip and a result descriptor.</param>
        public override void Process(LipSyncData inputClip, AutoSync.ASProcessDelegate callback)
        {
            bool   converted = false;
            string assetPath = AssetDatabase.GetAssetPath(inputClip.clip);

            // Fix: GetAssetPath returns an empty string (never null) for an unknown
            // asset, so the original Substring-then-null-check could either throw or
            // silently return without ever invoking the callback. Validate first and
            // report the failure explicitly.
            if (string.IsNullOrEmpty(assetPath))
            {
                callback.Invoke(inputClip, new AutoSync.ASProcessDelegateData(false, "Audio path could not be found.", ClipFeatures.None));
                return;
            }

            // Strip the leading "Assets/" segment, then re-root at the absolute data path.
            string audioPath = Application.dataPath + "/" + assetPath.Substring("/Assets".Length);

            // Check Path
            if (audioPath.IndexOfAny(Path.GetInvalidPathChars()) >= 0 || Path.GetFileNameWithoutExtension(audioPath).IndexOfAny(Path.GetInvalidFileNameChars()) >= 0)
            {
                callback.Invoke(inputClip, new AutoSync.ASProcessDelegateData(false, "Audio path contains invalid characters.", ClipFeatures.None));
                return;
            }

            bool failed = false;

            if (AutoSyncConversionUtility.IsConversionAvailable && useAudioConversion)
            {
                converted = true;
                string newPath = Path.ChangeExtension(audioPath, ".converted.wav");
                if (!AutoSyncConversionUtility.StartConversion(audioPath, newPath, AutoSyncConversionUtility.AudioFormat.WavPCM, 16000, 16, 1))
                {
                    failed = true;
                }
                audioPath = newPath;
            }

            // Removes the temporary converted wav (if one was created) and refreshes
            // the asset database. Replaces three copies of the same cleanup block.
            void CleanupConvertedFile()
            {
                if (converted && File.Exists(audioPath))
                {
                    File.Delete(audioPath);
                    AssetDatabase.Refresh();
                }
            }

            if (!File.Exists(audioPath) || failed)
            {
                CleanupConvertedFile();
                callback.Invoke(inputClip, new AutoSync.ASProcessDelegateData(false, "Audio conversion failed or file was deleted.", ClipFeatures.None));
                return;
            }

            // Load Language Model
            ASPocketSphinxLanguageModel model = ASPocketSphinxLanguageModel.Load(languageModel);
            if (model == null)
            {
                CleanupConvertedFile();
                callback.Invoke(inputClip, new AutoSync.ASProcessDelegateData(false, "Language Model failed to load.", ClipFeatures.None));
                return;
            }
            string basePath = model.GetBasePath();

            // Assemble the pocketsphinx command-line arguments.
            List <string> args = new List <string>();
            args.Add("-infile");
            args.Add(audioPath);
            args.Add("-hmm");
            args.Add(basePath + model.hmmDir);
            args.Add("-allphone");
            args.Add(basePath + model.allphoneFile);
            if (allphone_ciEnabled)
            {
                args.Add("-allphone_ci"); args.Add("yes");
            }
            if (backtraceEnabled)
            {
                args.Add("-backtrace"); args.Add("yes");
            }
            args.Add("-time");
            args.Add("yes");
            args.Add("-beam");
            args.Add("1e" + beamExponent);
            args.Add("-pbeam");
            args.Add("1e" + pbeamExponent);
            args.Add("-lw");
            args.Add(lwValue.ToString());

            SphinxWrapper.Recognize(args.ToArray());

            // Recognition is asynchronous; the continuation fires once the wrapper
            // reports it has finished.
            ContinuationManager.Add(() => SphinxWrapper.isFinished, () =>
            {
                if (SphinxWrapper.error != null)
                {
                    callback.Invoke(inputClip, new AutoSync.ASProcessDelegateData(false, SphinxWrapper.error, ClipFeatures.None));
                    return;
                }

                List <PhonemeMarker> data = ParseOutput(
                    SphinxWrapper.result.Split(new string[] { "\r\n", "\n" }, StringSplitOptions.RemoveEmptyEntries),
                    model,
                    inputClip.clip
                    );

                inputClip.phonemeData = data.ToArray();
                callback.Invoke(inputClip, new AutoSync.ASProcessDelegateData(true, "", GetOutputCompatibility()));

                CleanupConvertedFile();
            });
        }
    /// <summary>
    /// Watson Conversation message callback: deserializes the raw response into a
    /// MessageResponse, drives the avatar / answer UI for the current app mode
    /// (SAA or CSU), and stores the returned conversation context for the next request.
    /// </summary>
    /// <param name="resp">Raw response object from the Conversation service; also read as a dictionary for "context".</param>
    /// <param name="customData">Custom data dictionary; assumes a "json" entry is present — TODO confirm against caller.</param>
    private void OnMessage(object resp, Dictionary <string, object> customData)
    {
        Log.Debug("ExampleConversation.OnMessage()", "Conversation: Message Response: {0}",
                  customData["json"].ToString());

        //  Convert resp to fsdata
        fsData   fsdata = null;
        fsResult r      = _serializer.TrySerialize(resp.GetType(), resp, out fsdata);

        if (!r.Succeeded)
        {
            throw new WatsonException(r.FormattedMessages);
        }

        //  Convert fsdata to MessageResponse
        MessageResponse messageResponse = new MessageResponse();
        object          obj             = messageResponse;

        r = _serializer.TryDeserialize(fsdata, obj.GetType(), ref obj);
        if (!r.Succeeded)
        {
            throw new WatsonException(r.FormattedMessages);
        }
        // Debug.Log(messageResponse.output.nodes_visited[messageResponse.output.nodes_visited.Length]);
        // No answer text returned: prompt the user to rephrase, on the panel for the active mode.
        if (messageResponse.output.text.Length == 0)
        {
            if (AppManager.Instance.currentAppMode == AppMode.SAA)
            {
                answertext[0].SetActive(true);
                sSA_UI_OUT.Answer_txt.text = "please type a valid question";
            }
            else if (AppManager.Instance.currentAppMode == AppMode.CSU)
            {
                answertext[1].SetActive(true);
                cSU_UI_OUT.Answer_txt.text = "please type a valid question";
            }
        }
        else
        {
            if (AppManager.Instance.currentAppMode == AppMode.SAA)
            {
                temp = new string[2];
                //temp = messageResponse.output.text[0].Split('/');
                //Debug.Log(temp[0]+"answer"+temp[1]);
                Debug.Log(messageResponse.intents[0].confidence);
                // Only act on reasonably confident intent matches.
                float confdc = messageResponse.intents[0].confidence;
                if (confdc > 0.5f)
                {
                    // Response text is "answer/clipName"; [0] is shown, [1] names the
                    // LipSyncData resource to play — assumes exactly one '/', TODO confirm.
                    temp = messageResponse.output.text[0].Split('/');
                    sSA_UI_OUT.Answer_txt.text = temp[0];
                    //AppManager.AnswerIndx = int.Parse(temp[1]);
                    //SAA_Avatar.GetComponent<LipSync>().defaultClip = SAA_dat[AppManager.AnswerIndx - 1];
                    // Debug.Log(AppManager.AnswerIndx);
                    // SAA_Avatar.GetComponent<LipSync>().Play(SAA_dat[AppManager.AnswerIndx - 1]);
                    currentdat = Resources.Load <LipSyncData>("SAA_datafiles/" + temp[1]);
                    answertext[0].SetActive(true);
                    SAA_Avatar.GetComponent <LipSync>().Play(currentdat);
                    animator.SetBool("StartTalk", true);
                    animator.SetBool("StopTalk", false);

                    StartCoroutine(AudioSound());
                }
                else
                {
                    // NOTE(review): this low-confidence branch writes to BOTH mode UIs,
                    // unlike the empty-response branch above — verify this is intended.
                    answertext[0].SetActive(true);
                    sSA_UI_OUT.Answer_txt.text = "please type a valid question";
                    cSU_UI_OUT.Answer_txt.text = "please type a valid question";
                }
            }
            else if (AppManager.Instance.currentAppMode == AppMode.CSU)

            {
                // Mirrors the SAA branch above, but targets the CSU panel and clip folder.
                temp = new string[2];
                //temp = messageResponse.output.text[0].Split('/');
                //Debug.Log(temp[0]+"answer"+temp[1]);
                Debug.Log(messageResponse.intents[0].confidence);
                float confdc = messageResponse.intents[0].confidence;
                if (confdc > 0.5f)
                {
                    temp = messageResponse.output.text[0].Split('/');
                    answertext[1].SetActive(true);
                    cSU_UI_OUT.Answer_txt.text = temp[0];
                    // AppManager.AnswerIndx = int.Parse(temp[1]);
                    //SAA_Avatar.GetComponent<LipSync>().defaultClip = SAA_dat[AppManager.AnswerIndx - 1];
                    // Debug.Log("coming");
                    // Debug.Log(AppManager.AnswerIndx - 1);
                    // SAA_Avatar.GetComponent<LipSync>().Play(CSU_dat[AppManager.AnswerIndx - 1]);
                    currentdat = Resources.Load <LipSyncData>("CSU_datafiles/" + temp[1]);
                    answertext[1].SetActive(true);
                    SAA_Avatar.GetComponent <LipSync>().Play(currentdat);
                    animator.SetBool("StartTalk", true);
                    animator.SetBool("StopTalk", false);
                    StartCoroutine(AudioSound());
                }
                else
                {
                    answertext[1].SetActive(true);
                    sSA_UI_OUT.Answer_txt.text = "please type a valid question";
                    cSU_UI_OUT.Answer_txt.text = "please type a valid question";
                }
            }
        }

        // NOTE(review): indexes nodes_visited[0] unconditionally — will throw if the
        // service visited no nodes; confirm that can't happen for this workspace.
        Debug.Log(messageResponse.output.nodes_visited[0]);
        //  Set context for next round of messaging
        object _tempContext = null;

        (resp as Dictionary <string, object>).TryGetValue("context", out _tempContext);

        if (_tempContext != null)
        {
            _context = _tempContext as Dictionary <string, object>;
        }
        else
        {
            Log.Debug("ExampleConversation.OnMessage()", "Failed to get context");
        }
        _waitingForResponse = false;
    }