Code Example #1
 /// <summary>
 /// Function that is called when the given SpeechToTextServiceWidget has received its last response. If there are
 /// no waiting SpeechToTextServiceWidgets left, this function wraps up the current comparison session.
 /// </summary>
 /// <param name="serviceWidget">The speech-to-text service widget that received a last response</param>
 void OnSpeechToTextReceivedLastResponse(SpeechToTextServiceWidget serviceWidget)
 {
     SmartLogger.Log(DebugFlags.SpeechToTextWidgets, "Response from " + serviceWidget.SpeechToTextServiceString());
     m_WaitingSpeechToTextServiceWidgets.Remove(serviceWidget);
     if (m_WaitingSpeechToTextServiceWidgets.Count == 0)
     {
         SmartLogger.Log(DebugFlags.SpeechToTextWidgets, "Responses from everyone");
         FinishComparisonSession();
     }
 }
Code Example #2
 /// <summary>
 /// Clears the current results text and tells the speech-to-text service to start recording.
 /// </summary>
 public void StartRecording()
 {
     SmartLogger.Log(DebugFlags.SpeechToTextWidgets, "Start service widget recording");
     m_WillDisplayReceivedResults         = true;
     m_WaitingForLastFinalResultOfSession = false;
     m_LastResultWasFinal   = false;
     m_PreviousFinalResults = "";
     results = m_PreviousFinalResults;
     m_SpeechToTextService.StartRecording();
     onRecord.Invoke();
 }
Code Example #3
 /// <summary>
 /// Unregisters callbacks with each SpeechToTextServiceWidget.
 /// </summary>
 void UnregisterSpeechToTextServiceWidgetsCallbacks()
 {
     if (m_SpeechToTextServiceWidgets != null)
     {
         SmartLogger.Log(DebugFlags.SpeechToTextWidgets, "unregister service widgets callbacks");
         foreach (var serviceWidget in m_SpeechToTextServiceWidgets)
         {
             SmartLogger.Log(DebugFlags.SpeechToTextWidgets, "unregister service widget callbacks");
             serviceWidget.UnregisterOnRecordingTimeout(OnRecordTimeout);
             serviceWidget.UnregisterOnReceivedLastResponse(OnSpeechToTextReceivedLastResponse);
         }
     }
 }
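The matching registration method is not among the examples on this page; presumably it mirrors this one with += subscriptions via Register* counterparts. A minimal sketch under that assumption:

 void RegisterSpeechToTextServiceWidgetsCallbacks()
 {
     if (m_SpeechToTextServiceWidgets != null)
     {
         foreach (var serviceWidget in m_SpeechToTextServiceWidgets)
         {
             // Assumed Register* counterparts of the Unregister* calls above.
             serviceWidget.RegisterOnRecordingTimeout(OnRecordTimeout);
             serviceWidget.RegisterOnReceivedLastResponse(OnSpeechToTextReceivedLastResponse);
         }
     }
 }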
Code Example #4
        /// <summary>
        /// Starts recording audio for each speech-to-text service widget if not already recording.
        /// </summary>
        void StartRecording()
        {
            if (!m_IsRecording)
            {
                SmartLogger.Log(DebugFlags.SpeechToTextWidgets, "Start comparison recording");
                m_IsCurrentlyInSpeechToTextSession = true;
                m_IsRecording = true;

                m_WaitingSpeechToTextServiceWidgets.Clear();

                SmartLogger.Log(DebugFlags.SpeechToTextWidgets, "tell service widget to start recording");
                m_SpeechToTextServiceWidgets.StartRecording();
            }
        }
Code Example #5
 /// <summary>
 /// Does any final processing necessary for the results of the last started session and then
 /// stops the widget from displaying results until the start of the next session.
 /// </summary>
 void ProcessEndResults()
 {
     SmartLogger.Log(DebugFlags.SpeechToTextWidgets, m_SpeechToTextService.GetType().ToString() + " got last response");
     if (m_ComparisonPhrase != null)
     {
         DisplayAccuracyOfEndResults(m_ComparisonPhrase);
     }
     LogFileManager.Instance.WriteTextToFileIfShouldLog(SpeechToTextServiceString() + ": " + m_ResultsTextUI.text);
     if (m_OnReceivedLastResponse != null)
     {
         m_OnReceivedLastResponse(this);
     }
     m_WillDisplayReceivedResults = false;
 }
Code Example #6
        /// <summary>
        /// Translates speech to text by making a request to the speech-to-text API.
        /// </summary>
        protected override IEnumerator TranslateRecordingToText()
        {
            m_TempAudioComponent.ClearTempAudioFiles();

            // Save recorded audio to a WAV file.
            string recordedAudioFilePath = SavWav.Save(m_TempAudioComponent.TempAudioRelativePath(), AudioRecordingManager.Instance.RecordedAudio);

            // Construct a request with the WAV file and send it.
            var request = new Request("POST", Constants.WitAiSpeechToTextBaseURL + "?" +
                                      Constants.WitAiVersionParameterName + "=" + DateTime.Now.ToString(Constants.WitAiVersionDateFormat));

            request.headers.Add("Authorization", "Bearer " + m_APIAccessToken);
            request.headers.Add("Content-Type", "audio/wav");
            request.Bytes = File.ReadAllBytes(recordedAudioFilePath);
            SmartLogger.Log(DebugFlags.WitAINonStreamingSpeechToText, "Sending request");
            request.Send();

            float startTime = Time.time;

            while (!request.isDone)
            {
                yield return(null);
            }
            SmartLogger.Log(DebugFlags.WitAINonStreamingSpeechToText, "response time: " + (Time.time - startTime));

            // Finally, grab the response JSON once the request is done.
            var responseJSON = new JSONObject(request.response.Text, int.MaxValue);

            SmartLogger.Log(DebugFlags.WitAINonStreamingSpeechToText, "Received request result");
            SmartLogger.Log(DebugFlags.WitAINonStreamingSpeechToText, responseJSON.ToString());

            string errorText = WitAiSpeechToTextResponseJSONParser.GetErrorFromResponseJSON(responseJSON);

            if (errorText != null)
            {
                if (m_OnError != null)
                {
                    m_OnError(errorText);
                }
            }

            if (m_OnTextResult != null)
            {
                m_OnTextResult(WitAiSpeechToTextResponseJSONParser.GetTextResultFromResponseJSON(responseJSON));
            }

            m_TempAudioComponent.ClearTempAudioFiles();
        }
Code Example #7
        /// <summary>
        /// Computes the accuracy (percentage) of the end text results in comparison to the given phrase, by using
        /// the Levenshtein Distance between the two strings, and displays this percentage in the results text UI.
        /// </summary>
        /// <param name="originalPhrase">The phrase to compare against</param>
        void DisplayAccuracyOfEndResults(string originalPhrase)
        {
            string speechToTextResult = StringUtilities.TrimSpecialFormatting(m_ResultsTextUI.text, new HashSet <char>(),
                                                                              m_LeadingCharsForSpecialWords, m_SurroundingCharsForSpecialText);

            originalPhrase = StringUtilities.TrimSpecialFormatting(originalPhrase, new HashSet <char>(),
                                                                   m_LeadingCharsForSpecialWords, m_SurroundingCharsForSpecialText);

            int levenDistance = StringUtilities.LevenshteinDistance(speechToTextResult, originalPhrase);

            SmartLogger.Log(DebugFlags.SpeechToTextWidgets, m_SpeechToTextService.GetType().ToString() + " compute accuracy of text: \"" + speechToTextResult + "\"");
            float accuracy = Mathf.Max(0, 100f - (100f * (float)levenDistance / (float)originalPhrase.Length));

            m_PreviousFinalResults = "[Accuracy: " + accuracy + "%] " + m_PreviousFinalResults;
            m_ResultsTextUI.text   = m_PreviousFinalResults;
        }
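As a concrete check of the formula: comparing "hello world" against "hallo world" gives a Levenshtein distance of 1 over an 11-character phrase, so the displayed accuracy is 100 - 100 * 1/11 ≈ 90.9%. StringUtilities.LevenshteinDistance itself is not shown on this page; a minimal sketch of the standard dynamic-programming algorithm it is assumed to implement (Math.Min is System.Math):

        // Sketch of the classic two-dimensional DP edit-distance table.
        static int LevenshteinDistance(string a, string b)
        {
            var d = new int[a.Length + 1, b.Length + 1];
            for (int i = 0; i <= a.Length; i++) d[i, 0] = i;    // i deletions
            for (int j = 0; j <= b.Length; j++) d[0, j] = j;    // j insertions
            for (int i = 1; i <= a.Length; i++)
            {
                for (int j = 1; j <= b.Length; j++)
                {
                    int cost = a[i - 1] == b[j - 1] ? 0 : 1;    // substitution cost
                    d[i, j] = Math.Min(Math.Min(d[i - 1, j] + 1, d[i, j - 1] + 1),
                                       d[i - 1, j - 1] + cost);
                }
            }
            return d[a.Length, b.Length];
        }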
Code Example #8
 /// <summary>
 /// Clears the current results text and tells the speech-to-text service to start recording.
 /// </summary>
 public void StartRecording()
 {
     SmartLogger.Log(DebugFlags.SpeechToTextWidgets, "Start service widget recording");
     m_WillDisplayReceivedResults         = true;
     m_WaitingForLastFinalResultOfSession = false;
     m_LastResultWasFinal = false;
      m_PreviousFinalResults = "";
      m_ResultsTextUI.text   = m_PreviousFinalResults;
     m_SpeechToTextService.StartRecording();
 }
Code Example #9
 /// <summary>
 /// Function that is called when an error occurs. If this object is waiting for
 /// a last response, then this error is treated as the last "result" of the current session.
 /// </summary>
 /// <param name="text">The error text</param>
 void OnSpeechToTextError(string text)
 {
     SmartLogger.LogError(DebugFlags.SpeechToTextWidgets, SpeechToTextServiceString() + " error: " + text);
     if (m_WillDisplayReceivedResults)
     {
         m_PreviousFinalResults += "[Error: " + text + "] ";
         results = m_PreviousFinalResults;
         if (m_WaitingForLastFinalResultOfSession)
         {
             m_WaitingForLastFinalResultOfSession = false;
             if (m_OnReceivedLastResponse != null)
             {
                 m_OnReceivedLastResponse(this);
             }
         }
     }
 }
Code Example #10
        public void TestSeveralWithMockRepositoryMoqs()
        {
            var repo          = new MockRepository(MockBehavior.Default);
            var logWriterMock = repo.Create <ILogWriter>();

            logWriterMock.Setup(lw => lw.Write(It.IsAny <string>()));

            var logMailerMock = repo.Create <ILogMailer>();

            logMailerMock.Setup(lm => lm.Send(It.IsAny <MailMessage>()));

            var smartLogger = new SmartLogger(logWriterMock.Object, logMailerMock.Object);

            smartLogger.WriteLine("Hello, Logger");

            repo.Verify();
        }
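Note that with MockBehavior.Default (loose mocks), repo.Verify() only enforces setups explicitly marked as verifiable, so the test above passes even if WriteLine never touches the mocks. A sketch of how the setups could be tightened, assuming that verification is the intent:

            // Mark the setups verifiable so repo.Verify() actually enforces them;
            // alternatively, repo.VerifyAll() checks every setup without the marker.
            logWriterMock.Setup(lw => lw.Write(It.IsAny <string>())).Verifiable();
            logMailerMock.Setup(lm => lm.Send(It.IsAny <MailMessage>())).Verifiable();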
Code Example #11
        public SimulinkConnectorFixture()
        {
            Connector = null;
            Logger    = new SmartLogger();

            Logger.AddWriter(new ConsoleTextWriter());

            try
            {
                Logger.WriteInfo("Attempting to launch MATLAB COM automation server");
                Connector = new SimulinkModelImport.SimulinkConnector(Logger);
            }
            catch (Exception e)
            {
                Logger.WriteWarning("MATLAB/Simulink not found: " + e.Message);
            }
        }
Code Example #12
 /// <summary>
 /// Callback function for when the streaming speech-to-text process receives output data.
 /// </summary>
 /// <param name="sender">Sender of this event</param>
 /// <param name="e">Arguments for data received event</param>
 void OnStreamingSpeechToTextProcessOutputDataReceived(object sender, DataReceivedEventArgs e)
 {
     if (e.Data != null)
     {
         string trimmedData = e.Data.Trim();
         SmartLogger.Log(DebugFlags.GoogleStreamingSpeechToText, "process output: " + trimmedData);
         if (trimmedData == k_ReadyToStreamDataOutputPrompt)
         {
             SmartLogger.Log(DebugFlags.GoogleStreamingSpeechToText, "set ready to stream data");
             m_ReadyToStreamData = true;
         }
         else if (trimmedData.StartsWith(k_ResponsePrefix))
         {
             trimmedData = trimmedData.Remove(0, k_ResponsePrefix.Length);
             m_ResponseJSONsQueue.Enqueue(trimmedData);
         }
     }
 }
Code Example #13
File: Test_run_mdao.cs Project: landunin/meta-core
        public void UnitsDoNotMatch()
        {
            string outputDir         = "results/" + GetCurrentMethod();
            string petExperimentPath = "/@Testing/@ParametricExploration/@UnitsDoNotMatch";

            Assert.True(File.Exists(mgaFile), "Failed to generate the mga.");
            var logger       = new SmartLogger();
            var stringWriter = new StringWriter();

            logger.AddWriter(stringWriter);
            var result = DynamicsTeamTest.CyPhyPETRunner.RunReturnFull(outputDir, mgaFile, petExperimentPath, logger);

            stringWriter.Flush();
            var loggerContents = stringWriter.GetStringBuilder().ToString();

            Assert.Contains("must match unit for ", loggerContents);
            Assert.False(result.Item2.Success, "CyPhyPET should have failed.");
        }
Code Example #14
 /// <summary>
 /// Starts recording audio for each speech-to-text service widget if not already recording.
 /// </summary>
 void StartRecording()
 {
     if (!m_IsRecording)
     {
         SmartLogger.Log(DebugFlags.SpeechToTextWidgets, "Start comparison recording");
         m_IsCurrentlyInSpeechToTextSession = true;
         m_IsRecording             = true;
         m_RecordButtonTextUI.text = m_RecordingText;
         m_RecordButtonImage.color = m_RecordingButtonColor;
         m_WaitingSpeechToTextServiceWidgets.Clear();
         foreach (var serviceWidget in m_SpeechToTextServiceWidgets)
         {
             SmartLogger.Log(DebugFlags.SpeechToTextWidgets, "tell service widget to start recording");
             serviceWidget.StartRecording();
             m_WaitingSpeechToTextServiceWidgets.Add(serviceWidget);
         }
     }
 }
Code Example #15
File: ConnectorUnroller.cs Project: daoos/meta-core
        public Unroller(MgaProject proj, IMgaTraceability Traceability = null, SmartLogger Logger = null)
        {
            this.ConnectorToStandalonePortMap = new Dictionary <MgaModel, List <PortWrapper> >();

            SupportedPortTypesMeta = new HashSet <int>();
            foreach (var kind in SupportedPortTypes)
            {
                SupportedPortTypesMeta.Add(proj.RootMeta.RootFolder.DefinedFCOByName[kind, true].MetaRef);
            }

            MetaRef = new Dictionary <String, int>();
            MetaRef.Add("Component", proj.RootMeta.RootFolder.DefinedFCOByName["Component", true].MetaRef);
            MetaRef.Add("ComponentAssembly", proj.RootMeta.RootFolder.DefinedFCOByName["ComponentAssembly", true].MetaRef);
            MetaRef.Add("Connector", proj.RootMeta.RootFolder.DefinedFCOByName["Connector", true].MetaRef);
            MetaRef.Add("TestBench", proj.RootMeta.RootFolder.DefinedFCOByName["TestBench", true].MetaRef);
            MetaRef.Add("TestComponent", proj.RootMeta.RootFolder.DefinedFCOByName["TestComponent", true].MetaRef);

            if (Logger == null)
            {
                this.Logger = new GMELogger(proj, "ConnectorUnroller");
                myLogger    = true;
                this.Logger.LoggingLevel = SmartLogger.MessageType_enum.Warning;
            }
            else
            {
                this.Logger = Logger;
                myLogger    = false;
            }

            if (Traceability == null)
            {
                this.Traceability = new META.MgaTraceability();
            }
            else
            {
                this.Traceability = Traceability;
            }
        }
Code Example #16
 /// <summary>
 /// Function that is called when a speech-to-text result is received. If it is a final result and this widget
 /// is waiting for the last result of the session, then the widget will begin processing the end results
 /// of the session.
 /// </summary>
 /// <param name="result">The speech-to-text result</param>
 void OnTextResult(SpeechToTextResult result)
 {
     if (m_WillDisplayReceivedResults)
     {
         // For the purposes of comparing results, this just uses the first alternative
         m_LastResultWasFinal = result.IsFinal;
         if (result.IsFinal)
         {
             m_PreviousFinalResults += result.TextAlternatives[0].Text;
             results = m_PreviousFinalResults;
             SmartLogger.Log(DebugFlags.SpeechToTextWidgets, m_SpeechToTextService.GetType().ToString() + " final result");
             if (m_WaitingForLastFinalResultOfSession)
             {
                 m_WaitingForLastFinalResultOfSession = false;
                 ProcessEndResults();
             }
         }
         else
         {
             results = m_PreviousFinalResults + result.TextAlternatives[0].Text;
         }
     }
 }
Code Example #17
        /// <summary>
        /// Computes the accuracy (percentage) of the end text results against each of the given phrases, by using
        /// the Levenshtein Distance between the two strings, and displays these percentages in the results text UI.
        /// </summary>
        /// <param name="originalPhrase">The phrases to compare against</param>
        void DisplayAccuracyOfEndResults(string[] originalPhrase)
        {
            print("The computer understood " + results);
            string speechToTextResult = StringUtilities.TrimSpecialFormatting(results, new HashSet <char>(),
                                                                              m_LeadingCharsForSpecialWords, m_SurroundingCharsForSpecialText);

            for (int i = 0; i < originalPhrase.Length; i++)
            {
                originalPhrase[i] = StringUtilities.TrimSpecialFormatting(originalPhrase[i], new HashSet <char>(),
                                                                          m_LeadingCharsForSpecialWords, m_SurroundingCharsForSpecialText);

                int levenDistance = StringUtilities.LevenshteinDistance(speechToTextResult, originalPhrase[i]);
                SmartLogger.Log(DebugFlags.SpeechToTextWidgets, m_SpeechToTextService.GetType().ToString() + " compute accuracy of text: \"" + speechToTextResult + "\"");
                float accuracy = Mathf.Max(0, 100f - (100f * (float)levenDistance / (float)originalPhrase[i].Length));
                m_PreviousFinalResults = "[Accuracy: " + accuracy + "%] " + m_PreviousFinalResults;

                speechAccuracy.Add(accuracy);
                print(accuracy);
            }

            results = m_PreviousFinalResults;
            OnResult.Invoke();
        }
Code Example #18
        /// <summary>
        /// Translates speech to text by making a request to the speech-to-text API.
        /// </summary>
        protected override IEnumerator TranslateRecordingToText()
        {
            m_TempAudioComponent.ClearTempAudioFiles();

            // Save recorded audio to a WAV file.
            string recordedAudioFilePath = SavWav.Save(m_TempAudioComponent.TempAudioRelativePath(), AudioRecordingManager.Instance.RecordedAudio);

            // Construct a UnityWebRequest with the WAV file and send it.
            string _url = Constants.WitAiSpeechToTextBaseURL + "?" +
                          Constants.WitAiVersionParameterName + "=" + DateTime.Now.ToString(Constants.WitAiVersionDateFormat);
            UnityWebRequest www = new UnityWebRequest(_url, UnityWebRequest.kHttpVerbPOST);


            byte[]           bytes = File.ReadAllBytes(recordedAudioFilePath);
            UploadHandlerRaw uH    = new UploadHandlerRaw(bytes);

            uH.contentType      = "audio/wav";    // the request body is a WAV file, not JSON
            www.uploadHandler   = uH;
            www.downloadHandler = new DownloadHandlerBuffer();
            www.SetRequestHeader("Content-Type", "audio/wav");
            www.SetRequestHeader("Authorization", "Bearer " + m_APIAccessToken);

            SmartLogger.Log(DebugFlags.WitAINonStreamingSpeechToText, "sent request");
            float startTime = Time.time;

            yield return(www.Send());

            if (www.isError)
            {
                SmartLogger.Log(DebugFlags.WitAINonStreamingSpeechToText, www.error);
            }
            else
            {
                SmartLogger.Log(DebugFlags.WitAINonStreamingSpeechToText, "Form upload complete!");
            }
            SmartLogger.Log(DebugFlags.WitAINonStreamingSpeechToText, "response time: " + (Time.time - startTime));

            // Grab the response JSON once the request is done and parse it.
            var responseJSON = new JSONObject(www.downloadHandler.text, int.MaxValue);

            SmartLogger.Log(DebugFlags.WitAINonStreamingSpeechToText, "Received request result");
            SmartLogger.Log(DebugFlags.WitAINonStreamingSpeechToText, responseJSON.ToString());

            string errorText = WitAiSpeechToTextResponseJSONParser.GetErrorFromResponseJSON(responseJSON);

            if (errorText != null)
            {
                if (m_OnError != null)
                {
                    m_OnError(errorText);
                }
            }

            if (m_OnTextResult != null)
            {
                m_OnTextResult(WitAiSpeechToTextResponseJSONParser.GetTextResultFromResponseJSON(responseJSON));
            }

            m_TempAudioComponent.ClearTempAudioFiles();
        }
Code Example #19
        private void StartAssemblyExe(CreoOpenMode mode, string guid, bool isComponentAssembly, string exeparams, string workingDir = null)
        {
            SyncedComponentData syncedCompData = null;

            if (!syncedComponents.TryGetValue(guid, out syncedCompData))
            {
                GMEConsole.Warning.WriteLine("StartAssemblyExe(): Can't find guid " + guid);
                return;
            }

            string createAssemblyExe = Path.Combine(META.VersionInfo.MetaPath, "bin", "CAD", "Creo", "bin", "CADCreoParametricMetaLink.exe");

            if (File.Exists(createAssemblyExe) == false)
            {
                GMEConsole.Error.WriteLine(String.Format("Could not find CADCreoParametricMetaLink.exe at '{0}'", createAssemblyExe));
                return;
            }
            if (workingDir == null)
            {
                workingDir = syncedCompData.WorkingDir;
            }

            try
            {
                File.Copy(Path.Combine(GetProjectDir(), "constraintdata.xml"), Path.Combine(workingDir, "constraintdata.xml"));
            }
            catch (Exception)
            {
                //  not an issue
            }

            string logfile   = Path.Combine(GetProjectDir(), "log", Path.ChangeExtension("MetaLink_CreoCreateAssembly_" + Path.GetRandomFileName(), "log"));
            string arguments = String.Format("-v debug -g -s 127.0.0.1:{4} -w \"{0}\" -l \"{1}\" {2}, -id {3}", workingDir, logfile, exeparams, syncedCompData.InstanceId, SocketQueue.port);

            arguments += " -d " + guid;

            ProcessStartInfo info = new ProcessStartInfo()
            {
                RedirectStandardOutput = true,
                RedirectStandardError  = true,
                RedirectStandardInput  = true,
                UseShellExecute        = false,
                WindowStyle            = ProcessWindowStyle.Hidden,
                CreateNoWindow         = true,
                FileName  = createAssemblyExe,
                Arguments = arguments
                            // TODO -p ?
            };

            if (mode == CreoOpenMode.OPEN_EMPTY)
            {
                info.Arguments += " -m passive";
            }
            else if (!isComponentAssembly)
            {
                info.Arguments += " -m component";
            }
            if (Configuration.ConfigProFile.Length > 0)
            {
                info.Arguments += " -c " + Configuration.ConfigProFile;
            }
            if (!string.IsNullOrWhiteSpace(syncedCompData.AuxDir))
            {
                info.Arguments += String.Format(" -a \"{0}\"", syncedCompData.AuxDir);
            }
            Process createAssembly = new Process();

            createAssembly.StartInfo = info;
            StringBuilder stdoutData = new StringBuilder();
            StringBuilder stderrData = new StringBuilder();

            createAssembly.OutputDataReceived += (o, args) =>
            {
                lock (stdoutData)
                {
                    stdoutData.Append(args.Data);
                    stdoutData.Append(System.Environment.NewLine);
                }
            };
            createAssembly.ErrorDataReceived += (o, args) =>
            {
                lock (stderrData)
                {
                    stderrData.Append(args.Data);
                    stderrData.Append(System.Environment.NewLine);
                }
            };
            createAssembly.EnableRaisingEvents = true;
            createAssembly.Exited += (o, args) =>
            {
                lock (stderrData)
                {
                    if (GMEConsole != null && createAssembly.ExitCode != 0)
                    {
                        string message = String.Format("CADCreoParametricMetaLink exited with code {0}", createAssembly.ExitCode);
                        // CADCreoParametricMetaLink attempts to log everything in its log file
                        // Ignore stderr if it doesn't contain anything useful
                        string stdErrString = stderrData.ToString();
                        if (Regex.Match(stdErrString, "\\S").Success)
                        {
                            string errlog = "CADCreoParametricCreateAssembly_err.log";
                            using (StreamWriter writer = new StreamWriter(errlog))
                            {
                                writer.Write(stdErrString);
                                writer.Close();
                            }
                            message += String.Format(", the logfile is {0} ", errlog);
                        }
                        GMEConsole.Error.WriteLine(message);
                        SyncControl.Invoke((System.Action)(() => StartupFailureCallback(stdErrString, logfile)));
                    }
                }
            };
            LastStartedInstance = syncedCompData;
            createAssembly.Start();
            if (TestMode_CreoJobObject != IntPtr.Zero)
            {
                JobObjectPinvoke.AssignProcessToJobObject(createAssembly, TestMode_CreoJobObject);
            }
            createAssembly.BeginOutputReadLine();
            createAssembly.BeginErrorReadLine();
            createAssembly.StandardInput.Close();
            GMEConsole.Info.WriteLine("Creo is starting, the logfile is " + SmartLogger.GetGMEConsoleFileLink(logfile));
            ShowStartupDialog(true);
            // createAssembly.WaitForExit(10 * 1000);
        }
Code Example #20
        /// <summary>
        /// Sends queued chunks of audio to the server and listens for responses.
        /// </summary>
        protected override IEnumerator StreamAudioAndListenForResponses()
        {
            m_TempAudioComponent.ClearTempAudioFiles();
            m_ResponseJSONsQueue.Clear();
            m_StreamingSpeechToTextProcessHasStarted = false;
            m_ReadyToStreamData = false;

            string jsonCredentialsPath = Path.Combine(
                Path.Combine(Application.streamingAssetsPath, k_StreamingSpeechToTextApplicationFolderName),
                m_JSONCredentialsFileName);

            if (!File.Exists(jsonCredentialsPath))
            {
                if (m_OnError != null)
                {
                    m_OnError("Missing JSON credentials file in StreamingAssets/GoogleStreamingSpeechToTextProgram");
                }
                yield break;
            }

            // Initialize streaming speech-to-text process with appropriate start info, including the path to the credentials file.
            m_StreamingSpeechToTextProcess = new Process();
            m_StreamingSpeechToTextProcess.StartInfo.FileName = Path.Combine(
                Path.Combine(Application.streamingAssetsPath, k_StreamingSpeechToTextApplicationFolderName),
                k_StreamingSpeechToTextApplicationFileName);
            m_StreamingSpeechToTextProcess.StartInfo.Arguments              = jsonCredentialsPath;
            m_StreamingSpeechToTextProcess.StartInfo.CreateNoWindow         = true;
            m_StreamingSpeechToTextProcess.StartInfo.UseShellExecute        = false;
            m_StreamingSpeechToTextProcess.StartInfo.RedirectStandardInput  = true;
            m_StreamingSpeechToTextProcess.StartInfo.RedirectStandardOutput = true;
            m_StreamingSpeechToTextProcess.OutputDataReceived += OnStreamingSpeechToTextProcessOutputDataReceived;

            SmartLogger.Log(DebugFlags.GoogleStreamingSpeechToText, "start streaming speech-to-text process");
            m_StreamingSpeechToTextProcess.Start();
            m_StreamingSpeechToTextProcess.BeginOutputReadLine();
            m_StreamingSpeechToTextProcessHasStarted = true;

            while (!m_ReadyToStreamData)
            {
                yield return(null);
            }

            // TODO: I don't know why, but I need to write garbage text first.
            // For some reason the first standard input begins with "0x3F3F3F".
            SmartLogger.Log(DebugFlags.GoogleStreamingSpeechToText, "ready to stream data");
            m_StreamingSpeechToTextProcess.StandardInput.WriteLine("clear input stream");

            // Tell the process to start streaming.
            m_StreamingSpeechToTextProcess.StandardInput.WriteLine(k_StartStreamingDataInputPrompt);

            StartCoroutine(ProcessResponseJSONs());

            // While still recording, send chunks as they arrive in the queue.
            while (m_IsRecording)
            {
                while (m_AudioChunksQueue.Count == 0)
                {
                    yield return(null);
                }
                yield return(SaveAndSendNextChunk());
            }
            SmartLogger.Log(DebugFlags.GoogleStreamingSpeechToText, "stopped recording");

            // Send any remaining chunks.
            while (m_AudioChunksQueue.Count > 0)
            {
                yield return(SaveAndSendNextChunk());
            }
            SmartLogger.Log(DebugFlags.GoogleStreamingSpeechToText, "sent all chunks");

            // Tell the process to stop streaming.
            m_StreamingSpeechToTextProcess.StandardInput.WriteLine(k_StopStreamingDataInputPrompt);

            // Wait a specified number of seconds for a final result.
            float timeElapsedAfterRecording = 0;

            while (!m_LastResult.IsFinal && timeElapsedAfterRecording < m_SessionTimeoutAfterDoneRecording)
            {
                yield return(null);

                timeElapsedAfterRecording += Time.deltaTime;
            }
            SmartLogger.Log(DebugFlags.GoogleStreamingSpeechToText, "session timeout");

            // If still determining a final result, just treat the last result processed as a final result.
            if (!m_LastResult.IsFinal)
            {
                SmartLogger.Log(DebugFlags.GoogleStreamingSpeechToText, "treat last result as final result");
                m_LastResult.IsFinal = true;
                if (m_OnTextResult != null)
                {
                    m_OnTextResult(m_LastResult);
                }
            }

            while (!m_StreamingSpeechToTextProcess.HasExited)
            {
                yield return(null);
            }
            SmartLogger.Log(DebugFlags.GoogleStreamingSpeechToText, "streaming speech-to-text process exited");

            m_TempAudioComponent.ClearTempAudioFiles();
        }
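Pieced together from this method and Code Example #12, the child-process handshake appears to be line-oriented; a sketch of the assumed protocol (the actual prompt strings are constants defined elsewhere in the class):

        // child  -> parent (stdout): k_ReadyToStreamDataOutputPrompt once initialized,
        //                            then k_ResponsePrefix + <response JSON>, one per line
        // parent -> child  (stdin):  k_StartStreamingDataInputPrompt, audio chunks via
        //                            SaveAndSendNextChunk (not shown), then
        //                            k_StopStreamingDataInputPrompt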
Code Example #21
 /// <summary>
 /// Initialization function called on the frame when the script is enabled just before any of the Update
 /// methods is called the first time.
 /// </summary>
 protected override void Start()
 {
     base.Start();
     SmartLogger.LogError(DebugFlags.GoogleStreamingSpeechToText, "This service is only supported on Windows.");
 }
Code Example #22
        /// <summary>
        /// Translates speech to text by making a request to the speech-to-text API.
        /// </summary>
        protected override IEnumerator TranslateRecordingToText()
        {
            m_TempAudioComponent.ClearTempAudioFiles();

            // Save recorded audio to a WAV file and convert it to FLAC format.
            string wavAudioFilePath  = SavWav.Save(m_TempAudioComponent.TempAudioRelativePath(), AudioRecordingManager.Instance.RecordedAudio);
            string flacAudioFilePath = IOUtilities.MakeFilePathUnique(Path.ChangeExtension(wavAudioFilePath, "flac"));

            SmartLogger.Log(DebugFlags.GoogleNonStreamingSpeechToText, "converting audio");
            var audioConversionJob = new SoXAudioConversionJob(wavAudioFilePath, flacAudioFilePath, 16000);

            audioConversionJob.Start();
            yield return(StartCoroutine(audioConversionJob.WaitFor()));

            if (audioConversionJob.ErrorMessage != null)
            {
                if (m_OnError != null)
                {
                    m_OnError(audioConversionJob.ErrorMessage);
                }
                yield break;
            }

            var request = new Request("POST", Constants.GoogleNonStreamingSpeechToTextURL +
                                      "?" + Constants.GoogleAPIKeyParameterName + "=" + m_APIKey);

            request.headers.Add("Content-Type", "application/json");

            // Construct JSON request body.
            JSONObject requestJSON   = new JSONObject();
            JSONObject requestConfig = new JSONObject();

            requestConfig.AddField(Constants.GoogleRequestJSONConfigEncodingFieldKey, "FLAC");
            requestConfig.AddField(Constants.GoogleRequestJSONConfigSampleRateFieldKey, "16000");
            JSONObject requestAudio = new JSONObject();

            requestAudio.AddField(Constants.GoogleRequestJSONAudioContentFieldKey, Convert.ToBase64String(File.ReadAllBytes(flacAudioFilePath)));
            requestJSON.AddField(Constants.GoogleRequestJSONConfigFieldKey, requestConfig);
            requestJSON.AddField(Constants.GoogleRequestJSONAudioFieldKey, requestAudio);
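
            // The assembled request body has roughly this shape (field names come
            // from the constants above; a sketch of the v1beta1-era REST format):
            // {
            //     "config": { "encoding": "FLAC", "sampleRate": "16000" },
            //     "audio":  { "content": "<base64-encoded FLAC bytes>" }
            // }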

            request.Text = requestJSON.ToString();
            request.Send();
            SmartLogger.Log(DebugFlags.GoogleNonStreamingSpeechToText, "sent request");

            while (!request.isDone)
            {
                yield return(null);
            }

            // Grab the response JSON once the request is done and parse it.
            var responseJSON = new JSONObject(request.response.Text, int.MaxValue);

            SmartLogger.Log(DebugFlags.GoogleNonStreamingSpeechToText, responseJSON.ToString());

            string errorText = GoogleSpeechToTextResponseJSONParser.GetErrorFromResponseJSON(responseJSON);

            if (errorText != null)
            {
                if (m_OnError != null)
                {
                    m_OnError(errorText);
                }
            }

            SpeechToTextResult textResult;
            JSONObject         resultsJSON = responseJSON.GetField(Constants.GoogleResponseJSONResultsFieldKey);

            if (resultsJSON != null && resultsJSON.Count > 0)
            {
                JSONObject resultJSON = resultsJSON[0];
                textResult = GoogleSpeechToTextResponseJSONParser.GetTextResultFromResultJSON(resultJSON);
            }
            else
            {
                textResult = GoogleSpeechToTextResponseJSONParser.GetDefaultGoogleSpeechToTextResult();
            }
            if (m_OnTextResult != null)
            {
                m_OnTextResult(textResult);
            }

            m_TempAudioComponent.ClearTempAudioFiles();
        }
Code Example #23
 /// <summary>
 /// Removes a function from the recording timeout delegate.
 /// </summary>
 /// <param name="action">Function to unregister</param>
 public void UnregisterOnRecordingTimeout(Action action)
 {
     SmartLogger.Log(DebugFlags.SpeechToTextWidgets, SpeechToTextServiceString() + " unregister timeout");
     m_OnRecordingTimeout -= action;
 }
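The registration counterpart is presumably the += mirror of this method; a minimal sketch under that assumption:

 /// <summary>
 /// Adds a function to the recording timeout delegate (assumed counterpart, not shown on this page).
 /// </summary>
 /// <param name="action">Function to register</param>
 public void RegisterOnRecordingTimeout(Action action)
 {
     m_OnRecordingTimeout += action;
 }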
Code Example #24
File: CyPhyElaborateCS.cs Project: daoos/meta-core
        public bool Main(MgaProject project, MgaFCO currentobj, MgaFCOs selectedobjs, ComponentStartMode startMode)
        {
            this.Logger.WriteInfo("CyPhyElaborate 2.0 started.");
            PumpMessages();

            bool success = this.Check(currentobj);

            if (success == false)
            {
                this.Logger.WriteError("CyPhyElaborate 2.0 context is invalid.");
                return(success);
            }

            System.Diagnostics.Stopwatch sw = new System.Diagnostics.Stopwatch();
            sw.Start();
            try
            {
                this.ElaborateContext(currentobj);
            }
            catch (ElaboratorException ex)
            {
                this.Logger.WriteError(ex.Message);
                this.Logger.WriteError("CyPhyElaborate 2.0 finished with errors.");
                PumpMessages();

                // make sure we abort the transaction
                throw;    // rethrow without resetting the stack trace
            }
            catch (NotSupportedException ex)
            {
                this.Logger.WriteError(ex.Message);
                this.Logger.WriteError("CyPhyElaborate 2.0 finished with errors.");
                PumpMessages();

                throw;    // rethrow without resetting the stack trace
            }

            sw.Stop();
            this.Logger.WriteDebug("Time: {0}", sw.Elapsed.ToString("c"));

            sw.Restart();
            this.Logger.WriteInfo("CyPhyFormulaEvaluator 1.0 started");
            try
            {
                // create formula evaluator type
                // FIXME: calling the elaborator is faster than calling the formula evaluator
                Type            typeFormulaEval = Type.GetTypeFromProgID("MGA.Interpreter.CyPhyFormulaEvaluator");
                IMgaComponentEx formulaEval     = Activator.CreateInstance(typeFormulaEval) as IMgaComponentEx;

                // empty selected object set
                Type    typeMgaFCOs  = Type.GetTypeFromProgID("Mga.MgaFCOs");
                MgaFCOs selectedObjs = Activator.CreateInstance(typeMgaFCOs) as MgaFCOs;

                // initialize formula evaluator
                formulaEval.Initialize(project);

                // automation means no UI element shall be shown by the interpreter
                formulaEval.ComponentParameter["automation"] = "true";

                // do not write to the console
                formulaEval.ComponentParameter["console_messages"] = "off";

                // do not expand nor collapse the model
                formulaEval.ComponentParameter["expanded"] = "true";

                // do not generate the post processing python scripts
                // FIXME: Why should we generate them ???
                formulaEval.ComponentParameter["do_not_generate_post_processing"] = "true";

                formulaEval.ComponentParameter["traceability"] = Traceability;

                // call the formula evaluator and update all parameters starting from the current object
                try
                {
                    formulaEval.InvokeEx(project, currentobj, selectedObjs, 128);
                    numericLeafNodes = (string[])formulaEval.ComponentParameter["numericLeafNodes"];
                    this.Logger.WriteInfo("CyPhyFormulaEvaluator 1.0 finished");
                }
                catch (COMException e)
                {
                    success = false;
                    this.Logger.WriteError(e.Message);
                    this.Logger.WriteError("CyPhyFormulaEvaluator 1.0 finished with errors");
                }
            }
            catch (Exception ex)
            {
                this.Logger.WriteInfo("CyPhyFormulaEvaluator 1.0 failed");
                this.Logger.WriteDebug(ex.ToString());
            }

            sw.Stop();
            this.Logger.WriteDebug("Formula evaluator runtime: {0}", sw.Elapsed.ToString("c"));


            if (UnrollConnectors)
            {
                sw.Restart();
                this.Logger.WriteInfo("ConnectorUnroller started");
                try
                {
                    var kindCurrentObj = currentobj.MetaBase.Name;
                    if (kindCurrentObj == "ComponentAssembly")
                    {
                        using (Unroller unroller = new Unroller(currentobj.Project, Traceability, Logger))
                        {
                            unroller.UnrollComponentAssembly(currentobj as MgaModel);
                        }
                    }
                    else if (kindCurrentObj == "TestBench")
                    {
                        using (Unroller unroller = new Unroller(currentobj.Project, Traceability, Logger))
                        {
                            unroller.UnrollTestBench(currentobj as MgaModel);
                        }
                    }


                    this.Logger.WriteInfo("ConnectorUnroller finished");
                }
                catch (Exception ex)
                {
                    this.Logger.WriteInfo(ex.Message);
                    this.Logger.WriteDebug(ex.ToString());
                    this.Logger.WriteError("ConnectorUnroller failed. Check " + SmartLogger.GetGMEConsoleFileLink(this.Logger.LogFilenames[0], "detailed log") + " for details.");
                    success = false;
                }
                sw.Stop();
                this.Logger.WriteDebug("ConnectorUnroller runtime: {0}", sw.Elapsed.ToString("c"));
            }

            this.Logger.WriteInfo("CyPhyElaborate 2.0 finished.");
            PumpMessages();

            return(success);
        }