// [END authenticating]
 // [START run_application]
 /// <summary>Entry point: transcribes the audio file named on the command line and
 /// prints each alternative transcript to the console.</summary>
 /// <param name="args">args[0] must be the path of the audio file to transcribe.</param>
 public static void Main(string[] args)
 {
     // Require the audio file path argument; print usage and bail out otherwise.
     // Length, not the LINQ Count() extension, is the idiomatic check on an array.
     if (args.Length < 1)
     {
         Console.WriteLine("Usage:\nTranscribe audio_file");
         return;
     }
     var service = CreateAuthorizedClient();
     string audioFilePath = args[0];
     // [END run_application]
     // [START construct_request]
     // LINEAR16 / 16 kHz matches the expected format of the sample audio files.
     var request = new Google.Apis.CloudSpeechAPI.v1beta1.Data.SyncRecognizeRequest()
     {
         Config = new Google.Apis.CloudSpeechAPI.v1beta1.Data.RecognitionConfig()
         {
             Encoding = "LINEAR16",
             SampleRate = 16000,
             LanguageCode = "en-US"
         },
         Audio = new Google.Apis.CloudSpeechAPI.v1beta1.Data.RecognitionAudio()
         {
             // The REST API expects the raw audio bytes base64-encoded in the body.
             Content = Convert.ToBase64String(File.ReadAllBytes(audioFilePath))
         }
     };
     // [END construct_request]
     // [START send_request]
     var response = service.Speech.Syncrecognize(request).Execute();
     foreach (var result in response.Results)
     {
         foreach (var alternative in result.Alternatives)
         {
             Console.WriteLine(alternative.Transcript);
         }
     }
     // [END send_request]
 }
Example #2
0
        // [END authenticating]

        // [START run_application]
        /// <summary>Entry point: transcribes the audio file named on the command line and
        /// prints each alternative transcript to the console.</summary>
        /// <param name="args">args[0] must be the path of the audio file to transcribe.</param>
        public static void Main(string[] args)
        {
            // Require the audio file path argument; print usage and bail out otherwise.
            // Length, not the LINQ Count() extension, is the idiomatic check on an array.
            if (args.Length < 1)
            {
                Console.WriteLine("Usage:\nTranscribe audio_file");
                return;
            }
            var service = CreateAuthorizedClient();
            string audioFilePath = args[0];
            // [END run_application]
            // [START construct_request]
            // LINEAR16 / 16 kHz matches the expected format of the sample audio files.
            var request = new Google.Apis.CloudSpeechAPI.v1beta1.Data.SyncRecognizeRequest()
            {
                Config = new Google.Apis.CloudSpeechAPI.v1beta1.Data.RecognitionConfig()
                {
                    Encoding = "LINEAR16",
                    SampleRate = 16000,
                    LanguageCode = "en-US"
                },
                Audio = new Google.Apis.CloudSpeechAPI.v1beta1.Data.RecognitionAudio()
                {
                    // The REST API expects the raw audio bytes base64-encoded in the body.
                    Content = Convert.ToBase64String(File.ReadAllBytes(audioFilePath))
                }
            };
            // [END construct_request]
            // [START send_request]
            var response = service.Speech.Syncrecognize(request).Execute();

            foreach (var result in response.Results)
            {
                foreach (var alternative in result.Alternatives)
                {
                    Console.WriteLine(alternative.Transcript);
                }
            }
            // [END send_request]
        }
Example #3
0
 /// <summary>Constructs a new Syncrecognize request.</summary>
 /// <param name="service">The client service the request will execute against
 /// (forwarded to the base request type).</param>
 /// <param name="body">The SyncRecognizeRequest payload sent as the request body.</param>
 public SyncrecognizeRequest(Google.Apis.Services.IClientService service, Google.Apis.CloudSpeechAPI.v1beta1.Data.SyncRecognizeRequest body)
     : base(service)
 {
     Body = body;
     // Initializes request parameters; part of the generated-client construction pattern.
     InitParameters();
 }
Example #4
0
 /// <summary>Performs synchronous speech recognition: results are delivered only after
 /// all of the supplied audio has been sent and processed.</summary>
 /// <param name="body">The body of the request.</param>
 /// <returns>A request object that can be configured and then executed.</returns>
 public virtual SyncrecognizeRequest Syncrecognize(Google.Apis.CloudSpeechAPI.v1beta1.Data.SyncRecognizeRequest body)
 {
     return new SyncrecognizeRequest(service, body);
 }
Example #5
0
        /// <summary>Transcribes the bundled SpeechAud.wav file via the Cloud Speech API,
        /// appending each alternative transcript to txtSpeech and reporting the elapsed
        /// time (or the error message) in txtInfo.</summary>
        void Transcribe()
        {
            try
            {
                // Time the whole recognition round-trip for display in txtInfo.
                var stopwatch = System.Diagnostics.Stopwatch.StartNew();

                var service = CreateAuthorizedClient();

                // The audio file is expected to sit next to the executable.
                // Path.Combine avoids the double-separator that string concatenation
                // with a leading backslash produced.
                string audioFilePath = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "SpeechAud.wav");
                var request = new Google.Apis.CloudSpeechAPI.v1beta1.Data.SyncRecognizeRequest()
                {
                    Config = new Google.Apis.CloudSpeechAPI.v1beta1.Data.RecognitionConfig()
                    {
                        Encoding = "LINEAR16",
                        SampleRate = 16000,
                        // NOTE(review): SelectedText is the highlighted text in the editable
                        // part of the combo box and is usually empty; SelectedItem (or Text)
                        // is probably what was intended — confirm against the UI behavior.
                        LanguageCode = cmbLanguage.SelectedText
                    },
                    Audio = new Google.Apis.CloudSpeechAPI.v1beta1.Data.RecognitionAudio()
                    {
                        // The REST API expects the raw audio bytes base64-encoded.
                        Content = Convert.ToBase64String(File.ReadAllBytes(audioFilePath))
                    }
                };

                // Clear any previous transcript before appending the new results.
                txtSpeech.Text = "";
                var response = service.Speech.Syncrecognize(request).Execute();
                foreach (var result in response.Results)
                {
                    foreach (var alternative in result.Alternatives)
                    {
                        txtSpeech.AppendText(alternative.Transcript);
                    }
                }

                // NOTE(review): this culture switch looks like a leftover from a removed
                // System.Speech experiment; kept in case later code depends on it — verify.
                System.Threading.Thread.CurrentThread.CurrentCulture = new System.Globalization.CultureInfo("en-US");

                txtInfo.Text = "Time Taken : " + stopwatch.ElapsedMilliseconds;
            }
            catch (Exception ex)
            {
                // Surface API/IO failures in the UI rather than crashing the form.
                txtInfo.Text = "Google Error : " + ex.Message;
            }
        }