Code example #1
        public static System.Speech.Synthesis.SpeechSynthesizer Setupvoice(string gender, string age)
        {
            // create a synthesizer; when the gender/age strings below match, hint it
            // to select an installed voice with those characteristics
            System.Speech.Synthesis.SpeechSynthesizer voice = new System.Speech.Synthesis.SpeechSynthesizer();

            switch (gender)
            {
            case "Male":

                if (age == "Child")
                {
                    voice.SelectVoiceByHints(System.Speech.Synthesis.VoiceGender.Male, System.Speech.Synthesis.VoiceAge.Child);
                }
                else if (age == "Teen")
                {
                    voice.SelectVoiceByHints(System.Speech.Synthesis.VoiceGender.Male, System.Speech.Synthesis.VoiceAge.Teen);
                }
                else if (age == "Adult")
                {
                    voice.SelectVoiceByHints(System.Speech.Synthesis.VoiceGender.Male, System.Speech.Synthesis.VoiceAge.Adult);
                }
                else if (age == "Senior")
                {
                    voice.SelectVoiceByHints(System.Speech.Synthesis.VoiceGender.Male, System.Speech.Synthesis.VoiceAge.Senior);
                }
                break;

            case "Female":

                if (age == "Child")
                {
                    voice.SelectVoiceByHints(System.Speech.Synthesis.VoiceGender.Female, System.Speech.Synthesis.VoiceAge.Child);
                }
                else if (age == "Teen")
                {
                    voice.SelectVoiceByHints(System.Speech.Synthesis.VoiceGender.Female, System.Speech.Synthesis.VoiceAge.Teen);
                }
                else if (age == "Adult")
                {
                    voice.SelectVoiceByHints(System.Speech.Synthesis.VoiceGender.Female, System.Speech.Synthesis.VoiceAge.Adult);
                }
                else if (age == "Senior")
                {
                    voice.SelectVoiceByHints(System.Speech.Synthesis.VoiceGender.Female, System.Speech.Synthesis.VoiceAge.Senior);
                }
                break;
            }

            // if no match was found, the synthesizer keeps its default voice
            return voice;
        }
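A minimal usage sketch for Setupvoice above (not part of the original example; it assumes the call is made from within the declaring class, and the "Female"/"Adult" arguments and spoken phrase are illustrative only):

        // build a synthesizer hinted toward an adult female voice, then speak a test phrase
        var voice = Setupvoice("Female", "Adult");
        voice.Speak("Voice selection test.");
        voice.Dispose();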
Code example #2
        /// <summary>
        /// Speak the image description.
        /// </summary>
        /// <param name="description">The image description to speak.</param>
        private async Task SpeakDescription(string description)
        {
            // set up an adult female voice synthesizer
            // (requires a reference to System.Speech and "using System.Speech.Synthesis;")
            var synth = new System.Speech.Synthesis.SpeechSynthesizer();
            synth.SelectVoiceByHints(VoiceGender.Female, VoiceAge.Adult);

            // speak the description off the calling thread; Speak blocks until the utterance
            // finishes, so awaiting the task waits for the speech to complete
            await Task.Run(() => synth.Speak(description));
        }
Code example #3
        /// <summary>
        /// Initializes a new instance of the MainWindow class
        /// </summary>
        public MainWindow()
        {
            // only one sensor is currently supported
            this.kinectSensor = KinectSensor.GetDefault();

            // set IsAvailableChanged event notifier
            this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            // open the sensor
            this.kinectSensor.Open();

            // set the status text
            this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                            : Properties.Resources.NoSensorStatusText;

            // open the reader for the body frames
            this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

            // set the BodyFramedArrived event notifier
            this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

            // initialize the BodyViewer object for displaying tracked bodies in the UI
            this.kinectBodyView = new KinectBodyView(this.kinectSensor);

            // initialize the gesture detection objects for our gestures
            this.gestureDetectorList = new List<GestureDetector>();

            // initialize the MainWindow
            this.InitializeComponent();
            // open the serial port used by the application (the port name is hard-coded here)
            try
            {
                sp.PortName = "COM6";
                sp.BaudRate = 9600;
                sp.Open();
            }
            catch (Exception)
            {
                MessageBox.Show("Please give a valid port number or check your connection");
            }

            // hint the speech synthesizer toward a female voice
            speaker.SelectVoiceByHints(System.Speech.Synthesis.VoiceGender.Female);


            // set our data context objects for display in UI
            this.DataContext = this;
            this.kinectBodyViewbox.DataContext = this.kinectBodyView;

            // create a gesture detector for each body the sensor can track (6 bodies => 6 detectors)
            int maxBodies = this.kinectSensor.BodyFrameSource.BodyCount;

            for (int i = 0; i < maxBodies; ++i)
            {
                GestureResultView result = new GestureResultView(i, false, false, 0.0f);
                GestureDetector detector = new GestureDetector(this.kinectSensor, result);
                this.gestureDetectorList.Add(detector);
            }

            // set up speech recognition once, using the sensor's audio stream
            if (this.kinectSensor != null)
            {
                // grab the audio stream
                IReadOnlyList<AudioBeam> audioBeamList = this.kinectSensor.AudioSource.AudioBeams;
                System.IO.Stream audioStream = audioBeamList[0].OpenInputStream();

                // create the convert stream that the speech engine reads from
                this.convertStream = new KinectAudioStream(audioStream);
            }
            else
            {
                // no sensor is available, so there is no audio to recognize
                return;
            }

            RecognizerInfo ri = TryGetKinectRecognizer();

            if (null != ri)
            {
                this.speechEngine = new SpeechRecognitionEngine(ri.Id);

                // create a grammar from the grammar definition XML file
                using (var memoryStream = new MemoryStream(Encoding.ASCII.GetBytes(Properties.Resources.SpeechGrammar)))
                {
                    var g = new Grammar(memoryStream);
                    this.speechEngine.LoadGrammar(g);
                }

                this.speechEngine.SpeechRecognized += this.SpeechRecognized;
                this.speechEngine.SpeechRecognitionRejected += this.SpeechRejected;

                // let the convertStream know speech is going active
                this.convertStream.SpeechActive = true;

                // For long recognition sessions (a few hours or more), it may be beneficial to turn off
                // adaptation of the acoustic model; this prevents recognition accuracy from degrading over time.
                ////speechEngine.UpdateRecognizerSetting("AdaptationOn", 0);

                this.speechEngine.SetInputToAudioStream(
                    this.convertStream, new SpeechAudioFormatInfo(EncodingFormat.Pcm, 16000, 16, 1, 32000, 2, null));
                this.speechEngine.RecognizeAsync(RecognizeMode.Multiple);
            }
            else
            {
                // no Kinect speech recognizer is installed
                //this.statusBarText.Text = Properties.Resources.NoSpeechRecognizer;
            }

            // display the gesture results for the first body in the content grid
            ContentControl contentControl = new ContentControl();
            contentControl.Content = this.gestureDetectorList[0].GestureResultView;
            this.contentGrid.Children.Add(contentControl);
        }
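The constructor above relies on fields declared elsewhere in the class. A sketch of plausible declarations follows; the names are taken from the snippet, while the types are inferred from how the fields are used and are not confirmed by the original source:

        // assumed field declarations (types inferred; not part of the original example)
        private KinectSensor kinectSensor = null;                 // Microsoft.Kinect
        private BodyFrameReader bodyFrameReader = null;
        private KinectBodyView kinectBodyView = null;             // project-local helper type
        private List<GestureDetector> gestureDetectorList = null; // project-local gesture detectors
        private KinectAudioStream convertStream = null;           // project-local audio wrapper
        private SpeechRecognitionEngine speechEngine = null;      // Microsoft.Speech or System.Speech, depending on references
        private System.IO.Ports.SerialPort sp = new System.IO.Ports.SerialPort();
        private System.Speech.Synthesis.SpeechSynthesizer speaker = new System.Speech.Synthesis.SpeechSynthesizer();
        public string StatusText { get; set; }                    // simplified; a bound UI would normally raise PropertyChanged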
Code example #4
        public MainForm()
        {
            InitializeComponent();

            // hint the synthesizer toward the gender and age configured for this form
            s.SelectVoiceByHints(gender, age);
        }