Example #1
        /// <summary>
        /// Triggered when the media element used to play synthesized speech messages is loaded.
        /// Initializes the SpeechHelper and greets the user.
        /// </summary>
        private async void speechMediaElement_Loaded(object sender, RoutedEventArgs e)
        {
            if (speech == null)
            {
                speech = new SpeechHelper(speechMediaElement);
                await speech.Read(SpeechContants.InitialGreetingMessage);
            }
            else
            {
                // Prevents the media element from re-creating the SpeechHelper after the user signs off
                speechMediaElement.AutoPlay = false;

                // Speak the initial greeting only if the previous page was the Login page
                if (Frame.BackStack.Last().SourcePageType == typeof(Login))
                {
                    await speech.Read(SpeechContants.InitialGreetingMessage);
                }
            }
        }
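
None of these examples show the `SpeechHelper` class itself. As a point of reference, here is a minimal sketch of what its `Read` method plausibly does, assuming it wraps the UWP `Windows.Media.SpeechSynthesis.SpeechSynthesizer` around the `MediaElement` passed to the constructor; the details are assumptions, not the original implementation:

// Minimal sketch of a SpeechHelper, assuming it wraps the UWP SpeechSynthesizer
// around the MediaElement passed in above. An illustration, not the original class.
using System;
using System.Threading.Tasks;
using Windows.Media.SpeechSynthesis;
using Windows.UI.Xaml.Controls;

public class SpeechHelper : IDisposable
{
    private readonly MediaElement mediaElement;
    private readonly SpeechSynthesizer synthesizer = new SpeechSynthesizer();

    public SpeechHelper(MediaElement mediaElement)
    {
        this.mediaElement = mediaElement;
    }

    public async Task Read(string text)
    {
        // Synthesize the text to an audio stream and play it through the media element
        SpeechSynthesisStream stream = await synthesizer.SynthesizeTextToStreamAsync(text);
        mediaElement.SetSource(stream, stream.ContentType);
        mediaElement.Play();
    }

    public void Dispose() => synthesizer.Dispose();
}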
Example #2
 /// <summary>
 /// Triggered when the media element used to play synthesized speech messages is loaded.
 /// Initializes the SpeechHelper and greets the user.
 /// </summary>
 private async void speechMediaElement_Loaded(object sender, RoutedEventArgs e)
 {
     if (speech == null)
     {
         speech = new SpeechHelper(speechMediaElement);
         await speech.Read(SpeechContants.InitialGreetingMessage);
     }
     else
     {
         // Prevents media element from re-greeting visitor
         speechMediaElement.AutoPlay = false;
     }
 }
Example #3
 private async void speechMediaElement_Loaded(object sender, RoutedEventArgs e)
 {
     if (speech == null)
     {
         sanyaResponses = new SanyaResponses();
         speech         = new SpeechHelper(speechElement);
         await speech.Read(sanyaResponses.returnResponse());
     }
     else
     {
         // Prevents media element from re-greeting visitor
         speechElement.AutoPlay = false;
     }
 }
Example #4
        private async Task DoorbellPressed()
        {
            StorageFile file = null;

            if (webcam.IsInitialized())
            {
                // Stores current frame from webcam feed in a temporary folder
                file = await webcam.CapturePhoto();

                FaceQuery(file);
            }
            else
            {
                // The webcam has not been fully initialized for whatever reason:
                Debug.WriteLine("Unable to analyze visitor at door as the camera failed to initialize properly.");
                await speech.Read(SpeechContants.NoCameraMessage);
            }
            doorbellJustPressed = false;
        }
Example #5
        //private async void OnTimerTick(object state)
        //{
        //    //GetPhotoFromCam();
        //}

        /*
         * async void GetPhotoFromCam()
         * {
         *  if (!IsWebCamReady) return;
         *
         *  var photo = await TakePhoto();
         *  //call computer vision
         *  if (photo == null) return;
         *
         *  var result = await ApiContainer.GetApi<ComputerVisionService>().GetImageAnalysis(photo);
         *  if (result != null)
         *  {
         *      var item = new TonyVisionObj();
         *      if (result.Adult != null)
         *      {
         *          item.adultContent = result.Adult.IsAdultContent.ToString();
         *          item.adultScore = result.Adult.AdultScore.ToString();
         *      }
         *      else
         *      {
         *          item.adultContent = "False";
         *          item.adultScore = "0";
         *      }
         *
         *      if (result.Faces != null && result.Faces.Length > 0)
         *      {
         *          int count = 0;
         *          item.facesCount = result.Faces.Count();
         *          foreach (var face in result.Faces)
         *          {
         *              count++;
         *              if (count > 1)
         *              {
         *                  item.facesDescription += ",";
         *              }
         *              item.facesDescription += $"[Face : {count}; Age : { face.Age }; Gender : {face.Gender}]";
         *
         *          }
         *      }
         *      else
         *          item.facesCount = 0;
         *
         *
         *
         *      if (result.Description != null)
         *      {
         *          var Speak = "";
         *          foreach (var caption in result.Description.Captions)
         *          {
         *              Speak += $"[Caption : {caption.Text }; Confidence : {caption.Confidence};],";
         *          }
         *          string tags = "[Tags : ";
         *          foreach (var tag in result.Description.Tags)
         *          {
         *              tags += tag + ", ";
         *          }
         *          Speak += tags + "]";
         *          item.description = Speak;
         *      }
         *
         *      if (result.Tags != null)
         *      {
         *
         *          foreach (var tag in result.Tags)
         *          {
         *              item.tags += "[ Name : " + tag.Name + "; Confidence : " + tag.Confidence + "; Hint : " + tag.Hint + "], ";
         *          }
         *      }
         *      var IsUpload = false;
         *      if (item.description != null)
         *      {
         *          if (item.description.ToLower().Contains("person") || item.description.ToLower().Contains("people"))
         *          {
         *              IsUpload = true;
         *          }
         *      }
         *      if (item.tags != null)
         *      {
         *          if (item.tags.ToLower().Contains("man") || item.tags.ToLower().Contains("woman"))
         *          {
         *              IsUpload = true;
         *          }
         *      }
         *      if (IsUpload)
         *      {
         *          var uploadRes = await BlobEngine.UploadFile(photo);
         *          Debug.WriteLine($"upload : {uploadRes}");
         *      }
         *      item.tanggal = DateTime.Now;
         *      var JsonObj = new StringContent(JsonConvert.SerializeObject(item), Encoding.UTF8, "application/json");
         *      var res = await httpClient.PostAsync(APPCONTANTS.ApiUrl, JsonObj);
         *      if (res.IsSuccessStatusCode)
         *      {
         *          Debug.WriteLine("vision captured");
         *
         *      }
         *
         *
         *  }
         * }*/
        /// <summary>
        /// Triggered when the media element used to play synthesized speech messages is loaded.
        /// Initializes the SpeechHelper and greets the user.
        /// </summary>
        private async void speechMediaElement_Loaded(object sender, RoutedEventArgs e)
        {
            if (speech == null)
            {
                speech = new SpeechHelper(speechMediaElement);

                await speech.Read("tony is ready to serve");
            }
            else
            {
                // Prevents media element from re-greeting visitor
                speechMediaElement.AutoPlay = false;
            }
        }
Example #6
 private async void timer_Tick(object sender, object e)
 {
     elapsedTime++;
     Debug.WriteLine("ElapsedTime: " + elapsedTime);
     if (elapsedTime == 15)
     {
         await speech.Read(SpeechContants.CountDownLogin);
     }
     else if (elapsedTime == 30)
     {
         //Add Red GPIO Notification
         timer.Stop();
     }
 }
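
The `timer` and `elapsedTime` fields are not shown in this example. A plausible setup, assuming a one-second `DispatcherTimer`; the interval is an assumption inferred from `elapsedTime` being compared against 15 and 30:

// Plausible wiring for the timer driving timer_Tick above; requires using System;
// and using Windows.UI.Xaml;. The one-second interval is an assumption.
private DispatcherTimer timer;
private int elapsedTime;

private void StartLoginCountdown()
{
    elapsedTime = 0;
    timer = new DispatcherTimer { Interval = TimeSpan.FromSeconds(1) };
    timer.Tick += timer_Tick;
    timer.Start();
}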
Example #7
        private void HandleRecognitionResult(SpeechRecognitionResult speechRecognitionResult)
        {
            if (speechRecognitionResult.Status == SpeechRecognitionResultStatus.Success)
            {
                if (speechRecognitionResult.Confidence == SpeechRecognitionConfidence.Low || speechRecognitionResult.Confidence == SpeechRecognitionConfidence.Rejected)
                {
                    speech.Read("Sorry! I did not understand that! Can you repeat again?");

                    //this.repeat();
                }
                else
                {
                    switch (speechRecognitionResult.Text)
                    {
                    case "Play the song":
                        speech.Read("All right! Next song comming up!");
                        resultTextBlock.Text = "Playing the song...";
                        break;

                    case "Introduce yourself":
                        speech.Read(SpeechContants.Introduction);
                        Frame.Navigate(typeof(MainPage));
                        break;

                    case "Who are your creators":
                        speech.Read(SpeechContants.Creators);
                        Frame.Navigate(typeof(MainPage));
                        break;

                    case "Which day is it":
                        DateTime thisDay = DateTime.Today;
                        speech.Read(thisDay.ToString("D"));
                        resultTextBlock.Text = thisDay.ToString("D");
                        break;


                    default:
                        speech.Read("I have limited functioning capabilities as of now.");
                        Frame.Navigate(typeof(MainPage));
                        break;
                    }
                }
            }
        }
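
`HandleRecognitionResult` receives a completed `SpeechRecognitionResult`; this sketch shows one way it might be fed, using a one-shot recognition with the standard UWP `SpeechRecognizer` (the method name `ListenOnceAsync` is illustrative). Note that the `speech.Read(...)` calls inside the handler are fire-and-forget, since the handler itself is not async:

// Illustrative one-shot recognition that feeds HandleRecognitionResult above.
// Requires using System.Threading.Tasks; and using Windows.Media.SpeechRecognition;.
private async Task ListenOnceAsync()
{
    using (var recognizer = new SpeechRecognizer())
    {
        // With no constraints added, compiling falls back to the default dictation grammar
        await recognizer.CompileConstraintsAsync();
        SpeechRecognitionResult result = await recognizer.RecognizeAsync();
        HandleRecognitionResult(result);
    }
}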
Example #8
        private async void CameraControl_AutoCaptureStateChanged(object sender, AutoCaptureState e)
        {
            switch (e)
            {
            case AutoCaptureState.WaitingForFaces:
                this.cameraGuideBallon.Opacity = 1;
                this.cameraGuideText.Text      = "Пожалуйста, встаньте перед камерой!"; // "Please stand in front of the camera!"
                await speech.Read(this.cameraGuideText.Text);

                this.cameraGuideHost.Opacity = 1;
                break;

            case AutoCaptureState.WaitingForStillFaces:
                this.cameraGuideText.Text = "Пожалуйста, не двигайтесь..."; // "Please don't move..."
                break;

            case AutoCaptureState.ShowingCountdownForCapture:
                this.cameraGuideText.Text      = "";
                this.cameraGuideBallon.Opacity = 0;

                this.cameraGuideCountdownHost.Opacity = 1;
                this.countDownTextBlock.Text          = "3";
                await Task.Delay(350);

                this.countDownTextBlock.Text = "2";
                await Task.Delay(350);

                this.countDownTextBlock.Text = "1";
                await Task.Delay(350);

                this.cameraGuideCountdownHost.Opacity = 0;

                this.ProcessCameraCapture(await this.cameraControl.TakeAutoCapturePhoto());

                break;

            case AutoCaptureState.ShowingCapturedPhoto:
                this.cameraGuideHost.Opacity = 0;
                break;

            default:
                break;
            }
        }
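
The `AutoCaptureState` enum is not included in the snippet; reconstructed from the cases handled above, it contains at least these values (the original may define more):

// Reconstructed from the switch above; the original enum may define additional states.
public enum AutoCaptureState
{
    WaitingForFaces,
    WaitingForStillFaces,
    ShowingCountdownForCapture,
    ShowingCapturedPhoto
}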
Example #9
        private async void Speak(Face[] f)
        {
            await speech.Read("Здравствуйте, вам" + f[0].FaceAttributes.Age.ToString().Split(',')[0] + "и вы" + f[0].FaceAttributes.Gender + ".");

            //Wait();
        }
Example #10
        /// <summary>
        /// Called when the user hits the physical or virtual doorbell button. Captures a photo of the current webcam view and sends it to Oxford for facial recognition processing.
        /// </summary>
        private async Task DoorbellPressed()
        {
            // Display analysing visitors grid to inform user that doorbell press was registered
            AnalysingVisitorGrid.Visibility = Visibility.Visible;

            // List to store visitors recognized by Oxford Face API
            // Count will be greater than 0 if there is an authorized visitor at the door
            List <string> recognizedVisitors = new List <string>();

            // Confirms that webcam has been properly initialized and oxford is ready to go
            if (webcam.IsInitialized() && initializedOxford)
            {
                // Stores current frame from webcam feed in a temporary folder
                StorageFile image = await webcam.CapturePhoto();

                try
                {
                    // Oxford determines whether the visitor is on the whitelist and returns the recognized names
                    recognizedVisitors = await OxfordFaceAPIHelper.IsFaceInWhitelist(image);
                }
                catch (FaceRecognitionException fe)
                {
                    switch (fe.ExceptionType)
                    {
                    // Fails and catches as a FaceRecognitionException if no face is detected in the image
                    case FaceRecognitionExceptionType.NoFaceDetected:
                        Debug.WriteLine("WARNING: No face detected in this image.");
                        break;
                    }
                }
                catch (FaceAPIException faceAPIEx)
                {
                    Debug.WriteLine("FaceAPIException in IsFaceInWhitelist(): " + faceAPIEx.ErrorMessage);
                }
                catch
                {
                    // General error. This can happen if there are no visitors authorized in the whitelist
                    Debug.WriteLine("WARNING: Oxford just threw a general expception.");
                }

                if (recognizedVisitors.Count > 0)
                {
                    // If everything went well and a visitor was recognized, unlock the door:
                    AuthenticateUser(recognizedVisitors[0]);
                }
                else
                {
                    // Otherwise, inform user that they were not recognized by the system
                    await speech.Read(SpeechContants.VisitorNotRecognizedMessage);
                }
            }
            else
            {
                if (!webcam.IsInitialized())
                {
                    // The webcam has not been fully initialized for whatever reason:
                    Debug.WriteLine("Unable to analyze visitor at door as the camera failed to initlialize properly.");
                    await speech.Read(SpeechContants.NoCameraMessage);
                }

                if (!initializedOxford)
                {
                    // Oxford is still initializing:
                    Debug.WriteLine("Unable to analyze visitor at door as Oxford Facial Recogntion is still initializing.");
                }
            }

            doorbellJustPressed             = false;
            AnalysingVisitorGrid.Visibility = Visibility.Collapsed;
        }
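
`SpeechContants` (the spelling follows the source) appears in several of these examples only through its fields. Here is a sketch of the constants class they imply; the constant names come from the snippets, while every message text is a placeholder assumption:

// Sketch of the SpeechContants class implied by these examples. The names are taken
// from the snippets; every message text below is a placeholder assumption.
public static class SpeechContants
{
    public const string InitialGreetingMessage      = "Welcome! Ring the doorbell to check in.";
    public const string VisitorNotRecognizedMessage = "Sorry, I don't recognize you, so I can't unlock the door.";
    public const string NoCameraMessage             = "Sorry, the camera is not available right now.";
    public const string CountDownLogin              = "Please log in within the next fifteen seconds.";
    public const string Introduction                = "Hello! I am your voice assistant.";
    public const string Creators                    = "I was built by my creators.";
}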
Example #11
        /// <summary>
        /// Render ObjectDetector skill results
        /// </summary>
        /// <param name="frame"></param>
        /// <param name="objectDetections"></param>
        /// <returns></returns>
        private async Task DisplayFrameAndResultAsync(VideoFrame frame, int CCTVIndex)
        {
            await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, async() =>
            {
                try
                {
                    SoftwareBitmap savedBmp = null;
                    if (frame.SoftwareBitmap != null)
                    {
                        await m_processedBitmapSource[CCTVIndex].SetBitmapAsync(frame.SoftwareBitmap);
                        savedBmp = frame.SoftwareBitmap;
                    }
                    else
                    {
                        var bitmap = await SoftwareBitmap.CreateCopyFromSurfaceAsync(frame.Direct3DSurface, BitmapAlphaMode.Ignore);
                        await m_processedBitmapSource[CCTVIndex].SetBitmapAsync(bitmap);
                        savedBmp = bitmap;
                    }

                    // Retrieve and filter results if requested
                    IReadOnlyList <ObjectDetectorResult> objectDetections = m_binding.DetectedObjects;
                    if (m_objectKinds?.Count > 0)
                    {
                        objectDetections = objectDetections.Where(det => m_objectKinds.Contains(det.Kind)).ToList();
                    }
                    if (objectDetections != null)
                    {
                        // Update displayed results
                        m_bboxRenderer[CCTVIndex].Render(objectDetections);
                        bool PersonDetected = false;
                        int PersonCount     = 0;
                        var rects           = new List <Rect>();
                        foreach (var obj in objectDetections)
                        {
                            if (obj.Kind.ToString().ToLower() == "person")
                            {
                                PersonCount++;
                                PersonDetected = true;
                                rects.Add(obj.Rect);
                            }
                        }
                        if (PersonDetected)
                        {
                            bool KeepDistance = false;
                            if ((bool)ChkSocialDistancing.IsChecked)
                            {
                                //make sure there is more than 1 person
                                if (rects.Count > 1)
                                {
                                    var res = SocialDistanceHelpers.Detect(rects.ToArray());
                                    if (res.Result)
                                    {
                                        KeepDistance = true;
                                        m_bboxRenderer[CCTVIndex].DistanceLineRender(res.Lines);
                                        await speech.Read($"Please keep distance in {DataConfig.RoomName[CCTVIndex]}");
                                    }
                                }
                                else
                                {
                                    m_bboxRenderer[CCTVIndex].ClearLineDistance();
                                }
                            }
                            else
                            {
                                m_bboxRenderer[CCTVIndex].ClearLineDistance();
                            }
                            var msg = $"I saw {PersonCount} person in {DataConfig.RoomName[CCTVIndex]}";
                            if ((bool)ChkMode.IsChecked)
                            {
                                PlaySound(Sounds[Rnd.Next(0, Sounds.Count)]); // Next's upper bound is exclusive
                            }
                            else if (!KeepDistance)
                            {
                                await speech.Read(msg);
                            }
                            if ((bool)ChkPatrol.IsChecked)
                            {
                                await NotificationService.SendMail("Person Detected in BMSpace", msg, DataConfig.MailTo, DataConfig.MailFrom);
                                await NotificationService.SendSms(DataConfig.SmsTo, msg);
                            }
                            bool IsFaceDetected = false;
                            if ((bool)ChkDetectMask.IsChecked)
                            {
                                SoftwareBitmap softwareBitmapInput = frame.SoftwareBitmap;
                                // Retrieve a SoftwareBitmap to run face detection
                                if (softwareBitmapInput == null)
                                {
                                    if (frame.Direct3DSurface == null)
                                    {
                                        throw (new ArgumentNullException("An invalid input frame has been bound"));
                                    }
                                    softwareBitmapInput = await SoftwareBitmap.CreateCopyFromSurfaceAsync(frame.Direct3DSurface);
                                }
                                // We need to convert the image into a format that's compatible with FaceDetector.
                                // Gray8 should be a good type but verify it against FaceDetector’s supported formats.
                                const BitmapPixelFormat InputPixelFormat = BitmapPixelFormat.Gray8;
                                if (FaceDetector.IsBitmapPixelFormatSupported(InputPixelFormat))
                                {
                                    using (var detectorInput = SoftwareBitmap.Convert(softwareBitmapInput, InputPixelFormat))
                                    {
                                        // Run face detection and retrieve face detection result
                                        var faceDetectionResult = await m_faceDetector.DetectFacesAsync(detectorInput);

                                        // If a face is found, update face rectangle feature
                                        if (faceDetectionResult.Count > 0)
                                        {
                                            IsFaceDetected = true;
                                            // Retrieve the face bound and enlarge it by a factor of 1.5x while also ensuring clamping to frame dimensions
                                            BitmapBounds faceBound = faceDetectionResult[0].FaceBox;
                                            var additionalOffset   = faceBound.Width / 2;
                                            faceBound.X            = Math.Max(0, faceBound.X - additionalOffset);
                                            faceBound.Y            = Math.Max(0, faceBound.Y - additionalOffset);
                                            faceBound.Width        = (uint)Math.Min(faceBound.Width + 2 * additionalOffset, softwareBitmapInput.PixelWidth - faceBound.X);
                                            faceBound.Height       = (uint)Math.Min(faceBound.Height + 2 * additionalOffset, softwareBitmapInput.PixelHeight - faceBound.Y);

                                            var maskdetect  = await MaskDetect.PredictImageAsync(frame);
                                            var noMaskCount = maskdetect.Where(x => x.TagName == "no-mask").Count();
                                            if (noMaskCount > 0)
                                            {
                                                if (!KeepDistance)
                                                {
                                                    await speech.Read($"please wear a face mask in {DataConfig.RoomName[CCTVIndex]}");
                                                }
                                            }
                                        }
                                    }
                                }
                            }
                            if (!IsFaceDetected)
                            {
                                m_bboxRenderer[CCTVIndex].ClearMaskLabel();
                            }
                            //save to picture libs

                            /*
                             * String path = Environment.GetFolderPath(Environment.SpecialFolder.MyPictures);
                             * path += "\\CCTV";
                             * if (!Directory.Exists(path))
                             * {
                             *  Directory.CreateDirectory(path);
                             * }*/
                            var TS = DateTime.Now - LastSaved[CCTVIndex];
                            if (savedBmp != null && TS.TotalSeconds > DataConfig.CaptureIntervalSecs && (bool)ChkCapture.IsChecked)
                            {
                                var myPictures = await Windows.Storage.StorageLibrary.GetLibraryAsync(Windows.Storage.KnownLibraryId.Pictures);
                                Windows.Storage.StorageFolder rootFolder    = myPictures.SaveFolder;
                                Windows.Storage.StorageFolder storageFolder = rootFolder;
                                var folderName = "cctv";
                                try
                                {
                                    storageFolder = await rootFolder.GetFolderAsync(folderName);
                                }
                                catch
                                {
                                    storageFolder = await rootFolder.CreateFolderAsync(folderName);
                                }
                                // Create sample file; replace if exists.
                                Windows.Storage.StorageFile sampleFile =
                                    await storageFolder.CreateFileAsync($"cctv_{DateTime.Now.ToString("dd_MM_yyyy_HH_mm_ss")}_{CCTVIndex}.jpg",
                                                                        Windows.Storage.CreationCollisionOption.ReplaceExisting);
                                ImageHelpers.SaveSoftwareBitmapToFile(savedBmp, sampleFile);
                                LastSaved[CCTVIndex] = DateTime.Now;
                            }
                        }
                    }

                    // Update the displayed performance text
                    StatusLbl.Text = $"bind: {m_bindTime.ToString("F2")}ms, eval: {m_evalTime.ToString("F2")}ms";
                }
                catch (TaskCanceledException)
                {
                    // no-op: we expect this exception when we change media sources
                    // and can safely ignore/continue
                }
                catch (Exception ex)
                {
                    NotifyUser($"Exception while rendering results: {ex.Message}");
                }
            });
        }
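
`ImageHelpers.SaveSoftwareBitmapToFile` is called above but not shown (and not awaited). A minimal sketch of such a helper using the standard UWP `BitmapEncoder` API; the class and method names follow the call site, while the async signature and the JPEG choice are assumptions:

// Hypothetical sketch of the SaveSoftwareBitmapToFile helper used above, built on the
// standard UWP BitmapEncoder API. Requires using System.Threading.Tasks;,
// using Windows.Graphics.Imaging; and using Windows.Storage;.
public static class ImageHelpers
{
    public static async Task SaveSoftwareBitmapToFile(SoftwareBitmap bitmap, StorageFile file)
    {
        // BitmapEncoder generally expects Bgra8; convert defensively before encoding
        using (var converted = SoftwareBitmap.Convert(bitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied))
        using (var stream = await file.OpenAsync(FileAccessMode.ReadWrite))
        {
            var encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, stream);
            encoder.SetSoftwareBitmap(converted);
            await encoder.FlushAsync();
        }
    }
}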
Example #12
        /// <summary>
        /// Handle events fired when a result is generated. This may include a garbage rule that fires when general room noise
        /// or side-talk is captured (this will have a confidence of Rejected typically, but may occasionally match a rule with
        /// low confidence).
        /// </summary>
        /// <param name="sender">The Recognition session that generated this result</param>
        /// <param name="args">Details about the recognized speech</param>
        private async void ContinuousRecognitionSession_ResultGenerated(SpeechContinuousRecognitionSession sender, SpeechContinuousRecognitionResultGeneratedEventArgs args)
        {
            // The garbage rule will not have a tag associated with it; the other rules will return a string matching the tag provided
            // when generating the grammar.
            string tag = "unknown";

            if (args.Result.Constraint != null)
            {
                tag = args.Result.Constraint.Tag;
            }

            // Developers may decide to use per-phrase confidence levels in order to tune the behavior of their
            // grammar based on testing.
            if (args.Result.Confidence == SpeechRecognitionConfidence.Low ||
                args.Result.Confidence == SpeechRecognitionConfidence.Medium ||
                args.Result.Confidence == SpeechRecognitionConfidence.High)
            {
                await dispatcher.RunAsync(CoreDispatcherPriority.Normal, async() =>
                {
                    heardYouSayTextBlock.Visibility = Visibility.Visible;
                    resultTextBlock.Visibility      = Visibility.Visible;
                    resultTextBlock.Text            = string.Format("Heard: '{0}', (Tag: '{1}', Confidence: {2})", args.Result.Text, tag, args.Result.Confidence.ToString());
                    switch (tag)
                    {
                    case TagCommands.GetJoke:
                        {
                            var res = await JokeHelper.GetJoke();
                            if (!string.IsNullOrEmpty(res.value.joke))
                            {
                                await speech.Read(res.value.joke);
                                resultTextBlock.Text = res.value.joke;
                            }
                        }
                        break;

                    case TagCommands.HowOld:
                        {
                            var photo = await TakePhoto();
                            //call computer vision
                            var faces = await ApiContainer.GetApi <FaceService>().UploadAndDetectFaceAttributes(photo);
                            var res   = ApiContainer.GetApi <FaceService>().HowOld(faces);
                            if (!string.IsNullOrEmpty(res))
                            {
                                await speech.Read(res);
                                resultTextBlock.Text = res;
                            }
                        }
                        break;

                    case TagCommands.Calling:
                        await speech.Read("Yes, what can I do Boss?");
                        break;

                    case TagCommands.SeeMe:
                        {
                            var photo = await TakePhoto();
                            //call computer vision
                            var res = await ApiContainer.GetApi <ComputerVisionService>().RecognizeImage(photo);
                            if (!string.IsNullOrEmpty(res))
                            {
                                await speech.Read(res);
                                resultTextBlock.Text = "I see " + res;
                            }
                        }
                        break;

                    case TagCommands.ReadText:
                        {
                            var photo = await TakePhoto();
                            //call computer vision
                            var res = await ApiContainer.GetApi <ComputerVisionService>().RecognizeText(photo);
                            if (!string.IsNullOrEmpty(res))
                            {
                                await speech.Read(res);
                                resultTextBlock.Text = "read: " + res;
                            }
                        }
                        break;

                    case TagCommands.Stop:
                        Player1.MediaPlayer.Pause();
                        break;

                    case TagCommands.PlayBlues:
                    case TagCommands.PlaySlow:
                    case TagCommands.PlayRock:
                    case TagCommands.PlayJazz:
                    case TagCommands.PlayElectro:

                        {
                            var genre = Genre.Slow;
                            switch (tag)
                            {
                            case TagCommands.PlayBlues: genre = Genre.Blues; break;

                            case TagCommands.PlayRock: genre = Genre.Rock; break;

                            case TagCommands.PlaySlow: genre = Genre.Slow; break;

                            case TagCommands.PlayJazz: genre = Genre.Jazz; break;

                            case TagCommands.PlayElectro: genre = Genre.Electro; break;
                            }
                            var rnd    = new Random(Environment.TickCount);
                            var selIds = SongIDs[genre];
                            var num    = rnd.Next(0, selIds.Length); // Next's upper bound is exclusive
                            var url    = await YouTube.GetVideoUriAsync(selIds[num], YouTubeQuality.QualityLow);
                            MediaPlayerHelper.CleanUpMediaPlayerSource(Player1.MediaPlayer);
                            Player1.MediaPlayer.Source = new MediaItem(url.Uri.ToString()).MediaPlaybackItem;
                            Player1.MediaPlayer.Play();
                        }

                        break;

                    case TagCommands.TakePhoto:
                        await speech.Read("I will take your picture boss");
                        //GetPhotoFromCam();
                        break;

                    case TagCommands.Thanks:
                        await speech.Read("My pleasure boss");
                        break;

                    case TagCommands.TurnOnLamp:
                        {
                            //await speech.Read("Turn on the light");
                            //var Pesan = Encoding.UTF8.GetBytes("LIGHT_ON");
                            //clientMqtt.PublishMessage(Pesan);
                        }
                        break;

                    case TagCommands.TurnOffLamp:
                        {
                            //await speech.Read("Turn off the light");
                            //var Pesan = Encoding.UTF8.GetBytes("LIGHT_OFF");
                            //clientMqtt.Publish( Pesan);
                        }
                        break;

                    case TagCommands.ReciteQuran:
                        {
                            try
                            {
                                Random rnd = new Random(Environment.TickCount);
                                var surah  = rnd.Next(1, 114);
                                var rslt   = await httpClient.GetAsync($"http://qurandataapi.azurewebsites.net/api/Ayah/GetAyahCountBySurah?Surah={surah}");
                                var ayah   = int.Parse(await rslt.Content.ReadAsStringAsync());
                                ayah       = rnd.Next(1, ayah);
                                rslt       = await httpClient.GetAsync($"http://qurandataapi.azurewebsites.net/api/Ayah/GetMediaByAyah?Surah={surah}&Ayah={ayah}&ReciterId=11");
                                var media  = JsonConvert.DeserializeObject <QuranMedia>(await rslt.Content.ReadAsStringAsync());
                                if (media != null)
                                {
                                    MediaPlayerHelper.CleanUpMediaPlayerSource(Player1.MediaPlayer);
                                    Player1.MediaPlayer.Source = new MediaItem(media.Url).MediaPlaybackItem;
                                    Player1.MediaPlayer.Play();
                                }
                            }
                            catch
                            {
                                await speech.Read("there is problem on the service");
                            }
                        }
                        break;

                    case TagCommands.WhatDate: { await speech.Read("Today is " + DateTime.Now.ToString("dd MMMM yyyy")); }; break;

                    case TagCommands.WhatTime: { await speech.Read("Current time is " + DateTime.Now.ToString("HH:mm")); }; break;

                    default:
                        for (int x = 0; x < Devices.Count; x++)
                        {
                            if (tag == $"TURNON{x}")
                            {
                                SwitchDevice(true, Devices[x].IP);
                                break;
                            }
                            else if (tag == $"TURNOFF{x}")
                            {
                                SwitchDevice(false, Devices[x].IP);
                                break;
                            }
                        }
                        break;
                    }
                });
            }
            else
            {
                // In some scenarios, a developer may choose to ignore giving the user feedback in this case, if speech
                // is not the primary input mechanism for the application.
                await dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
                {
                    heardYouSayTextBlock.Visibility = Visibility.Collapsed;
                    resultTextBlock.Visibility      = Visibility.Visible;
                    resultTextBlock.Text            = string.Format("Sorry, I didn't catch that. (Heard: '{0}', Tag: {1}, Confidence: {2})", args.Result.Text, tag, args.Result.Confidence.ToString());
                });
            }
        }
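
For context, here is a minimal setup that would feed this handler, wiring tag strings into `SpeechRecognitionListConstraint`s and starting the continuous session. The phrases below are placeholders; only the tag mechanism (`args.Result.Constraint.Tag`) is taken from the handler above:

// Illustrative wiring for the continuous recognition session handled above.
// Requires using Windows.Media.SpeechRecognition;. Phrases are placeholders.
private SpeechRecognizer recognizer;

private async Task InitializeRecognizerAsync()
{
    recognizer = new SpeechRecognizer();

    // Each constraint carries a tag that comes back via args.Result.Constraint.Tag
    recognizer.Constraints.Add(new SpeechRecognitionListConstraint(new[] { "tell me a joke" }, TagCommands.GetJoke));
    recognizer.Constraints.Add(new SpeechRecognitionListConstraint(new[] { "what time is it" }, TagCommands.WhatTime));

    var compilation = await recognizer.CompileConstraintsAsync();
    if (compilation.Status == SpeechRecognitionResultStatus.Success)
    {
        recognizer.ContinuousRecognitionSession.ResultGenerated += ContinuousRecognitionSession_ResultGenerated;
        await recognizer.ContinuousRecognitionSession.StartAsync();
    }
}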