private async void btnCheckEmotion_Clicked(object sender, EventArgs e)
{
    ObservableCollection<PersonEmotion> DetectedEmotions = new ObservableCollection<PersonEmotion>();
    var emotionClient = new EmotionServiceClient("f6b14f92fda541d68e759c37abaa7f32");

    Emotion[] emotionResults = await emotionClient.RecognizeAsync(imageFile.GetStream());

    foreach (var emotion in emotionResults)
    {
        DetectedEmotions.Add(new PersonEmotion()
        {
            Happiness = emotion.Scores.Happiness,
            Fear = emotion.Scores.Fear,
            Disgust = emotion.Scores.Disgust,
            Sadness = emotion.Scores.Sadness
        });
    }

    // Guard against the case where no face was detected; FirstOrDefault() would return null.
    PersonEmotion mainEmotion = DetectedEmotions.FirstOrDefault();
    if (mainEmotion == null)
    {
        await DisplayAlert("Emotion", "No face detected", "OK");
        return;
    }

    if (mainEmotion.Happiness > 0.5f)
    {
        await DisplayAlert("Emotion", "You are happy", "OK");
    }
    else
    {
        await DisplayAlert("Emotion", "You are not happy", "OK");
    }
}
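// The PersonEmotion model used above is not part of this snippet. A minimal sketch,
// assuming it is a plain view model holding only the four scores referenced here
// (the original class may define more members):
public class PersonEmotion
{
    public float Happiness { get; set; }
    public float Fear { get; set; }
    public float Disgust { get; set; }
    public float Sadness { get; set; }
}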
private async void StartButton_Click(object sender, RoutedEventArgs e)
{
    if (!CameraList.HasItems)
    {
        MessageArea.Text = "No cameras found; cannot start processing";
        return;
    }

    // Clean leading/trailing spaces in API keys.
    Properties.Settings.Default.FaceAPIKey = Properties.Settings.Default.FaceAPIKey.Trim();
    Properties.Settings.Default.EmotionAPIKey = Properties.Settings.Default.EmotionAPIKey.Trim();
    Properties.Settings.Default.VisionAPIKey = Properties.Settings.Default.VisionAPIKey.Trim();

    // Create API clients.
    _faceClient = new FaceServiceClient(Properties.Settings.Default.FaceAPIKey, Properties.Settings.Default.FaceAPIHost);
    _emotionClient = new EmotionServiceClient(Properties.Settings.Default.EmotionAPIKey, Properties.Settings.Default.EmotionAPIHost);
    _visionClient = new VisionServiceClient(Properties.Settings.Default.VisionAPIKey, Properties.Settings.Default.VisionAPIHost);

    // How often to analyze.
    _grabber.TriggerAnalysisOnInterval(Properties.Settings.Default.AnalysisInterval);

    // Reset message.
    MessageArea.Text = "";

    // Record start time, for auto-stop.
    _startTime = DateTime.Now;

    await _grabber.StartProcessingCameraAsync(CameraList.SelectedIndex);
}
/// <summary>
/// Initializes the Emotion client and starts the camera preview when the page loads.
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private async void Page_Loaded(object sender, RoutedEventArgs e)
{
    // Create the Emotion API client.
    client = new EmotionServiceClient("{your subscription key}");

    // Configure the capture device.
    MediaCaptureInitializationSettings captureInitSettings = new MediaCaptureInitializationSettings();
    captureInitSettings.StreamingCaptureMode = StreamingCaptureMode.Video;
    captureInitSettings.PhotoCaptureSource = PhotoCaptureSource.VideoPreview;

    var devices = await DeviceInformation.FindAllAsync(DeviceClass.VideoCapture);
    captureInitSettings.VideoDeviceId = devices[0].Id;

    capture = new MediaCapture();
    await capture.InitializeAsync(captureInitSettings);

    // Set the capture size and format.
    VideoEncodingProperties vp = new VideoEncodingProperties();
    vp.Width = 320;
    vp.Height = 240;
    vp.Subtype = "YUY2";
    await capture.VideoDeviceController.SetMediaStreamPropertiesAsync(MediaStreamType.VideoPreview, vp);

    preview.Source = capture;
    await capture.StartPreviewAsync();
}
static async void CallProjectOxford(string imageFilePath)
{
    // Declare an EmotionServiceClient object; we will use it to communicate with the Emotion API.
    EmotionServiceClient client = new EmotionServiceClient(ApiKey);

    // Convert the image file to a MemoryStream.
    MemoryStream mem = new MemoryStream(GetImageAsByteArray(imageFilePath));

    // Store the result in an emotion list. These three lines are all the communication
    // we need with the API; the ProjectOxford library makes it easier to work with the
    // Cognitive Services APIs.
    IEnumerable<Emotion> emotionList = await client.RecognizeAsync(mem);

    // Print the face rectangle and emotion scores for every detected face.
    foreach (Emotion emotion in emotionList)
    {
        Console.WriteLine("Face");
        Console.WriteLine($"Top: {emotion.FaceRectangle.Top}, Width: {emotion.FaceRectangle.Width}, " +
                          $"Left: {emotion.FaceRectangle.Left}, Height: {emotion.FaceRectangle.Height}");
        Console.WriteLine();
        Console.WriteLine("Emotion");
        Console.WriteLine($"Anger: {emotion.Scores.Anger}, " +
                          $"Contempt: {emotion.Scores.Contempt}, " +
                          $"Disgust: {emotion.Scores.Disgust}, " +
                          $"Fear: {emotion.Scores.Fear}, " +
                          $"Happiness: {emotion.Scores.Happiness}, " +
                          $"Neutral: {emotion.Scores.Neutral}, " +
                          $"Sadness: {emotion.Scores.Sadness}, " +
                          $"Surprise: {emotion.Scores.Surprise}");
    }
}
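// GetImageAsByteArray is referenced above but not shown. A minimal sketch, assuming it
// simply reads the whole image file into memory (the original helper may differ):
static byte[] GetImageAsByteArray(string imageFilePath)
{
    return File.ReadAllBytes(imageFilePath);
}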
protected async void Page_Load(object sender, EventArgs e)
{
    var res = await Search("bill+gates");
    var cli = new EmotionServiceClient("c89be8532536452d996130a6083d3ca5");

    // Track the highest happiness, anger and surprise scores seen so far.
    float h = 0, a = 0, s = 0;

    foreach (var x in res)
    {
        var r = await cli.RecognizeAsync(x);
        if (r != null && r.Length > 0)
        {
            var f = r[0];
            if (f.Scores.Happiness > h) { h = f.Scores.Happiness; ha.ImageUrl = x; }
            if (f.Scores.Anger > a) { a = f.Scores.Anger; an.ImageUrl = x; }
            if (f.Scores.Surprise > s) { s = f.Scores.Surprise; su.ImageUrl = x; }

            // Pause between calls to stay within the API rate limit.
            await Task.Delay(1000);
        }
    }
}
async void OnEmotion(object sender, RoutedEventArgs e)
{
    if (this.cameraDisplay.CapturedPhotoStream != null)
    {
        var emotionClient = new EmotionServiceClient(Keys.EmotionKey);
        var results = await emotionClient.RecognizeAsync(
            this.cameraDisplay.CapturedPhotoStream.AsStreamForRead());

        var legend = new StringBuilder();

        foreach (var person in results)
        {
            var emotionScores = person.Scores.AsDictionary();
            var labelledScores = emotionScores
                .OrderByDescending(entry => entry.Value)
                .Select(entry => new KeyValuePair<string, string>(
                    entry.Key,
                    LabelFromConfidenceValue(entry.Key, entry.Value)));

            var listOfScores = string.Join(", ", labelledScores.Select(entry => entry.Value));
            legend.AppendLine(listOfScores);
        }

        this.cameraDisplay.ShowLegend(legend.ToString());
    }
}
public static async Task<Emotion[]> GetHappinessAsync(Stream stream)
{
    var emotionClient = new EmotionServiceClient(APIKeys.EMOTION_API);
    var emotions = await emotionClient.RecognizeAsync(stream);
    return emotions;
}
/// <summary>
/// Sends the image URL to Project Oxford and detects emotions.
/// </summary>
/// <param name="url">The URL of the image to analyze.</param>
/// <returns>The detected emotions, or null if the call failed.</returns>
private async Task<Emotion[]> UploadAndDetectEmotions(string url)
{
    Debug.WriteLine("EmotionServiceClient is created");

    // Create the Project Oxford Emotion API Service client.
    EmotionServiceClient emotionServiceClient = new EmotionServiceClient(_subscriptionKey);

    Debug.WriteLine("Calling EmotionServiceClient.RecognizeAsync()...");
    try
    {
        // Detect the emotions in the image at the given URL.
        Emotion[] emotionResult = await emotionServiceClient.RecognizeAsync(url);
        return emotionResult;
    }
    catch (Exception exception)
    {
        Debug.WriteLine("Detection failed. Please make sure that you have the right subscription key and proper URL to detect.");
        Debug.WriteLine(exception.ToString());
        return null;
    }
}
/// <summary>
/// Uploads the image to Project Oxford and detects emotions, falling back to a second key if the first call returns nothing.
/// </summary>
/// <param name="imageFilePath">Path of the image file to analyze.</param>
/// <returns>The detected emotions, or null if the call failed.</returns>
public static async Task<Emotion[]> UploadAndStreamDetectEmotionsAsync(string imageFilePath)
{
    try
    {
        // Pick a random subscription key and create the Emotion API Service client.
        var index = random.Next(0, subscriptionKeyArray.Length);
        var renderKey = subscriptionKeyArray[index];
        EmotionServiceClient emotionServiceMasterClient = new EmotionServiceClient(renderKey);

        using (Stream imageFileStream = File.OpenRead(imageFilePath))
        {
            // Detect the emotions in the image.
            var emotionResult = await emotionServiceMasterClient.RecognizeAsync(imageFileStream);

            if (emotionResult == null)
            {
                // Retry with the other key; rewind the stream first, since the previous call consumed it.
                imageFileStream.Seek(0, SeekOrigin.Begin);
                renderKey = subscriptionKeyArray.FirstOrDefault(z => z != renderKey);
                var emotionServiceSlaveClient = new EmotionServiceClient(renderKey);
                emotionResult = await emotionServiceSlaveClient.RecognizeAsync(imageFileStream);
            }

            return emotionResult;
        }
    }
    catch (Exception)
    {
        return null;
    }
}
public async Task<VisionResult> Analyze(Stream stream)
{
    var client = new EmotionServiceClient(Constants.EmotionApiKey, Constants.EmotionApiEndpoint);
    var attributes = new List<VisionAttribute>();
    var rectangles = new List<Rectangle>();

    using (stream)
    {
        var emotionResult = await client.RecognizeAsync(stream);

        if (emotionResult != null && emotionResult.Length > 0)
        {
            for (int i = 0; i < emotionResult.Length; i++)
            {
                var emotion = emotionResult[i];
                rectangles.Add(emotion.FaceRectangle.ToRectangle());

                foreach (var score in emotion.Scores.ToRankedList())
                {
                    attributes.Add(new VisionAttribute($"Pessoa {i}", score.Key, score.Value));
                }
            }
        }
    }

    return new VisionResult { Attributes = attributes, Rectangles = rectangles };
}
private static async Task<FaceResult[]> GetHappinessAsync(Stream stream)
{
    var loading = UserDialogs.Instance.Loading("Analyzing...");
    loading.Show();

    var emotionClient = new EmotionServiceClient(CognitiveServicesKeys.Emotion);

    try
    {
        var emotionResults = await emotionClient.RecognizeAsync(stream);

        if (emotionResults == null || emotionResults.Count() == 0)
        {
            throw new Exception("Can't detect face");
        }

        return emotionResults;
    }
    finally
    {
        loading.Hide();
    }
}
public FaceMetaData(string facekey, string emotionKey)
{
    _seenAlready = new List<Guid>();

    // Initialize the Face and Emotion service clients.
    _faceServiceClient = new FaceServiceClient(facekey);
    _emotionServiceClient = new EmotionServiceClient(emotionKey);
}
private async void getEmotionResult(MediaFile file)
{
    _currentFile = file;
    var emotionServiceClient = new EmotionServiceClient(Config.EmotionApiKey);

    using (Stream stream = file.GetStream())
    {
        _score = 0;
        IsLoading = true;
        CanDoTakePicture = false;
        CanConfirmPicture = false;
        EmotionResultText = "Analyzing...";

        try
        {
            await analyzePictureToResult(emotionServiceClient, stream);
        }
        catch
        {
            EmotionResultText = "Analysis failed. Please check your network connection, try another picture, or try again later.";
        }

        IsLoading = false;
        CanDoTakePicture = true;
    }
}
public MainPage()
{
    this.InitializeComponent();
    Application.Current.Suspending += Current_Suspending;
    _emotionClient = new EmotionServiceClient("KEY");
}
public async Task<ObservableCollection<Face>> UpdateEmotions(string selectedFile, ObservableCollection<Face> faces, string subscriptionKeyEmotions)
{
    using (var fileStreamEmotions = File.OpenRead(selectedFile))
    {
        var emotionServiceClient = new EmotionServiceClient(subscriptionKeyEmotions);
        var emotions = await emotionServiceClient.RecognizeAsync(
            fileStreamEmotions,
            faces.Select(face => new Rectangle
            {
                Height = face.Height,
                Left = face.Left,
                Top = face.Top,
                Width = face.Width
            }).ToArray());

        foreach (var emotion in emotions)
        {
            foreach (var face in faces.Where(face =>
                face.Height == emotion.FaceRectangle.Height &&
                face.Left == emotion.FaceRectangle.Left &&
                face.Top == emotion.FaceRectangle.Top &&
                face.Width == emotion.FaceRectangle.Width))
            {
                var mappedScores = AutoMapper.Mapper.Map<Microsoft.ProjectOxford.Emotion.Contract.Scores>(emotion.Scores);
                face.Scores = mappedScores;
                face.CalculateEmotion();
            }
        }

        return faces;
    }
}
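// Face.CalculateEmotion is not shown in this snippet. A minimal sketch, assuming it only
// picks the highest-scoring emotion from the mapped Scores and stores its name in a
// hypothetical Emotion property on the Face view model (the real method may do more):
public void CalculateEmotion()
{
    // ToRankedList() returns the emotions ordered by confidence, highest first.
    Emotion = Scores.ToRankedList().First().Key;
}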
/// <summary>
/// Fires when the image loaded from a pasted URL has finished opening.
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private async void imgPhoto_ImageOpened(object sender, RoutedEventArgs e)
{
    size_image = new Size((imgPhoto.Source as BitmapImage).PixelWidth, (imgPhoto.Source as BitmapImage).PixelHeight);

    // Call the Face and Emotion APIs.
    FaceServiceClient f_client = new FaceServiceClient(key_face);
    EmotionServiceClient e_client = new EmotionServiceClient(key_emotion);

    var requiredFaceAttributes = new FaceAttributeType[]
    {
        FaceAttributeType.Age,
        FaceAttributeType.Gender,
        FaceAttributeType.Smile,
        FaceAttributeType.FacialHair,
        FaceAttributeType.HeadPose,
        FaceAttributeType.Glasses
    };

    var faces_task = f_client.DetectAsync(txtLocation.Text, true, true, requiredFaceAttributes);
    var emotion_task = e_client.RecognizeAsync(txtLocation.Text);

    faces = await faces_task;
    emotions = await emotion_task;

    if (faces != null)
    {
        DisplayFacesData(faces);
    }

    if (emotions != null)
    {
        DisplayEmotionsData(emotions);
    }

    ringLoading.IsActive = false;
}
public async void getemotions()
{
    string subscriptionKey = ConfigurationManager.AppSettings["subscriptionKey"];
    EmotionServiceClient emotionServiceClient = new EmotionServiceClient(subscriptionKey);

    try
    {
        Emotion[] emotionResult;

        // The image arrives base64-encoded in the "formfield" query string parameter.
        byte[] data = System.Convert.FromBase64String(Request.QueryString["formfield"]);

        using (Stream imageFileStream = new MemoryStream(data))
        {
            // Detect the emotions in the posted image.
            emotionResult = await emotionServiceClient.RecognizeAsync(imageFileStream);
        }
    }
    catch (Exception)
    {
        // Swallow the error; the page simply shows no emotion data.
    }
}
private async void Timer_Tick(object sender, object e)
{
    timer.Stop();

    await mediaCapture.InitializeAsync();
    var lowLagCapture = await mediaCapture.PrepareLowLagPhotoCaptureAsync(ImageEncodingProperties.CreateJpeg());
    var capturedPhoto = await lowLagCapture.CaptureAsync();

    EmotionServiceClient emotionServiceClient = new EmotionServiceClient("638268f88e154b588fabdf134ee3e63e");

    // Optional: display the captured frame in the UI.
    //SoftwareBitmap softwareBitmap = capturedPhoto.Frame.SoftwareBitmap;
    //SoftwareBitmap softwareBitmapBGR8 = SoftwareBitmap.Convert(softwareBitmap,
    //    BitmapPixelFormat.Bgra8,
    //    BitmapAlphaMode.Premultiplied);
    //SoftwareBitmapSource bitmapSource = new SoftwareBitmapSource();
    //await bitmapSource.SetBitmapAsync(softwareBitmapBGR8);
    //imgCaptured.Source = bitmapSource;

    Emotion[] emotionResult = await emotionServiceClient.RecognizeAsync(capturedPhoto.Frame.AsStreamForRead());
    processEmotion(emotionResult);

    await lowLagCapture.FinishAsync();
}
private static async Task<Emotion[]> DetectEmotions(string imageUrl, string emotionServiceApiKey, TraceWriter log)
{
    EmotionServiceClient emotionServiceClient = new EmotionServiceClient(emotionServiceApiKey);

    int retriesLeft = int.Parse(CloudConfigurationManager.GetSetting("CognitiveServicesRetryCount"));
    int delay = int.Parse(CloudConfigurationManager.GetSetting("CognitiveServicesInitialRetryDelayms"));

    Emotion[] response = null;

    while (true)
    {
        try
        {
            response = await emotionServiceClient.RecognizeAsync(imageUrl);
            break;
        }
        catch (ClientException exception) when (exception.HttpStatus == (HttpStatusCode)429 && retriesLeft > 0)
        {
            log.Info($"Emotion API call has been throttled. {retriesLeft} retries left.");
            if (retriesLeft == 1)
            {
                log.Warning($"Emotion API call still throttled after {CloudConfigurationManager.GetSetting("CognitiveServicesRetryCount")} attempts, giving up.");
            }

            // Exponential backoff before the next attempt.
            await Task.Delay(delay);
            retriesLeft--;
            delay *= 2;
        }
    }

    return response;
}
public async Task<Emotion[]> GetEmotion()
{
    try
    {
        var emotionServiceClient = new EmotionServiceClient("keyemotio");
        var emotion = await emotionServiceClient.RecognizeAsync(this.ImageResult.Url);
        this.EmotionCollection = emotion;

        // No face detected: nothing to add to the collection.
        if (emotion == null || emotion.Length == 0)
        {
            return emotion;
        }

        var image = new EmotionView
        {
            Anger = emotion[0].Scores.Anger * 100,
            Nombre = ImageResult.Nombre,
            Url = ImageResult.Url,
            Disgust = emotion[0].Scores.Disgust * 100,
            Contempt = emotion[0].Scores.Contempt * 100,
            Fear = emotion[0].Scores.Fear * 100,
            Happiness = emotion[0].Scores.Happiness * 100,
            Neutral = emotion[0].Scores.Neutral * 100,
            Sadness = emotion[0].Scores.Sadness * 100,
            Surprise = emotion[0].Scores.Surprise * 100
        };

        ImageCollection.Add(image);
        return emotion;
    }
    catch (Exception)
    {
        return null;
    }
}
public async Task<string> GetEmotion(string imageFileName)
{
    EmotionServiceClient client = new EmotionServiceClient(EMOTION_API_KEY);

    Emotion[] result;
    using (var file = File.OpenRead(imageFileName))
    {
        result = await client.RecognizeAsync(file);
    }

    // Use the largest detected face.
    Emotion emotion = result.OrderByDescending(e => e.FaceRectangle.Height * e.FaceRectangle.Width).FirstOrDefault();
    if (emotion == null || emotion.Scores == null)
    {
        return null;
    }

    var probabilities = new Dictionary<string, float>
    {
        { "Anger", emotion.Scores.Anger },
        { "Contempt", emotion.Scores.Contempt },
        { "Disgust", emotion.Scores.Disgust },
        { "Happiness", emotion.Scores.Happiness },
        { "Fear", emotion.Scores.Fear },
        { "Neutral", emotion.Scores.Neutral },
        { "Sadness", emotion.Scores.Sadness },
        { "Surprise", emotion.Scores.Surprise }
    };

    // Return the name of the most likely emotion.
    return probabilities.OrderByDescending(p => p.Value).First().Key;
}
public CognitiveService()
{
    // Face APIs
    FaceServiceClient = new FaceServiceClient("25cb663824684ae88357c0f7f0c460ac");

    // Emotion APIs
    EmotionServiceClient = new EmotionServiceClient("f8b92cbe5d704194a05df3f1322a00e6");
}
public FacialRecognition()
{
    InitializeComponent();

    Title = "Facial Recognition";
    Icon = "Songs.png";

    emotionClient = new EmotionServiceClient(Constants.EmotionApiKey);
}
public async Task RunAsync()
{
    using (StreamWriter sw = new StreamWriter("./" + org + ".csv"))
    {
        sw.WriteLine("file,result,Anger,Contempt,Disgust,Fear,Happiness,Neutral,Sadness,Surprise");

        EmotionServiceClient emotionServiceClient = new EmotionServiceClient(key);

        var images = Directory.GetFiles(imgFolder).Where(img => img.EndsWith(".jpg")).ToList();
        var progress = 1;

        foreach (var image in images)
        {
            var info = new FileInfo(image);
            Console.Write(info.Name + $"({progress++}/{images.Count})...");

            try
            {
                await writeEmotionResultToStreamAsync(sw, emotionServiceClient, image, info);
            }
            catch (Exception exception)
            {
                Console.WriteLine(exception.Message);
            }

            // Wait between calls to avoid being throttled.
            await Task.Delay(3000);
        }
    }

    Console.WriteLine("All Emotions Done");
}
/// <summary>
/// Creates the Emotion API client used for emotion detection.
/// </summary>
private void InitializeEmotionEngine()
{
    AppStatus.Text = "EmotionServiceClient is created";

    // Create Project Oxford Emotion API Service client.
    emotionServiceClient = new EmotionServiceClient(oxfordEmotionKey);
}
private async void StartWork(object sender, RoutedEventArgs e)
{
    var Ox = new EmotionServiceClient(APIKeyOx);

    var l = await Search("bill+gates");
    gv.ItemsSource = l;

    // Find the image with the angriest face.
    float max = 0;
    foreach (var x in l)
    {
        var res = await Ox.RecognizeAsync(x.Url);
        if (res.Length > 0)
        {
            var n = res[0].Scores.Anger;
            x.Value = (int)(n * 1000);
            if (n > max)
            {
                max = n;
                img.Source = x.Img;
            }
        }

        // Pause between calls to stay within the API rate limit.
        await Task.Delay(300);
    }

    // Rebind the grid so the updated scores are displayed.
    gv.ItemsSource = null;
    gv.ItemsSource = l;
}
private async Task<ObservableCollection<Face>> UpdateEmotions(string selectedFile, ObservableCollection<Face> faces)
{
    using (var fileStreamEmotions = File.OpenRead(selectedFile))
    {
        var emotionServiceClient = new EmotionServiceClient(_subscriptionKeyEmotions);
        var emotions = await emotionServiceClient.RecognizeAsync(
            fileStreamEmotions,
            faces.Select(face => new Rectangle
            {
                Height = face.Height,
                Left = face.Left,
                Top = face.Top,
                Width = face.Width
            }).ToArray());

        foreach (var emotion in emotions)
        {
            foreach (var face in faces.Where(face =>
                face.Height == emotion.FaceRectangle.Height &&
                face.Left == emotion.FaceRectangle.Left &&
                face.Top == emotion.FaceRectangle.Top &&
                face.Width == emotion.FaceRectangle.Width))
            {
                face.Scores = emotion.Scores;
                face.CalculateEmotion();
            }
        }

        return faces;
    }
}
public async Task<string> GetMemeCaptionAsync(Stream stream)
{
    var client = new EmotionServiceClient(EmotionApiKey);
    var result = await client.RecognizeAsync(stream);
    return ProcessEmotionAnalysisResult(result);
}
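// ProcessEmotionAnalysisResult is not shown in this snippet. A minimal sketch, assuming it
// simply turns the first detected face's top-ranked emotion into a caption string (the real
// implementation may choose captions very differently):
private static string ProcessEmotionAnalysisResult(Emotion[] result)
{
    if (result == null || result.Length == 0)
    {
        return "No face detected";
    }

    // ToRankedList() returns the emotions ordered by confidence, highest first.
    var topEmotion = result[0].Scores.ToRankedList().First();
    return $"Looking very {topEmotion.Key.ToLowerInvariant()} today";
}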
async Task<string> GetPhotoEmotion(MediaFile file)
{
    Emotion[] emotionResult;
    using (var imageStream = file.GetStream())
    {
        var emotionServiceClient = new EmotionServiceClient(emotionServiceKey);
        emotionResult = await emotionServiceClient.RecognizeAsync(imageStream);
    }

    if (!emotionResult.Any())
    {
        return null;
    }

    var faceEmotion = emotionResult[0]?.Scores;

    // Map each score to the search term used for that emotion.
    var dictionary = new Dictionary<string, double>
    {
        { "happy", faceEmotion.Happiness },
        { "angry", faceEmotion.Anger },
        { "sad", faceEmotion.Sadness },
        { "disgusted", faceEmotion.Disgust },
        { "contemptuous", faceEmotion.Contempt },
        { "surprised", faceEmotion.Surprise },
        { "scared", faceEmotion.Fear },
        { "neutral", faceEmotion.Neutral }
    };

    // Return the term for the strongest emotion.
    var search = dictionary.OrderByDescending(arg => arg.Value).Select(x => x.Key).First();
    return search;
}
/// <summary>
/// Initializes the emotion detection based on the application resources configuration.
/// </summary>
public EmotionDetection()
{
    var resources = ResourceLoader.GetForCurrentView("/RobbieSenses/Resources");
    var emotionApiKey = resources.GetString("EmotionAPIKey");
    emotionServiceClient = new EmotionServiceClient(emotionApiKey);
}
public async Task Load(Stream stream)
{
    IsDataLoading = true;

    EmotionServiceClient esc = new EmotionServiceClient("4064c52bfb044805a39d2d3c33749f44");
    Emotions = await esc.RecognizeAsync(stream);

    IsDataLoading = false;
}