public async Task<HttpResponseMessage> Post([FromBody] Activity activity)
{
    if (activity.Type == ActivityTypes.Message)
    {
        var connector = new ConnectorClient(new Uri(activity.ServiceUrl));
        var imageAttachment = activity.Attachments?.FirstOrDefault(a => a.ContentType.Contains("image"));

        if (imageAttachment != null)
        {
            // Store the most recently received image and its metadata.
            LastImage = await GetImageStream(connector, imageAttachment);
            LastImageName = imageAttachment.Name;
            LastImageType = imageAttachment.ContentType;

            // Analyze the image with the Computer Vision API.
            ComputerVisionResult computerVisionResult = await GetImageInfo(LastImage);
            LastImageTags = string.Join(", ", computerVisionResult.Tags);
            LastImageDescription = computerVisionResult.Description;

            string replyText = "Got your image!";
            if (!string.IsNullOrWhiteSpace(computerVisionResult.Text))
            {
                replyText += $" It probably shows {computerVisionResult.Text}.";
            }

            Activity reply = activity.CreateReply(replyText);
            await connector.Conversations.ReplyToActivityAsync(reply);
        }
        else
        {
            // Creates a dialog stack for the new conversation, adds MainDialog to the stack,
            // and forwards all messages to the dialog stack.
            await Conversation.SendAsync(activity, () => new MainDialog());
        }
    }

    var response = Request.CreateResponse(HttpStatusCode.OK);
    return response;
}
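// The controller above references GetImageStream and several static fields (LastImage,
// LastImageName, LastImageType, LastImageTags, LastImageDescription) that are not shown
// in this excerpt. A minimal sketch of what they might look like follows; the field names
// are taken from the code above, but the download logic is an assumption. Channels that
// protect attachment URLs (for example Skype) may additionally require a bearer token
// obtained from the bot's MicrosoftAppCredentials.
private static Stream LastImage;
private static string LastImageName;
private static string LastImageType;
private static string LastImageTags;
private static string LastImageDescription;

private static async Task<Stream> GetImageStream(ConnectorClient connector, Attachment imageAttachment)
{
    // The connector parameter is unused in this sketch; it is kept only to match the call site above.
    using (var httpClient = new HttpClient())
    {
        // Download the attachment content from the URL supplied by the channel.
        var response = await httpClient.GetAsync(imageAttachment.ContentUrl);
        response.EnsureSuccessStatusCode();

        // Copy into a MemoryStream so the data remains readable after the HttpClient is disposed.
        var memoryStream = new MemoryStream();
        await response.Content.CopyToAsync(memoryStream);
        memoryStream.Position = 0;
        return memoryStream;
    }
}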
private static async Task<ComputerVisionResult> GetImageInfo(Stream imageStream)
{
    ComputerVisionResult result = new ComputerVisionResult();

    try
    {
        // Call the Computer Vision service.
        var jsonResult = string.Empty;

        using (HttpClient client = new HttpClient())
        {
            // Request headers.
            client.DefaultRequestHeaders.Add(
                "Ocp-Apim-Subscription-Key",
                ConfigurationManager.AppSettings["AZURE_COGNITIVE_SERVICES_KEY"]);

            var queryString = HttpUtility.ParseQueryString(string.Empty);
            queryString["visualFeatures"] = "Description,Tags";
            queryString["language"] = "en";

            // Assemble the URI for the REST API call (assumes the configured value ends with the
            // analyze endpoint so the query string appends correctly).
            string uri = ConfigurationManager.AppSettings["AZURE_COGNITIVE_SERVICES_URI"] + queryString;

            // Request body: the incoming image stream as a byte array.
            byte[] byteData = null;
            using (MemoryStream ms = new MemoryStream())
            {
                imageStream.CopyTo(ms);
                if (ms.Length >= IMAGE_SIZE_LIMIT)
                {
                    throw new ArgumentException($"Image size must be less than {IMAGE_SIZE_LIMIT / 1024} KB.");
                }

                byteData = ms.ToArray();
            }

            using (ByteArrayContent content = new ByteArrayContent(byteData))
            {
                content.Headers.ContentType = new MediaTypeHeaderValue("application/octet-stream");

                // Execute the REST API call.
                var response = await client.PostAsync(uri, content);
                if (response.StatusCode != HttpStatusCode.OK)
                {
                    throw new Exception("Image analysis failed.");
                }

                // Get the JSON response.
                jsonResult = await response.Content.ReadAsStringAsync();
            }
        }

        // Parse the caption and tags from the JSON response.
        JObject json = JObject.Parse(jsonResult);
        JToken captions = json["description"]["captions"];
        JToken tags = json["tags"];

        string caption = captions.First()["text"].Value<string>();
        double captionConfidence = captions.First()["confidence"].Value<double>();
        if (captionConfidence >= 0.8)
        {
            result.Description = caption;
            result.Text = caption;
        }

        foreach (JToken item in tags)
        {
            string tag = item["name"].Value<string>();
            double confidence = item["confidence"].Value<double>();
            if (confidence >= 0.8)
            {
                result.Tags.Add(tag);
                if (string.IsNullOrWhiteSpace(result.Text))
                {
                    result.Text = tag;
                }
            }
        }
    }
    catch (Exception ex)
    {
        result.Description = ex.Message;
    }

    return result;
}
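// ComputerVisionResult and IMAGE_SIZE_LIMIT are referenced above but not defined in this
// excerpt. A minimal sketch, assuming the type is just a container for the parsed caption,
// tags, and the text used in the bot's reply. The 4 MB limit mirrors the Computer Vision
// Analyze API's documented maximum image size, but the exact constant used by the original
// code is an assumption.
private const int IMAGE_SIZE_LIMIT = 4 * 1024 * 1024;

public class ComputerVisionResult
{
    // Caption returned by the Description feature (kept only when confidence >= 0.8).
    public string Description { get; set; }

    // Text the bot echoes back to the user: the caption if available, otherwise the first
    // high-confidence tag.
    public string Text { get; set; }

    // Tags with confidence >= 0.8.
    public List<string> Tags { get; set; } = new List<string>();
}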