// Generates a natural-language description (first caption) for the image stream.
// NOTE(review): the method name keeps the original "Asnyc" typo because renaming
// would break existing callers.
public async Task <string> GenerateDescriptionAsnyc(FileStream fileStream)
{
    VisualFeatureTypes[] features = new VisualFeatureTypes[] { VisualFeatureTypes.Description };
    var results = await visionClient.AnalyzeImageInStreamAsync(fileStream, features);

    // FirstOrDefault avoids an out-of-range failure when the service returns an
    // empty caption list (the original indexed [0] unconditionally), and the
    // extra null-conditionals tolerate a missing Description section.
    return results.Description?.Captions?.FirstOrDefault()?.Text;
}
// Analyze a local image and, when any configured keyword (e.g. a person) appears
// in its tags or description, upload it to blob storage and record a table row.
private static async Task AnalyzeLocalAsync(string CCTVName, string imagePath)
{
    if (!File.Exists(imagePath))
    {
        Console.WriteLine(
            "\nUnable to open or read localImagePath:\n{0} \n", imagePath);
        return;
    }

    using (Stream imageStream = File.OpenRead(imagePath))
    {
        ImageAnalysis analysis = await computerVision.AnalyzeImageInStreamAsync(
            imageStream, features);
        var data = LogAnalysisResult(analysis, imagePath);

        foreach (var key in keywords)
        {
            // Ordinal case-insensitive match instead of ToLower().Contains():
            // avoids culture-sensitive lowering surprises (e.g. Turkish 'I')
            // and an extra string allocation per comparison.
            bool matched =
                data.Tags.IndexOf(key, StringComparison.OrdinalIgnoreCase) >= 0 ||
                data.Description.IndexOf(key, StringComparison.OrdinalIgnoreCase) >= 0;
            if (!matched)
            {
                continue;
            }

            // Keyword detected (e.g. a human): persist the evidence.
            data.Tanggal = DateTime.Now; // NOTE(review): consider UtcNow; kept for backward compatibility
            var res = await blobHelper.UploadFile(imagePath, CCTVName);
            data.ImageUrl = res.url;
            data.CCTVName = CCTVName;
            data.AssignKey();
            // Insert result intentionally ignored (original assigned an unused local).
            await tableHelper.InsertData(data);
            break;
        }
    }
}
// Downloads the image at imageUrl and runs Computer Vision analysis on it with
// the requested visual features.
public async Task <ImageAnalysis> AnalyzeImageAsync(string imageUrl, IList <VisualFeatureTypes> features)
{
    // WebClient is IDisposable; the original leaked it. (WebClient is also
    // legacy — HttpClient is preferred — but swapping it would change the
    // block's dependencies, so here it is only disposed correctly.)
    using (var client = new WebClient())
    using (Stream stream = client.OpenRead(imageUrl))
    {
        return await _visionClient.AnalyzeImageInStreamAsync(stream, features);
    }
}
// Describes a photo via Computer Vision. Returns null when no photo is supplied
// or when the service call fails (best-effort, non-throwing by design).
public async static Task <ImageAnalysis> DescribePicture(MediaFile photo)
{
    ImageAnalysis description = null;
    try
    {
        if (photo != null)
        {
            using (var stream = photo.GetStream())
            {
                var credentials = new ApiKeyServiceClientCredentials(Constants.VisionApiKey);
                // Renamed from the misleading "client": this is the (empty)
                // HTTP handler pipeline, not the vision client itself.
                var handlers = new System.Net.Http.DelegatingHandler[] { };
                var visionClient = new ComputerVisionClient(credentials, handlers);
                visionClient.Endpoint = Constants.VisionEndpoint;
                var features = new VisualFeatureTypes[]
                {
                    VisualFeatureTypes.Tags,
                    VisualFeatureTypes.Faces,
                    VisualFeatureTypes.Categories,
                    VisualFeatureTypes.Description,
                    VisualFeatureTypes.Color
                };
                description = await visionClient.AnalyzeImageInStreamAsync(stream, features);
            }
        }
    }
    catch (Exception ex)
    {
        // Keep the best-effort contract (return null) but stop silently
        // swallowing the failure: surface it for diagnostics.
        System.Diagnostics.Debug.WriteLine(ex);
    }
    return description;
}
// Analyzes a remote image by URL, then counts detected "person" objects and
// appends each object's details to the result string.
public static async Task AnalyzeImageUrl(ComputerVisionClient client, string imageUrl)
{
    Console.WriteLine("----------------------------------------------------------");
    Console.WriteLine("ANALYZE IMAGE - URL");
    Console.WriteLine();

    // Creating a list that defines the features to be extracted from the image.
    List<VisualFeatureTypes?> features = new List<VisualFeatureTypes?>()
    {
        VisualFeatureTypes.Categories, VisualFeatureTypes.Description,
        VisualFeatureTypes.Faces, VisualFeatureTypes.ImageType,
        VisualFeatureTypes.Tags, VisualFeatureTypes.Adult,
        VisualFeatureTypes.Color, VisualFeatureTypes.Brands,
        VisualFeatureTypes.Objects
    };

    Console.WriteLine($"Analyzing the image {System.IO.Path.GetFileName(imageUrl)}...");
    Console.WriteLine();

    // BUG FIX: the previous code opened the URL with new StreamReader(imageUrl),
    // which treats the string as a LOCAL FILE PATH (failing for http(s) URLs)
    // and also leaked the reader. Analyze the remote image directly by URL.
    ImageAnalysis results = await client.AnalyzeImageAsync(imageUrl, features);

    // Objects
    peopleCount = 0;
    foreach (var obj in results.Objects)
    {
        if (obj.ObjectProperty.Contains("person"))
        {
            peopleCount++;
        }
        resultJsonString += $"ObjectProperty:{obj.ObjectProperty}{Environment.NewLine} Confidence:{obj.Confidence}{Environment.NewLine} Rectangle:[x:{obj.Rectangle.X},y:{obj.Rectangle.Y},w:{obj.Rectangle.W},h:{obj.Rectangle.H}]{Environment.NewLine}";
    }
}
/// <summary>
/// Runs Computer Vision analysis on the supplied image stream and maps the
/// result to a MediaAI instance. Failures are logged and rethrown.
/// </summary>
public async Task <MediaAI> AnalyseImageAsync(
    Stream imageStream,
    CancellationToken cancellationToken)
{
    ComputerVisionClient visionClient = _computerVisionClientFunc();
    try
    {
        var imageAnalysis = await visionClient.AnalyzeImageInStreamAsync(
            imageStream,
            Features,
            cancellationToken: cancellationToken);
        return ToMediaAI(imageAnalysis);
    }
    catch (ComputerVisionErrorException ex)
    {
        // Service-level error: include the raw response body for diagnosis.
        Log.Error(ex, "Error in analyse image. Message: {Message}", ex.Response.Content);
        throw;
    }
    catch (Exception ex)
    {
        Log.Error(ex, "Error in analyse image");
        throw;
    }
}
// Captures a photo with the camera UI, analyzes it and shows its captions.
// async void is acceptable here because this is a top-level UI event handler.
private async void Button_Click(object sender, RoutedEventArgs e)
{
    var captureUI = new CameraCaptureUI();
    captureUI.PhotoSettings.Format = CameraCaptureUIPhotoFormat.Jpeg;
    var file = await captureUI.CaptureFileAsync(CameraCaptureUIMode.Photo);

    // BUG FIX: CaptureFileAsync returns null when the user cancels the capture
    // dialog; the original dereferenced it unconditionally and crashed.
    if (file == null)
    {
        return;
    }

    var features = new List <VisualFeatureTypes>()
    {
        VisualFeatureTypes.Categories,
        VisualFeatureTypes.Description,
        VisualFeatureTypes.Faces,
        VisualFeatureTypes.ImageType,
        VisualFeatureTypes.Tags
    };
    var VisionServiceClient = new ComputerVisionClient(new ApiKeyServiceClientCredentials(key));
    VisionServiceClient.Endpoint = endpoint;

    using (Stream imageFileStream = await file.OpenStreamForReadAsync())
    {
        var analysisResult = await VisionServiceClient.AnalyzeImageInStreamAsync(imageFileStream, features);
        var captions = string.Join(
            Environment.NewLine,
            analysisResult?.Description?.Captions?.Select(c => $"{c.Text} ({c.Confidence})")
                ?? Enumerable.Empty <string>());
        await new MessageDialog(captions).ShowAsync();
    }
}
// Analyzes a local image file with the full set of visual features and prints
// the result.
static async Task AnalyzeImage(string imgPath)
{
    Console.Write("Analyzing " + imgPath + " ...");
    using (var stImg = new FileStream(imgPath, FileMode.Open))
    // ComputerVisionClient is IDisposable; the original leaked it on every call.
    using (var visionClient = new ComputerVisionClient(
        new ApiKeyServiceClientCredentials(DemoSettings.csVisionKey))
    {
        Endpoint = DemoSettings.csVisionEndpoint
    })
    {
        var features = new VisualFeatureTypes[]
        {
            VisualFeatureTypes.Adult, VisualFeatureTypes.Brands,
            VisualFeatureTypes.Categories, VisualFeatureTypes.Color,
            VisualFeatureTypes.Description, VisualFeatureTypes.Faces,
            VisualFeatureTypes.ImageType, VisualFeatureTypes.Objects,
            VisualFeatureTypes.Tags
        };
        var imgAnalysis = await visionClient.AnalyzeImageInStreamAsync(stImg, features);
        PrintAnalysisResult(imgAnalysis);
    }
}
/// <summary>
/// Necessary to confuse the await operator.
/// Without this wrapper, the API call took much longer to be queued
/// for processing.
/// </summary>
/// <param name="ui_image">The image to encode as JPEG and analyze.</param>
/// <returns>A task that completes when the analysis field has been set.</returns>
public async Task APIWrapper(UIImage ui_image)
{
    // Encode to JPEG and expose it as a managed stream for the API call.
    using (Stream jpegStream = ui_image.AsJPEG().AsStream())
    {
        analysis = await client.AnalyzeImageInStreamAsync(jpegStream, features);
    }
}
// Runs tag-only Computer Vision analysis on the given media file and returns
// the detected tags.
public static async Task <IEnumerable <ImageTag> > GetTagsAsync(MediaFile image)
{
    var subscriptionKey = CognitiveKey.GetCognitiveKey();

    // Only tags are requested — keeps the call cheap and focused.
    List <VisualFeatureTypes> features = new List <VisualFeatureTypes>
    {
        VisualFeatureTypes.Tags
    };

    ComputerVisionClient computerVision = new ComputerVisionClient(
        new ApiKeyServiceClientCredentials(subscriptionKey),
        new System.Net.Http.DelegatingHandler[] { })
    {
        // You must use the same region as you used to get your subscription
        // keys. For example, if you got your subscription keys from westus,
        // replace "westcentralus" with "westus".
        // Specify the Azure region
        Endpoint = "https://eastus.api.cognitive.microsoft.com"
    };

    // Analyze Images
    using (Stream imageStream = image.GetStream())
    {
        var analysis = await computerVision.AnalyzeImageInStreamAsync(imageStream, features);
        return analysis.Tags;
    }
}
/// <summary>
/// Uploads the image to Cognitive Services and performs analysis.
/// </summary>
/// <param name="imageFilePath">The image file path.</param>
/// <returns>Awaitable image analysis result.</returns>
private async Task <ImageAnalysis> UploadAndAnalyzeImageAsync(string imageFilePath)
{
    // -----------------------------------------------------------------------
    // KEY SAMPLE CODE STARTS HERE
    // -----------------------------------------------------------------------

    // Create the Cognitive Services Vision API service client.
    using (var client = new ComputerVisionClient(Credentials) { Endpoint = Endpoint })
    {
        Log("ComputerVisionClient is created");

        using (Stream imageFileStream = File.OpenRead(imageFilePath))
        {
            // Analyze the image for the features selected in the UI, using the
            // language the user picked in the language drop-down.
            Log("Calling ComputerVisionClient.AnalyzeImageInStreamAsync()...");
            VisualFeatureTypes[] visualFeatures = GetSelectedVisualFeatures();
            string language = (_language.SelectedItem as RecognizeLanguage).ShortCode;
            return await client.AnalyzeImageInStreamAsync(imageFileStream, visualFeatures, null, language);
        }
    }

    // -----------------------------------------------------------------------
    // KEY SAMPLE CODE ENDS HERE
    // -----------------------------------------------------------------------
}
/// <summary>
/// Analyzes the picture stream and extracts the most confident caption, the
/// accent color and (when present) the most confident landmark name. Returns
/// an empty result on any failure — this call is best-effort by design.
/// </summary>
public async Task <AddPictureResult> AddPicture(Stream pictureStream)
{
    try
    {
        var analysis = await _computerVisionClient.AnalyzeImageInStreamAsync(
            pictureStream,
            details: new[] { Details.Landmarks },
            visualFeatures: new[] { VisualFeatureTypes.Color, VisualFeatureTypes.Description });

        // Most confident caption, with a fallback text when none came back.
        var caption = analysis.Description.Captions
            .OrderByDescending(c => c.Confidence)
            .FirstOrDefault()?.Text ?? "nothing! No description found";

        // Accent color arrives as a bare hex value without the leading '#'.
        var accent = Color.FromHex($"#{analysis.Color.AccentColor}");

        // First category that carries landmark details, if any.
        var landmarkCategory = analysis.Categories.FirstOrDefault(
            c => c.Detail != null && c.Detail.Landmarks != null && c.Detail.Landmarks.Any());
        var landmarkName = landmarkCategory == null
            ? ""
            : landmarkCategory.Detail.Landmarks.OrderByDescending(l => l.Confidence).First().Name;

        // Wrap in our result object and send along
        return new AddPictureResult(caption, accent, landmarkName);
    }
    catch
    {
        return new AddPictureResult();
    }
}
// Analyzes the current bitmap and remembers the bounding box of the LAST
// detected object; returns that object's label, or null when nothing was found.
private async Task <string> AnalyzeImage()
{
    var title = default(string);
    var stream = StreamFromBitmapSource(this.bitmap);
    var analysis = await computerVision.AnalyzeImageInStreamAsync(stream, MainWindow.features);
    try
    {
        // Guard against a null Objects collection explicitly instead of
        // relying on the catch block to swallow a NullReferenceException.
        foreach (var item in analysis.Objects ?? Enumerable.Empty<DetectedObject>())
        {
            //if (item.ObjectProperty == "person")
            //{
            title = item.ObjectProperty;
            this.boundingBox = item.Rectangle;
            //}
        }
    }
    catch (System.Exception ex)
    {
        // Any remaining failure is surfaced in the UI status text.
        this.StatusText = ex.ToString();
    }
    return title;
}
// Blob-triggered function: analyzes each uploaded blob with Computer Vision.
// BUG FIX: returns Task instead of void so the Functions host can observe
// completion and exceptions — `async void` made failures unobservable.
public static async Task Run([BlobTrigger("uploads/{name}")] Stream myBlob, string name, TraceWriter log, ExecutionContext context)
{
    var config = new ConfigurationBuilder()
        .SetBasePath(context.FunctionAppDirectory)
        .AddJsonFile("local.settings.json", optional: true, reloadOnChange: true)
        .AddEnvironmentVariables()
        .Build();

    ComputerVisionClient client = new ComputerVisionClient(
        new ApiKeyServiceClientCredentials(config["computerVisionKey"]),
        new System.Net.Http.DelegatingHandler[] { });
    client.Endpoint = config["computerVisionEndpoint"];

    try
    {
        var result = await client.AnalyzeImageInStreamAsync(myBlob, features);
        DisplayResults(result, log, name);
        Debug.WriteLine(result);
    }
    catch (Exception x)
    {
        // Best-effort: an analysis failure is logged, not rethrown.
        Debug.WriteLine(x);
    }
}
// Handles an uploaded photo of a stolen object: analyzes the first attachment
// and either issues a crime reference number (tag matches the reported object)
// or asks the user to confirm the picture.
private async Task TheftObjectUploaded(IDialogContext context, IAwaitable <IEnumerable <Attachment> > arg)
{
    var stolen = await arg;
    StolenObjectImages = stolen.Select(x => x.ContentUrl).ToList();
    var att = StolenObjectImages.FirstOrDefault();
    if (att != null)
    {
        var req = WebRequest.Create(att);
        // BUG FIX: WebResponse is IDisposable and was leaked by the original.
        using (var response = req.GetResponse())
        using (var stream = response.GetResponseStream())
        {
            var client = new ComputerVisionClient(new ApiKeyServiceClientCredentials(PrivateKeys.VisionApiKey));
            client.Endpoint = "https://northeurope.api.cognitive.microsoft.com";
            var imageAnalysis = await client.AnalyzeImageInStreamAsync(stream, features);

            if (ContainsItemOrPseudonym(imageAnalysis.Tags, LUISIssueResult.CurrentResponse.Entities.Where(x => x.Type == Entities.StolenObject).FirstOrDefault()?.Entity))
            {
                await IssueCrimeReferenceNumber(context);
            }
            else
            {
                PromptDialog.Confirm(
                    context,
                    ConfirmPictureOfStolenObjectIsCorrect,
                    $"That looks like {imageAnalysis.Description.Captions[0].Text}. Are you sure this is a picture of the " +
                    $"{LUISIssueResult.CurrentResponse.Entities.Where(x => x.Type == Entities.StolenObject).FirstOrDefault()?.Entity}",
                    "Sorry, I didn't quite understand you, can you try again?",
                    promptStyle: PromptStyle.None);
            }
        }
    }
}
// Durable-function activity: downloads the step's file, runs face detection,
// and records the outcome on the step (confidence 1 when at least one face
// was found, else 0).
public async Task <CognitiveStep> CognitivePipeline_FaceDetectionBasic([ActivityTrigger] CognitiveStep input, ILogger log)
{
    log.LogInformation($"******* Starting Face Detection");

    string key = GlobalSettings.GetKeyValue("computerVisionKey");
    string endpoint = GlobalSettings.GetKeyValue("computerVisionEndpoint");

    // BUG FIX: dispose both the client and the stream — the original leaked them.
    using (ComputerVisionClient computerVision = new ComputerVisionClient(
        new Microsoft.Azure.CognitiveServices.Vision.ComputerVision.ApiKeyServiceClientCredentials(key),
        new System.Net.Http.DelegatingHandler[] { })
    {
        Endpoint = endpoint
    })
    {
        var data = await filesStorageRepo.GetFileAsync(input.FileUrl);
        using (var imageStream = new MemoryStream(data))
        {
            var detectionResult = await computerVision.AnalyzeImageInStreamAsync(
                imageStream,
                new List <VisualFeatureTypes> { VisualFeatureTypes.Faces });

            input.IsSuccessful = true;
            input.Confidence = detectionResult.Faces.Count > 0 ? 1 : 0;
            input.LastUpdatedAt = DateTime.UtcNow;
            input.RawOutput = JsonConvert.SerializeObject(detectionResult);
        }
    }
    return input;
}
// Downloads each frame of the request from Google Cloud Storage, describes it
// with Computer Vision, and returns a map of frame index -> caption text.
public async Task <Dictionary <int, string> > RunG(DescRequest request)
{
    var computerVision = new ComputerVisionClient(new ApiKeyServiceClientCredentials(ConfigHelper.GetSubscriptionKey()))
    {
        Endpoint = "https://francecentral.api.cognitive.microsoft.com/"
    };
    var analysisList = new Dictionary <int, string>();
    var storageClient = await StorageClient.CreateAsync(
        GoogleCredential.FromFile(HttpContext.Current.Server.MapPath("~\\keys.json")));

    for (var i = 0; i < request.Count; i++)
    {
        // BUG FIX: dispose the per-iteration stream and Image — the original
        // leaked one MemoryStream and one GDI+ Image per frame.
        using (var imageStream = new MemoryStream())
        {
            await storageClient.DownloadObjectAsync("galeata_magica_123", $"{request.Filename}/{i}.jpg", imageStream);
            using (var image = Image.FromStream(imageStream))
            using (var jpegStream = image.ToStream(ImageFormat.Jpeg))
            {
                var analysis = await computerVision.AnalyzeImageInStreamAsync(jpegStream, Features);
                var text = analysis.Description.Captions.FirstOrDefault()?.Text ?? string.Empty;
                analysisList.Add(i, text);
            }
        }
    }
    return analysisList;
}
// Analyzes the bundled Images/house.jpg sample and prints its description,
// tags and dominant colors to the console.
public static async Task RunSampleAsync(string endpoint, string key)
{
    IComputerVisionClient client = new ComputerVisionClient(new ApiKeyServiceClientCredentials(key))
    {
        Endpoint = endpoint
    };

    // Read image file.
    using (FileStream stream = new FileStream(Path.Combine("Images", "house.jpg"), FileMode.Open))
    {
        var requestedFeatures = new List <VisualFeatureTypes>()
        {
            VisualFeatureTypes.Description,
            VisualFeatureTypes.Categories,
            VisualFeatureTypes.Color,
            VisualFeatureTypes.Faces,
            VisualFeatureTypes.ImageType,
            VisualFeatureTypes.Tags
        };

        // Analyze the image.
        ImageAnalysis result = await client.AnalyzeImageInStreamAsync(stream, requestedFeatures);

        var bestCaption = result.Description.Captions.FirstOrDefault()?.Text;
        Console.WriteLine("The image can be described as: {0}\n", bestCaption);

        Console.WriteLine("Tags associated with this image:\nTag\t\tConfidence");
        foreach (var tag in result.Tags)
        {
            Console.WriteLine("{0}\t\t{1}", tag.Name, tag.Confidence);
        }

        Console.WriteLine("\nThe primary colors of this image are: {0}", string.Join(", ", result.Color.DominantColors));
    }
}
// Serializes the bitmap to an in-memory PNG and submits it for analysis.
private static async Task <ImageAnalysis> AnalyseMemoryStreamImage(Bitmap image)
{
    var pngStream = new MemoryStream();
    image.Save(pngStream, System.Drawing.Imaging.ImageFormat.Png);
    // Rewind so the API reads the PNG from the beginning.
    pngStream.Position = 0;
    return await _computerVision.AnalyzeImageInStreamAsync(pngStream, _features);
}
/// <summary>Analyze a local image</summary>
/// <param name="client">Authenticated Computer Vision Client</param>
/// <returns>Analysis result for the hard-coded local image.</returns>
private static async Task <ImageAnalysis> AnalyzeImageAsync(ComputerVisionClient client)
{
    const string localImagePath = @".\Untitled5.png";
    using (Stream imageStream = File.OpenRead(localImagePath))
    {
        return await client.AnalyzeImageInStreamAsync(imageStream, returnAttributes);
    }
}
// Opens the local image, analyzes it with the configured features, and shows
// the outcome via DisplayResults.
private async Task AnalyzeLocalAsync(ComputerVisionClient computerVision, string imagePath)
{
    using (Stream imageStream = File.OpenRead(imagePath))
    {
        var analysisResult = await computerVision.AnalyzeImageInStreamAsync(imageStream, Features);
        DisplayResults(analysisResult);
    }
}
// Analyzes the storage file's backing path with the configured visual features.
private static async Task <ImageAnalysis> AnalyzeLocalAsync(ComputerVisionClient computerVision, StorageFile imageFile)
{
    using (Stream imageStream = File.OpenRead(imageFile.Path))
    {
        return await computerVision.AnalyzeImageInStreamAsync(imageStream, _visualFeatures);
    }
}
// Analyze a local image.
// Throws FileNotFoundException when the path cannot be read — more specific
// than the bare Exception the original threw, and still caught by any
// existing catch (Exception) handlers.
public async Task <ImageAnalysis> AnalyzeLocalAsync(string imagePath)
{
    if (!File.Exists(imagePath))
    {
        throw new FileNotFoundException($"Unable to open or read localImagePath: {imagePath}", imagePath);
    }
    using (var imageStream = File.OpenRead(imagePath))
    {
        return await _computerVision.AnalyzeImageInStreamAsync(imageStream, _features).ConfigureAwait(false);
    }
}
// SignalR broadcast function. Photo messages are moderated (dropped when
// flagged as adult content), uploaded to storage, and forwarded as a
// PhotoUrlMessage; all other messages are forwarded unchanged.
public async static Task SendMessages(
    [HttpTrigger(AuthorizationLevel.Anonymous, "post")] object message,
    [SignalR(HubName = "chat")] IAsyncCollector <SignalRMessage> signalRMessages)
{
    var jsonObject = (JObject)message;
    var msg = jsonObject.ToObject <Message>();

    if (msg.TypeInfo.Name == nameof(PhotoMessage))
    {
        var photoMessage = jsonObject.ToObject <PhotoMessage>();
        var bytes = Convert.FromBase64String(photoMessage.Base64Photo);

        var subscriptionKey = Environment.GetEnvironmentVariable("ComputerVisionKey");
        var computerVision = new ComputerVisionClient(
            new ApiKeyServiceClientCredentials(subscriptionKey),
            new DelegatingHandler[] { });
        computerVision.Endpoint = Environment.GetEnvironmentVariable("ComputerVisionEndpoint");

        var features = new List <VisualFeatureTypes>() { VisualFeatureTypes.Adult };
        using (var stream = new MemoryStream(bytes))
        {
            var result = await computerVision.AnalyzeImageInStreamAsync(stream, features);
            // Silently drop messages flagged as adult content.
            if (result.Adult.IsAdultContent)
            {
                return;
            }
        }

        var url = await StorageHelper.Upload(bytes, photoMessage.FileEnding);
        msg = new PhotoUrlMessage(photoMessage.Username)
        {
            Id = photoMessage.Id,
            Timestamp = photoMessage.Timestamp,
            Url = url
        };

        // BUG FIX: forward the rewritten PhotoUrlMessage (msg) — the original
        // sent the raw incoming message, which still carried the Base64 photo
        // payload, and never used msg at all.
        await signalRMessages.AddAsync(new SignalRMessage
        {
            Target = "newMessage",
            Arguments = new object[] { msg }
        });
        return;
    }

    await signalRMessages.AddAsync(new SignalRMessage
    {
        Target = "newMessage",
        Arguments = new[] { message }
    });
}
// Analyzes the image's binary data with the full feature set.
private async Task <ImageAnalysis> DescribeImageFromStreamAsync(ImageData imageData)
{
    // BUG FIX: the stream returned by OpenRead() was never disposed.
    using (var imageStream = imageData.BinaryData.OpenRead())
    {
        return await _computerVisionClient.AnalyzeImageInStreamAsync(
            imageStream,
            new List <VisualFeatureTypes>()
            {
                VisualFeatureTypes.Categories, VisualFeatureTypes.Description,
                VisualFeatureTypes.Faces, VisualFeatureTypes.ImageType,
                VisualFeatureTypes.Tags, VisualFeatureTypes.Adult,
                VisualFeatureTypes.Color, VisualFeatureTypes.Brands,
                VisualFeatureTypes.Objects
            });
    }
}
// Opens the input file, sends it for analysis, and wraps the outcome together
// with the originating input in a Result.
public async Task <Result> RecognizeAsync(Input input)
{
    using (var imageStream = File.OpenRead(input.FilePath))
    {
        _logger.LogInformation($"Sending for analysis: {input.FilePath}");
        var analysis = await _computerVision.AnalyzeImageInStreamAsync(imageStream, Features);
        return new Result(input, analysis);
    }
}
// Uploads the storage file's contents and analyzes it for a fixed feature set.
private async Task <ImageAnalysis> UploadAndAnalyzeImage(StorageFile imageFile)
{
    // BUG FIX: dispose the stream (the original leaked it), and log the API
    // actually being called (the old message named AnalyzeImageAsync).
    using (var stream = await imageFile.OpenStreamForReadAsync())
    {
        Log("Calling VisionServiceClient.AnalyzeImageInStreamAsync()...");
        VisualFeatureTypes?[] visualFeatures = new VisualFeatureTypes?[]
        {
            VisualFeatureTypes.Adult, VisualFeatureTypes.Categories,
            VisualFeatureTypes.Color, VisualFeatureTypes.Description,
            VisualFeatureTypes.Faces, VisualFeatureTypes.ImageType,
            VisualFeatureTypes.Tags
        };
        var analysisResult = await VisionServiceClient.AnalyzeImageInStreamAsync(stream, visualFeatures);
        return analysisResult;
    }
}
// Decodes the base64-encoded image payload and returns tag + description
// analysis as the HTTP response body.
public async Task <IActionResult> AnalyzeImage([FromBody] ImageUploadModel data)
{
    var features = new VisualFeatureTypes[]
    {
        VisualFeatureTypes.Tags,
        VisualFeatureTypes.Description
    };
    var imageBytes = Convert.FromBase64String(data.value);
    using (Stream imageStream = new MemoryStream(imageBytes))
    {
        var analysis = await _computerVisionService.AnalyzeImageInStreamAsync(imageStream, features);
        return Ok(analysis);
    }
}
// Synchronously analyzes the stream and returns the result serialized as JSON.
// NOTE(review): this blocks on an async call (sync-over-async) and can deadlock
// on a UI/legacy-ASP.NET synchronization context; prefer an async signature
// when callers can be migrated.
public string GetMetadata(Stream stream)
{
    using (Stream imageFileStream = stream) // takes ownership of the caller's stream
    {
        // GetAwaiter().GetResult() instead of .Result so a failure surfaces as
        // the original exception rather than a wrapped AggregateException.
        ImageAnalysis analysisResult =
            client.AnalyzeImageInStreamAsync(imageFileStream, null, null).GetAwaiter().GetResult();
        return JsonConvert.SerializeObject(analysisResult);
    }
}
// Moderates an image: passes when the configured SearchTag appears among the
// five strongest description tags. Also returns the best caption text (or null).
public static async Task <(bool, string)> PassesImageModerationAsync(Stream image)
{
    var client = new ComputerVisionClient(
        new ApiKeyServiceClientCredentials(ApiKey),
        new DelegatingHandler[] { });

    var result = await client.AnalyzeImageInStreamAsync(image, VisualFeatures);

    // BUG FIX: guard consistently — the original null-checked the caption
    // lookup but dereferenced result.Description.Tags unconditionally.
    bool containsCat = result?.Description?.Tags?.Take(5).Contains(SearchTag) ?? false;
    string message = result?.Description?.Captions?.FirstOrDefault()?.Text;
    return (containsCat, message);
}