/// <summary>
/// Takes an array of bytes that's a camera output and predicts face locations.
/// </summary>
/// <param name="imageBytes">the image bytes</param>
/// <param name="actualWidth">the width of the camera output pane</param>
/// <param name="actualHeight">the height of the camera output pane</param>
/// <returns>the rectangles where faces are located</returns>
/// <exception cref="InvalidOperationException">thrown when the Clarifai API call fails</exception>
public async Task<List<Rect>> PredictFaces(byte[] imageBytes, double actualWidth, double actualHeight)
{
    var image = new ClarifaiFileImage(imageBytes);
    var response = await _clarifaiClient.PublicModels.FaceDetectionModel
        .Predict(image)
        .ExecuteAsync();
    if (!response.IsSuccessful)
    {
        // Prefer a specific exception type over the base Exception class.
        throw new InvalidOperationException("Error: " + response.Status.Description);
    }

    var rects = new List<Rect>();
    foreach (FaceDetection face in response.Get().Data)
    {
        Crop crop = face.Crop;
        // Crop values are fractional (0..1); scale them to the output pane size.
        double top = (double)crop.Top * actualHeight;
        double left = (double)crop.Left * actualWidth;
        double bottom = (double)crop.Bottom * actualHeight;
        double right = (double)crop.Right * actualWidth;
        double width = right - left;
        double height = bottom - top;
        // BUG FIX: Rect's constructor is (x, y, width, height); the original
        // passed (top, left, ...) which swapped the coordinates.
        rects.Add(new Rect(left, top, width, height));
    }
    return rects;
}
public async Task WorkflowPredictFileImageShouldBeSuccessful()
{
    // Arrange: run the "food-and-general" workflow against a file-backed image.
    var balloonsImage = new ClarifaiFileImage(ReadResource(BALLOONS_IMAGE_FILE));

    // Act
    var response = await Client.WorkflowPredict("food-and-general", balloonsImage)
        .ExecuteAsync();

    // Assert: the workflow ran both models and echoed the input back with bytes.
    Assert.True(response.IsSuccessful);

    WorkflowResult workflowResult = response.Get().WorkflowResult;
    Assert.AreEqual(2, workflowResult.Predictions.Count);
    Assert.NotNull(workflowResult.Predictions[0].Data);
    Assert.NotNull(workflowResult.Predictions[1].Data);

    var returnedImage = (ClarifaiFileImage)workflowResult.Input;
    Assert.NotNull(returnedImage.Bytes);
}
/// <summary>
/// Takes an array of bytes that's a camera output and predicts concepts located on it
/// using a selected model.
/// </summary>
/// <param name="imageBytes">the image bytes</param>
/// <param name="selectedModel">the selected model</param>
/// <returns>a newline-separated list of concept names with their confidence percentages</returns>
public async Task<string> PredictConcepts(byte[] imageBytes, string selectedModel)
{
    // Resolve the user-facing model name to its Clarifai model ID.
    string modelID = _models[selectedModel];
    var image = new ClarifaiFileImage(imageBytes);

    var response = await _clarifaiClient.Predict<Concept>(modelID, image)
        .ExecuteAsync();
    if (!response.IsSuccessful)
    {
        throw new Exception(response.Status.Description);
    }

    // Format each concept as "name (confidence%)", one per line.
    IEnumerable<string> lines = response.Get().Data
        .Select(cc => string.Format("{0} ({1:0.00}%)", cc.Name, cc.Value * 100));
    return string.Join("\n", lines);
}
public static async Task<List<TagSuggestion>> RequestWorkflowAnalysis(string imageFilePath, params ImageAnalysisType[] categories)
{
    // A set is used so duplicate suggestions from the two providers collapse.
    var suggestions = new HashSet<TagSuggestion>();

    var fileImage = new ClarifaiFileImage(File.ReadAllBytes(imageFilePath));
    var workflowResponse = await clarifaiClient.WorkflowPredict("workflow", fileImage).ExecuteAsync();

    if (workflowResponse.IsSuccessful)
    {
        suggestions.AddRange(ParsePredictions(imageFilePath, workflowResponse.Get().WorkflowResult.Predictions));
    }
    else
    {
        // Best-effort: warn the user but still fall through to the Vision API results.
        MessageBox.Show("Clarifai Analysis was not successful! Check your internet connection and api key, and you have a workflow named 'workflow'");
    }

    // Always merge in Google Vision suggestions regardless of the Clarifai outcome.
    suggestions.AddRange(VisionAPISuggestions.VisionApi.RequestVisionAnalysis(imageFilePath));

    return new List<TagSuggestion>(suggestions);
}
/// <summary>
/// Builds a visual-similarity search criterion from a file-backed image,
/// delegating to the byte-array overload.
/// </summary>
/// <param name="fileImage">the image whose bytes drive the search</param>
/// <param name="crop">an optional crop region to restrict the search area</param>
/// <returns>the search criterion</returns>
public static SearchBy ImageVisually(ClarifaiFileImage fileImage, Crop crop = null)
    => ImageVisually(fileImage.Bytes, crop);
/// <summary>
/// Wraps the raw image bytes in a Clarifai input and retrieves machine tags for it.
/// </summary>
/// <param name="bytes">the raw image bytes</param>
/// <returns>the machine tags produced for the image</returns>
public IEnumerable<IMachineTag> GetTagsForImageBytes(byte[] bytes)
    => this.GetTagsForInput(new ClarifaiFileImage(bytes));
/// <summary>
/// Converts raw image bytes into a Clarifai file input and returns the tags found for it.
/// </summary>
/// <param name="imageBytes">the raw image bytes</param>
/// <returns>the tag names produced for the image</returns>
public IEnumerable<string> GetTagsForImageBytes(byte[] imageBytes)
{
    var input = new ClarifaiFileImage(imageBytes);
    return this.GetTagsForInput(input);
}